
Upgrade V8 to v3.5.4

v0.7.4-release
Ryan Dahl
commit 89bed19513
100 changed files (number of changed lines in parentheses):

  1. deps/v8/.gitignore (1)
  2. deps/v8/ChangeLog (18)
  3. deps/v8/Makefile (110)
  4. deps/v8/SConstruct (84)
  5. deps/v8/build/all.gyp (3)
  6. deps/v8/build/armu.gypi (2)
  7. deps/v8/build/common.gypi (27)
  8. deps/v8/build/v8-features.gypi (7)
  9. deps/v8/include/v8.h (2)
  10. deps/v8/preparser/preparser.gyp (41)
  11. deps/v8/samples/samples.gyp (19)
  12. deps/v8/src/SConscript (5)
  13. deps/v8/src/api.cc (9)
  14. deps/v8/src/api.h (7)
  15. deps/v8/src/arm/code-stubs-arm.cc (180)
  16. deps/v8/src/arm/deoptimizer-arm.cc (17)
  17. deps/v8/src/arm/disasm-arm.cc (2)
  18. deps/v8/src/arm/lithium-arm.cc (8)
  19. deps/v8/src/arm/lithium-codegen-arm.cc (168)
  20. deps/v8/src/arm/macro-assembler-arm.h (14)
  21. deps/v8/src/assembler.cc (2)
  22. deps/v8/src/assembler.h (2)
  23. deps/v8/src/ast.h (13)
  24. deps/v8/src/bootstrapper.cc (24)
  25. deps/v8/src/codegen.cc (3)
  26. deps/v8/src/compiler.cc (1)
  27. deps/v8/src/d8.cc (180)
  28. deps/v8/src/d8.gyp (19)
  29. deps/v8/src/d8.h (48)
  30. deps/v8/src/debug.cc (4)
  31. deps/v8/src/deoptimizer.cc (4)
  32. deps/v8/src/deoptimizer.h (6)
  33. deps/v8/src/elements.cc (536)
  34. deps/v8/src/elements.h (69)
  35. deps/v8/src/execution.cc (2)
  36. deps/v8/src/flag-definitions.h (1)
  37. deps/v8/src/heap.cc (43)
  38. deps/v8/src/heap.h (1)
  39. deps/v8/src/hydrogen-instructions.cc (17)
  40. deps/v8/src/ia32/deoptimizer-ia32.cc (4)
  41. deps/v8/src/ia32/disasm-ia32.cc (19)
  42. deps/v8/src/isolate.cc (145)
  43. deps/v8/src/isolate.h (54)
  44. deps/v8/src/json-parser.h (4)
  45. deps/v8/src/jsregexp.cc (2)
  46. deps/v8/src/log-utils.cc (2)
  47. deps/v8/src/log-utils.h (2)
  48. deps/v8/src/mark-compact.cc (107)
  49. deps/v8/src/mark-compact.h (16)
  50. deps/v8/src/messages.js (1)
  51. deps/v8/src/mips/deoptimizer-mips.cc (2)
  52. deps/v8/src/objects-debug.cc (13)
  53. deps/v8/src/objects-inl.h (139)
  54. deps/v8/src/objects-printer.cc (34)
  55. deps/v8/src/objects-visiting.cc (3)
  56. deps/v8/src/objects-visiting.h (13)
  57. deps/v8/src/objects.cc (852)
  58. deps/v8/src/objects.h (158)
  59. deps/v8/src/parser.cc (70)
  60. deps/v8/src/parser.h (8)
  61. deps/v8/src/runtime.cc (114)
  62. deps/v8/src/runtime.h (8)
  63. deps/v8/src/scanner-base.cc (449)
  64. deps/v8/src/scanner-base.h (145)
  65. deps/v8/src/spaces.cc (16)
  66. deps/v8/src/spaces.h (22)
  67. deps/v8/src/token.cc (8)
  68. deps/v8/src/token.h (5)
  69. deps/v8/src/v8.cc (3)
  70. deps/v8/src/v8natives.js (10)
  71. deps/v8/src/v8threads.cc (5)
  72. deps/v8/src/version.cc (4)
  73. deps/v8/src/weakmap.js (95)
  74. deps/v8/src/x64/deoptimizer-x64.cc (4)
  75. deps/v8/src/x64/disasm-x64.cc (15)
  76. deps/v8/test/cctest/SConscript (3)
  77. deps/v8/test/cctest/cctest.gyp (1)
  78. deps/v8/test/cctest/cctest.status (3)
  79. deps/v8/test/cctest/test-alloc.cc (13)
  80. deps/v8/test/cctest/test-api.cc (5)
  81. deps/v8/test/cctest/test-debug.cc (1)
  82. deps/v8/test/cctest/test-heap.cc (8)
  83. deps/v8/test/cctest/test-parsing.cc (128)
  84. deps/v8/test/cctest/test-serialize.cc (8)
  85. deps/v8/test/cctest/test-spaces.cc (125)
  86. deps/v8/test/cctest/test-weakmaps.cc (149)
  87. deps/v8/test/cctest/testcfg.py (13)
  88. deps/v8/test/mjsunit/d8-os.js (173)
  89. deps/v8/test/mjsunit/harmony/weakmaps.js (145)
  90. deps/v8/test/mjsunit/math-floor.js (27)
  91. deps/v8/test/mjsunit/math-round.js (17)
  92. deps/v8/test/mjsunit/regress/regress-1419.js (38)
  93. deps/v8/test/mjsunit/regress/regress-1546.js (32)
  94. deps/v8/test/mjsunit/regress/regress-1583.js (57)
  95. deps/v8/test/mjsunit/regress/regress-1586.js (64)
  96. deps/v8/test/mjsunit/regress/regress-91517.js (112)
  97. deps/v8/test/mjsunit/regress/regress-91787.js (35)
  98. deps/v8/test/preparser/testcfg.py (9)
  99. deps/v8/tools/grokdump.py (119)
  100. deps/v8/tools/gyp/v8.gyp (222)

1
deps/v8/.gitignore

@ -31,5 +31,4 @@ shell_g
/tools/visual_studio/Release
/xcodebuild/
TAGS
Makefile
*.Makefile

18
deps/v8/ChangeLog

@ -1,3 +1,21 @@
2011-08-10: Version 3.5.4
Added a preliminary implementation of ES Harmony weak maps. Weak
maps can be enabled by the flag --harmony-weakmaps.
Introduced a toplevel Makefile to support GYP-based building. GYP
can be obtained from http://gyp.googlecode.com.
Fixed a bug in the length property of functions created by
Function.prototype.bind.
Reduced malloc heap allocation on process startup.
Several important code generation bug fixes.
Performance improvements on all platforms.
2011-08-03: Version 3.5.3
MIPS: Port of fix to ClassOf check from ARM.
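
The weak maps feature described in the 3.5.4 notes above is what the new src/weakmap.js, the bootstrapper changes and the JS_WEAK_MAP_TYPE object code in this commit implement. A minimal embedder sketch, assuming the ordinary v8 3.5-era sample boilerplate (only the --harmony-weakmaps flag and the WeakMap methods come from this commit; everything else is illustrative):

// Sketch only, not part of the commit: pass the experimental flag before the
// first context is created, then run a script that uses the new constructor.
#include <v8.h>

int main() {
  const char flags[] = "--harmony-weakmaps";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(sizeof(flags) - 1));

  v8::HandleScope handle_scope;
  v8::Persistent<v8::Context> context = v8::Context::New();
  v8::Context::Scope context_scope(context);

  // Keys must be objects, and an entry does not keep its key alive, so the
  // value becomes collectable once the key object is unreachable.
  v8::Handle<v8::Script> script = v8::Script::Compile(v8::String::New(
      "var key = {};\n"
      "var map = new WeakMap();\n"
      "map.set(key, 42);\n"
      "map.has(key) ? map.get(key) : -1;"));
  v8::Handle<v8::Value> result = script->Run();

  context.Dispose();
  return result->Int32Value() == 42 ? 0 : 1;  // expect 42
}

Because the constructor is only installed by Genesis::InitializeExperimentalGlobal when FLAG_harmony_weakmaps is set (see the bootstrapper.cc hunk below), the flag has to be in effect before the context is created.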

110
deps/v8/Makefile

@ -0,0 +1,110 @@
# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Variable default definitions. Override them by exporting them in your shell.
CXX ?= "g++" # For distcc: export CXX="distcc g++"
LINK ?= "g++"
OUTDIR ?= out
TESTJOBS ?= -j16
GYPFLAGS ?= -Dv8_can_use_vfp_instructions=true
# Architectures and modes to be compiled.
ARCHES = ia32 x64 arm
MODES = release debug
# List of files that trigger Makefile regeneration:
GYPFILES = build/all.gyp build/common.gypi build/v8-features.gypi \
preparser/preparser.gyp samples/samples.gyp src/d8.gyp \
test/cctest/cctest.gyp tools/gyp/v8.gyp
# Generates all combinations of ARCHES and MODES, e.g. "ia32.release".
BUILDS = $(foreach mode,$(MODES),$(addsuffix .$(mode),$(ARCHES)))
CHECKS = $(addsuffix .check,$(BUILDS))
# Generates corresponding test targets, e.g. "ia32.release.check".
.PHONY: all release debug ia32 x64 arm $(BUILDS)
# Target definitions. "all" is the default, you can specify any others on the
# command line, e.g. "make ia32". Targets defined in $(BUILDS), e.g.
# "ia32.debug", can also be specified.
all: release debug
release: $(addsuffix .release,$(ARCHES))
debug: $(addsuffix .debug,$(ARCHES))
ia32: $(addprefix ia32.,$(MODES))
x64: $(addprefix x64.,$(MODES))
arm: $(addprefix arm.,$(MODES))
.SECONDEXPANSION:
$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@)
@$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \
CXX="$(CXX)" LINK="$(LINK)" \
BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \
python -c "print raw_input().capitalize()") \
builddir="$(shell pwd)/$(OUTDIR)/$@"
# Test targets.
check: all
@tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR)
$(addsuffix .check,$(MODES)): $$(basename $$@)
@tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) --mode=$(basename $@)
$(addsuffix .check,$(ARCHES)): $$(basename $$@)
@tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) --arch=$(basename $@)
$(CHECKS): $$(basename $$@)
@tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) --arch-and-mode=$(basename $@)
# Clean targets. You can clean each architecture individually, or everything.
$(addsuffix .clean,$(ARCHES)):
rm -f $(OUTDIR)/Makefile-$(basename $@)
rm -rf $(OUTDIR)/$(basename $@).release
rm -rf $(OUTDIR)/$(basename $@).debug
clean: $(addsuffix .clean,$(ARCHES))
# GYP file generation targets.
$(OUTDIR)/Makefile-ia32: $(GYPFILES)
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/common.gypi --depth=. -Dtarget_arch=ia32 -S-ia32 \
$(GYPFLAGS)
$(OUTDIR)/Makefile-x64: $(GYPFILES)
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/common.gypi --depth=. -Dtarget_arch=x64 -S-x64 \
$(GYPFLAGS)
$(OUTDIR)/Makefile-arm: $(GYPFILES) build/armu.gypi
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/common.gypi --depth=. -Ibuild/armu.gypi -S-arm \
$(GYPFLAGS)

84
deps/v8/SConstruct

@ -153,13 +153,19 @@ LIBRARY_FLAGS = {
}
},
'armeabi:softfp' : {
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0', 'CAN_USE_VFP_INSTRUCTIONS'],
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0'],
'vfp3:on': {
'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
},
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=softfp'],
}
},
'armeabi:hard' : {
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'],
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'],
'vfp3:on': {
'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
},
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=hard'],
}
@ -436,7 +442,7 @@ CCTEST_EXTRA_FLAGS = {
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64'],
'LINKFLAGS': ['/STACK:2091752']
'LINKFLAGS': ['/STACK:2097152']
},
}
}
@ -496,7 +502,10 @@ SAMPLE_FLAGS = {
}
},
'armeabi:hard' : {
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'],
'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'],
'vfp3:on': {
'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
},
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=hard'],
}
@ -601,7 +610,7 @@ SAMPLE_FLAGS = {
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752']
'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
},
'mode:debug': {
'CCFLAGS': ['/Od'],
@ -756,7 +765,7 @@ PREPARSER_FLAGS = {
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752']
'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
},
'mode:debug': {
'CCFLAGS': ['/Od'],
@ -822,6 +831,57 @@ D8_FLAGS = {
'msvc': {
'all': {
'LIBS': ['winmm', 'ws2_32']
},
'verbose:off': {
'CCFLAGS': ['/nologo'],
'LINKFLAGS': ['/NOLOGO']
},
'verbose:on': {
'LINKFLAGS': ['/VERBOSE']
},
'prof:on': {
'LINKFLAGS': ['/MAP']
},
'mode:release': {
'CCFLAGS': ['/O2'],
'LINKFLAGS': ['/OPT:REF', '/OPT:ICF'],
'msvcrt:static': {
'CCFLAGS': ['/MT']
},
'msvcrt:shared': {
'CCFLAGS': ['/MD']
},
'msvcltcg:on': {
'CCFLAGS': ['/GL'],
'pgo:off': {
'LINKFLAGS': ['/LTCG'],
},
},
'pgo:instrument': {
'LINKFLAGS': ['/LTCG:PGI']
},
'pgo:optimize': {
'LINKFLAGS': ['/LTCG:PGO']
}
},
'arch:ia32': {
'CPPDEFINES': ['V8_TARGET_ARCH_IA32', 'WIN32'],
'LINKFLAGS': ['/MACHINE:X86']
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
},
'mode:debug': {
'CCFLAGS': ['/Od'],
'LINKFLAGS': ['/DEBUG'],
'CPPDEFINES': ['DEBUG'],
'msvcrt:static': {
'CCFLAGS': ['/MTd']
},
'msvcrt:shared': {
'CCFLAGS': ['/MDd']
}
}
}
}
@ -1039,6 +1099,12 @@ SIMPLE_OPTIONS = {
'default': 'off',
'help': 'compress startup data (snapshot) [Linux only]'
},
'vfp3': {
'values': ['on', 'off'],
'default': 'on',
'help': 'use vfp3 instructions when building the snapshot [Arm only]'
},
}
ALL_OPTIONS = dict(PLATFORM_OPTIONS, **SIMPLE_OPTIONS)
@ -1343,10 +1409,12 @@ def BuildSpecific(env, mode, env_overrides, tools):
env['SONAME'] = soname
# Build the object files by invoking SCons recursively.
d8_env = Environment(tools=tools)
d8_env.Replace(**context.flags['d8'])
(object_files, shell_files, mksnapshot, preparser_files) = env.SConscript(
join('src', 'SConscript'),
build_dir=join('obj', target_id),
exports='context tools',
exports='context tools d8_env',
duplicate=False
)
@ -1375,8 +1443,6 @@ def BuildSpecific(env, mode, env_overrides, tools):
context.library_targets.append(library)
context.library_targets.append(preparser_library)
d8_env = Environment(tools=tools)
d8_env.Replace(**context.flags['d8'])
context.ApplyEnvOverrides(d8_env)
if context.options['library'] == 'static':
shell = d8_env.Program('d8' + suffix, object_files + shell_files)

3
deps/v8/build/all.gyp

@ -1,4 +1,4 @@
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@ -8,6 +8,7 @@
'target_name': 'All',
'type': 'none',
'dependencies': [
'../preparser/preparser.gyp:*',
'../samples/samples.gyp:*',
'../src/d8.gyp:d8',
],

2
deps/v8/build/armu.gypi

@ -32,5 +32,5 @@
'armv7': 1,
'arm_neon': 0,
'arm_fpu': 'vfpv3',
}
},
}

27
deps/v8/build/common.gypi

@ -1,4 +1,4 @@
# Copyright 2010 the V8 project authors. All rights reserved.
# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@ -30,6 +30,7 @@
'library%': 'static_library',
'component%': 'static_library',
'visibility%': 'hidden',
'msvs_multi_core_compile%': '1',
'variables': {
'conditions': [
[ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
@ -46,23 +47,41 @@
'host_arch%': '<(host_arch)',
'target_arch%': '<(host_arch)',
'v8_target_arch%': '<(target_arch)',
'v8_enable_debugger_support%': 1,
'conditions': [
['(target_arch=="arm" and host_arch!="arm") or \
(target_arch=="x64" and host_arch!="x64")', {
'want_separate_host_toolset': 1,
}, {
'want_separate_host_toolset': 0,
}],
],
},
'target_defaults': {
'default_configuration': 'Debug',
'conditions': [
['v8_enable_debugger_support==1', {
'defines': ['ENABLE_DEBUGGER_SUPPORT',],
},
],
],
'configurations': {
'Debug': {
'cflags': [ '-g', '-O0' ],
'defines': [ 'ENABLE_DISASSEMBLER', 'DEBUG' ],
'defines': [ 'ENABLE_DISASSEMBLER', 'DEBUG', 'V8_ENABLE_CHECKS',
'OBJECT_PRINT' ],
},
'Release': {
'cflags': [ '-O3', '-fomit-frame-pointer', '-fdata-sections', '-ffunction-sections' ],
'cflags': [ '-O3', '-fomit-frame-pointer', '-fdata-sections',
'-ffunction-sections' ],
},
},
},
'conditions': [
[ 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'target_defaults': {
'cflags': [ '-Wall', '-pthread', '-fno-rtti', '-fno-exceptions' ],
'cflags': [ '-Wall', '-pthread', '-fno-rtti', '-fno-exceptions',
'-pedantic' ],
'ldflags': [ '-pthread', ],
'conditions': [
[ 'target_arch=="ia32"', {

7
deps/v8/build/v8-features.gypi

@ -89,6 +89,13 @@
'USE_EABI_HARDFLOAT=1',
'CAN_USE_VFP_INSTRUCTIONS',
],
'cflags': [
'-mfloat-abi=hard',
],
}, {
'defines': [
'USE_EABI_HARDFLOAT=0',
],
}],
],
}],

2
deps/v8/include/v8.h

@ -3606,7 +3606,7 @@ class V8EXPORT Locker {
/**
* Returns whether v8::Locker is being used by this V8 instance.
*/
static bool IsActive() { return active_; }
static bool IsActive();
private:
bool has_lock_;
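
The only change to the public header is that Locker::IsActive() loses its inline body, presumably in support of the shared-library d8 work elsewhere in this commit, so the call now resolves inside the V8 library instead of reading the private static member from the embedder's side. A hedged sketch of a call site, with the multi-threaded embedding scenario assumed:

// Illustrative only: a worker thread takes the V8 lock before touching the VM.
#include <v8.h>

void RunScriptLocked(v8::Persistent<v8::Context> context) {
  v8::Locker locker;             // acquire the global V8 lock for this thread
  v8::HandleScope handle_scope;
  v8::Context::Scope scope(context);

  if (v8::Locker::IsActive()) {
    // At least one Locker has been used, i.e. the embedder runs V8 from
    // multiple threads.
  }
  v8::Script::Compile(v8::String::New("1 + 1"))->Run();
}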

41
deps/v8/preparser/preparser.gyp

@ -0,0 +1,41 @@
# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'targets': [
{
'target_name': 'preparser',
'type': 'executable',
'dependencies': [
'../tools/gyp/v8.gyp:preparser_lib',
],
'sources': [
'preparser-process.cc',
],
},
],
}

19
deps/v8/samples/samples.gyp

@ -1,4 +1,4 @@
# Copyright 2010 the V8 project authors. All rights reserved.
# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@ -26,23 +26,24 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'target_defaults': {
'type': 'executable',
'dependencies': [
'../tools/gyp/v8.gyp:v8',
],
'include_dirs': [
'../include',
],
},
'targets': [
{
'target_name': 'shell',
'type': 'executable',
'dependencies': [
'../tools/gyp/v8.gyp:v8',
],
'sources': [
'shell.cc',
],
},
{
'target_name': 'process',
'type': 'executable',
'dependencies': [
'../tools/gyp/v8.gyp:v8',
],
'sources': [
'process.cc',
],

5
deps/v8/src/SConscript

@ -32,6 +32,7 @@ sys.path.append(join(root_dir, 'tools'))
import js2c
Import('context')
Import('tools')
Import('d8_env')
SOURCES = {
@ -65,6 +66,7 @@ SOURCES = {
disassembler.cc
diy-fp.cc
dtoa.cc
elements.cc
execution.cc
factory.cc
flags.cc
@ -307,6 +309,7 @@ debug-debugger.js
EXPERIMENTAL_LIBRARY_FILES = '''
proxy.js
weakmap.js
'''.split()
@ -338,7 +341,7 @@ def ConfigureObjectFiles():
else:
d8_files = context.GetRelevantSources(D8_FULL_FILES)
d8_objs = [d8_js_obj]
d8_objs.append(context.ConfigureObject(env, [d8_files]))
d8_objs.append(context.ConfigureObject(d8_env, [d8_files]))
# Combine the JavaScript library files into a single C++ file and
# compile it.

9
deps/v8/src/api.cc

@ -84,7 +84,7 @@ namespace v8 {
if (has_pending_exception) { \
if (handle_scope_implementer->CallDepthIsZero() && \
(isolate)->is_out_of_memory()) { \
if (!handle_scope_implementer->ignore_out_of_memory()) \
if (!(isolate)->ignore_out_of_memory()) \
i::V8::FatalProcessOutOfMemory(NULL); \
} \
bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \
@ -4259,8 +4259,8 @@ static void* ExternalValueImpl(i::Handle<i::Object> obj) {
Local<Value> v8::External::Wrap(void* data) {
i::Isolate* isolate = i::Isolate::Current();
STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
LOG_API(isolate, "External::Wrap");
EnsureInitializedForIsolate(isolate, "v8::External::Wrap()");
LOG_API(isolate, "External::Wrap");
ENTER_V8(isolate);
v8::Local<v8::Value> result = CanBeEncodedAsSmi(data)
@ -4304,8 +4304,8 @@ void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) {
Local<External> v8::External::New(void* data) {
STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
i::Isolate* isolate = i::Isolate::Current();
LOG_API(isolate, "External::New");
EnsureInitializedForIsolate(isolate, "v8::External::New()");
LOG_API(isolate, "External::New");
ENTER_V8(isolate);
return ExternalNewImpl(data);
}
@ -4797,8 +4797,7 @@ Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
void V8::IgnoreOutOfMemoryException() {
EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory(
true);
EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
}
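
The ignore-out-of-memory flag moves from the per-thread HandleScopeImplementer (see the api.h hunk below) onto the Isolate, which is what the bailout-macro change at the top of this file consults. The public entry point stays the same; a hedged usage sketch, with the embedding boilerplate assumed:

// Sketch only: the call is unchanged by this refactoring; what moved is where
// the flag is stored (Isolate instead of the per-thread HandleScopeImplementer).
#include <v8.h>

void RunWithoutAbortingOnOOM(v8::Persistent<v8::Context> context) {
  // Ask V8 not to call FatalProcessOutOfMemory from the API bailout path
  // when a script exhausts the heap.
  v8::V8::IgnoreOutOfMemoryException();

  v8::HandleScope handle_scope;
  v8::Context::Scope scope(context);
  v8::TryCatch try_catch;
  v8::Script::Compile(v8::String::New(
      "var a = []; for (;;) a.push(new Array(100000));"))->Run();
}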

7
deps/v8/src/api.h

@ -404,7 +404,6 @@ class HandleScopeImplementer {
entered_contexts_(0),
saved_contexts_(0),
spare_(NULL),
ignore_out_of_memory_(false),
call_depth_(0) { }
// Threading support for handle data.
@ -437,10 +436,6 @@ class HandleScopeImplementer {
inline bool HasSavedContexts();
inline List<internal::Object**>* blocks() { return &blocks_; }
inline bool ignore_out_of_memory() { return ignore_out_of_memory_; }
inline void set_ignore_out_of_memory(bool value) {
ignore_out_of_memory_ = value;
}
private:
void ResetAfterArchive() {
@ -448,7 +443,6 @@ class HandleScopeImplementer {
entered_contexts_.Initialize(0);
saved_contexts_.Initialize(0);
spare_ = NULL;
ignore_out_of_memory_ = false;
call_depth_ = 0;
}
@ -473,7 +467,6 @@ class HandleScopeImplementer {
// Used as a stack to keep track of saved contexts.
List<Context*> saved_contexts_;
Object** spare_;
bool ignore_out_of_memory_;
int call_depth_;
// This is only used for threading support.
v8::ImplementationUtilities::HandleScopeData handle_scope_data_;

180
deps/v8/src/arm/code-stubs-arm.cc

@ -1603,83 +1603,139 @@ void CompareStub::Generate(MacroAssembler* masm) {
}
// The stub returns zero for false, and a non-zero value for true.
// The stub expects its argument in the tos_ register and returns its result in
// it, too: zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
// This stub uses VFP3 instructions.
CpuFeatures::Scope scope(VFP3);
Label false_result, true_result, not_string;
Label patch;
const Register map = r9.is(tos_) ? r7 : r9;
// undefined -> false
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(tos_, ip);
__ b(eq, &false_result);
// Boolean -> its value
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(tos_, ip);
__ b(eq, &false_result);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(tos_, ip);
// "tos_" is a register and contains a non-zero value. Hence we implicitly
// return true if the equal condition is satisfied.
__ Ret(eq);
// undefined -> false.
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Smis: 0 -> false, all other -> true
__ tst(tos_, tos_);
__ b(eq, &false_result);
__ tst(tos_, Operand(kSmiTagMask));
// "tos_" is a register and contains a non-zero value. Hence we implicitly
// return true if the not equal condition is satisfied.
__ Ret(eq);
// Boolean -> its value.
CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(tos_, ip);
__ b(eq, &false_result);
// 'null' -> false.
CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
// Get the map of the heap object.
__ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
__ tst(tos_, Operand(kSmiTagMask));
// tos_ contains the correct return value already
__ Ret(eq);
} else if (types_.NeedsMap()) {
// If we need a map later and have a Smi -> patch.
__ JumpIfSmi(tos_, &patch);
}
// Undetectable -> false.
__ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsUndetectable));
__ b(&false_result, ne);
if (types_.NeedsMap()) {
__ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
// JavaScript object -> true.
__ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
// "tos_" is a register and contains a non-zero value. Hence we implicitly
// return true if the greater than condition is satisfied.
__ Ret(ge);
// Everything with a map could be undetectable, so check this now.
__ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsUndetectable));
// Undetectable -> false.
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
__ Ret(ne);
}
if (types_.Contains(SPEC_OBJECT)) {
// Spec object -> true.
__ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
// tos_ contains the correct non-zero return value already.
__ Ret(ge);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> patch.
__ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
__ b(ge, &patch);
}
// String value -> false iff empty.
if (types_.Contains(STRING)) {
// String value -> false iff empty.
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
__ b(&not_string, ge);
__ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset));
// Return string length as boolean value, i.e. return false iff length is 0.
__ Ret();
__ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt);
__ Ret(lt); // the string length is OK as the return value
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a string for the first time -> patch
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
__ b(lt, &patch);
}
__ bind(&not_string);
// HeapNumber -> false iff +0, -0, or NaN.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ b(&true_result, ne);
__ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
__ VFPCompareAndSetFlags(d1, 0.0);
// "tos_" is a register, and contains a non zero value by default.
// Hence we only need to overwrite "tos_" with zero to return false for
// FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
__ Ret();
if (types_.Contains(HEAP_NUMBER)) {
// Heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ b(ne, &not_heap_number);
__ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
__ VFPCompareAndSetFlags(d1, 0.0);
// "tos_" is a register, and contains a non zero value by default.
// Hence we only need to overwrite "tos_" with zero to return false for
// FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
__ Ret();
__ bind(&not_heap_number);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> patch
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ b(eq, &patch);
}
// Return 1/0 for true/false in tos_.
__ bind(&true_result);
__ mov(tos_, Operand(1, RelocInfo::NONE));
__ Ret();
__ bind(&false_result);
__ mov(tos_, Operand(0, RelocInfo::NONE));
__ Ret();
if (types_.Contains(INTERNAL_OBJECT)) {
// Internal objects -> true.
__ mov(tos_, Operand(1, RelocInfo::NONE));
__ Ret();
}
if (!types_.IsAll()) {
__ bind(&patch);
GenerateTypeTransition(masm);
}
}
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
Heap::RootListIndex value,
bool result,
Label* patch) {
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value tos_.
__ LoadRoot(ip, value);
__ cmp(tos_, ip);
// The value of a root is never NULL, so we can avoid loading a non-null
// value into tos_ when we want to return 'true'.
if (!result) {
__ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);
}
__ Ret(eq);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explictly.
__ LoadRoot(ip, value);
__ cmp(tos_, ip);
__ b(eq, patch);
}
}
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
if (!tos_.is(r3)) {
__ mov(r3, Operand(tos_));
}
__ mov(r2, Operand(Smi::FromInt(tos_.code())));
__ mov(r1, Operand(Smi::FromInt(types_.ToByte())));
__ Push(r3, r2, r1);
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
3,
1);
}
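
The rewritten stub only emits checks for the value classes it has already seen (undefined, boolean, null, smi, spec object, string, heap number, internal object) and, when an unexpected class arrives, jumps to GenerateTypeTransition, which tail-calls the kToBoolean_Patch IC utility above. The Types bit set it queries is declared in code-stubs.h, which this diff does not show; the sketch below is an illustrative reconstruction of just the operations the ARM code relies on, not the actual declaration:

// Illustrative reconstruction, not the real class from code-stubs.h: a
// byte-wide set of the value classes a ToBoolean site has observed so far.
#include <stdint.h>

enum ToBooleanValueType {
  UNDEFINED, BOOLEAN, NULL_TYPE, SMI,
  SPEC_OBJECT, STRING, HEAP_NUMBER, INTERNAL_OBJECT
};

class ToBooleanTypes {
 public:
  explicit ToBooleanTypes(uint8_t bits = 0) : bits_(bits) {}

  bool Contains(ToBooleanValueType t) const { return (bits_ & (1 << t)) != 0; }
  void Add(ToBooleanValueType t) { bits_ |= static_cast<uint8_t>(1 << t); }
  bool IsEmpty() const { return bits_ == 0; }
  bool IsAll() const { return bits_ == 0xFF; }
  uint8_t ToByte() const { return bits_; }

  // Only the heap-object classes require loading the value's map.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) ||
           Contains(HEAP_NUMBER) || Contains(INTERNAL_OBJECT);
  }

 private:
  uint8_t bits_;
};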

17
deps/v8/src/arm/deoptimizer-arm.cc

@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
int Deoptimizer::table_entry_size_ = 16;
const int Deoptimizer::table_entry_size_ = 16;
int Deoptimizer::patch_size() {
@ -65,8 +65,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// For each return after a safepoint insert an absolute call to the
// corresponding deoptimization entry.
ASSERT(patch_size() % Assembler::kInstrSize == 0);
int call_size_in_words = patch_size() / Assembler::kInstrSize;
unsigned last_pc_offset = 0;
SafepointTable table(function->code());
for (unsigned i = 0; i < table.length(); i++) {
@ -87,13 +85,18 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
#endif
last_pc_offset = pc_offset;
if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
deoptimization_index, Deoptimizer::LAZY);
last_pc_offset += gap_code_size;
int call_size_in_bytes = MacroAssembler::CallSize(deoptimization_entry,
RelocInfo::NONE);
int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
ASSERT(call_size_in_bytes <= patch_size());
CodePatcher patcher(code->instruction_start() + last_pc_offset,
call_size_in_words);
Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
deoptimization_index, Deoptimizer::LAZY);
patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE);
last_pc_offset += patch_size();
last_pc_offset += call_size_in_bytes;
}
}
@ -530,8 +533,6 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
if (output_count_ - 1 == frame_index) iterator->Done();
}

2
deps/v8/src/arm/disasm-arm.cc

@ -200,7 +200,7 @@ void Decoder::PrintDRegister(int reg) {
// These shift names are defined in a way to match the native disassembler
// formatting. See for example the command "objdump -d <binary file>".
static const char* shift_names[kNumberOfShifts] = {
static const char* const shift_names[kNumberOfShifts] = {
"lsl", "lsr", "asr", "ror"
};

8
deps/v8/src/arm/lithium-arm.cc

@ -1039,7 +1039,13 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
return new LBranch(UseRegisterAtStart(v));
LInstruction* branch = new LBranch(UseRegister(v));
// When we handle all cases, we never deopt, so we don't need to assign the
// environment then. Note that we map the "empty" case to the "all" case in
// the code generator.
ToBooleanStub::Types types = instr->expected_input_types();
bool all_cases_handled = types.IsAll() || types.IsEmpty();
return all_cases_handled ? branch : AssignEnvironment(branch);
}

168
deps/v8/src/arm/lithium-codegen-arm.cc

@ -1564,52 +1564,138 @@ void LCodeGen::DoBranch(LBranch* instr) {
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
if (instr->hydrogen()->value()->type().IsBoolean()) {
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(reg, ip);
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
EmitBranch(true_block, false_block, eq);
} else if (type.IsSmi()) {
__ cmp(reg, Operand(0));
EmitBranch(true_block, false_block, ne);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(reg, ip);
__ b(eq, false_label);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(reg, ip);
__ b(eq, true_label);
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(reg, ip);
__ b(eq, false_label);
__ cmp(reg, Operand(0));
__ b(eq, false_label);
__ JumpIfSmi(reg, true_label);
// Test double values. Zero and NaN are false.
Label call_stub;
DoubleRegister dbl_scratch = double_scratch0();
Register scratch = scratch0();
__ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(scratch, Operand(ip));
__ b(ne, &call_stub);
__ sub(ip, reg, Operand(kHeapObjectTag));
__ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
__ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
__ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
__ b(ne, false_label);
__ b(true_label);
// The conversion stub doesn't cause garbage collections so it's
// safe to not record a safepoint after the call.
__ bind(&call_stub);
ToBooleanStub stub(reg);
RegList saved_regs = kJSCallerSaved | kCalleeSaved;
__ stm(db_w, sp, saved_regs);
__ CallStub(&stub);
__ cmp(reg, Operand(0));
__ ldm(ia_w, sp, saved_regs);
EmitBranch(true_block, false_block, ne);
ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
if (expected.Contains(ToBooleanStub::UNDEFINED)) {
// undefined -> false.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ b(eq, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen undefined for the first time -> deopt.
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
DeoptimizeIf(eq, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// Boolean -> its value.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ b(eq, true_label);
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ b(eq, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
DeoptimizeIf(eq, instr->environment());
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
DeoptimizeIf(eq, instr->environment());
}
#if 0
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// false -> false.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ b(eq, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
DeoptimizeIf(eq, instr->environment());
}
#endif
if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
// 'null' -> false.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
__ b(eq, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen null for the first time -> deopt.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
DeoptimizeIf(eq, instr->environment());
}
if (expected.Contains(ToBooleanStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ cmp(reg, Operand(0));
__ b(eq, false_label);
__ JumpIfSmi(reg, true_label);
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ tst(reg, Operand(kSmiTagMask));
DeoptimizeIf(eq, instr->environment());
}
const Register map = scratch0();
if (expected.NeedsMap()) {
__ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));
// Everything with a map could be undetectable, so check this now.
__ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsUndetectable));
__ b(ne, false_label);
}
if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
// spec object -> true.
__ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
__ b(ge, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> deopt.
__ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
DeoptimizeIf(ge, instr->environment());
}
if (expected.Contains(ToBooleanStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
__ b(ge, &not_string);
__ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
__ cmp(ip, Operand(0));
__ b(ne, true_label);
__ b(false_label);
__ bind(&not_string);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a string for the first time -> deopt
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
DeoptimizeIf(lt, instr->environment());
}
if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
DoubleRegister dbl_scratch = double_scratch0();
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ b(ne, &not_heap_number);
__ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
__ VFPCompareAndSetFlags(dbl_scratch, 0.0);
__ b(vs, false_label); // NaN -> false.
__ b(eq, false_label); // +0, -0 -> false.
__ b(true_label);
__ bind(&not_heap_number);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> deopt.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(eq, instr->environment());
}
if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// internal objects -> true
__ b(true_label);
} else {
// We've seen something for the first time -> deopt.
DeoptimizeIf(al, instr->environment());
}
}
}
}

14
deps/v8/src/arm/macro-assembler-arm.h

@ -92,14 +92,16 @@ class MacroAssembler: public Assembler {
void Jump(Register target, Condition cond = al);
void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
int CallSize(Register target, Condition cond = al);
static int CallSize(Register target, Condition cond = al);
void Call(Register target, Condition cond = al);
int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al);
static int CallSize(Address target,
RelocInfo::Mode rmode,
Condition cond = al);
void Call(Address target, RelocInfo::Mode rmode, Condition cond = al);
int CallSize(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
unsigned ast_id = kNoASTId,
Condition cond = al);
static int CallSize(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
unsigned ast_id = kNoASTId,
Condition cond = al);
void Call(Handle<Code> code,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
unsigned ast_id = kNoASTId,

2
deps/v8/src/assembler.cc

@ -74,7 +74,7 @@ const double DoubleConstant::zero = 0.0;
const double DoubleConstant::canonical_non_hole_nan = OS::nan_value();
const double DoubleConstant::the_hole_nan = BitCast<double>(kHoleNanInt64);
const double DoubleConstant::negative_infinity = -V8_INFINITY;
const char* RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// -----------------------------------------------------------------------------
// Implementation of AssemblerBase

2
deps/v8/src/assembler.h

@ -171,7 +171,7 @@ class RelocInfo BASE_EMBEDDED {
// where we are not sure to have enough space for patching in during
// lazy deoptimization. This is the case if we have indirect calls for which
// we do not normally record relocation info.
static const char* kFillerCommentString;
static const char* const kFillerCommentString;
// The minimum size of a comment is equal to three bytes for the extra tagged
// pc + the tag for the data, and kPointerSize for the actual pointer to the

13
deps/v8/src/ast.h

@ -1711,6 +1711,12 @@ class Throw: public Expression {
class FunctionLiteral: public Expression {
public:
enum Type {
ANONYMOUS_EXPRESSION,
NAMED_EXPRESSION,
DECLARATION
};
FunctionLiteral(Isolate* isolate,
Handle<String> name,
Scope* scope,
@ -1722,7 +1728,7 @@ class FunctionLiteral: public Expression {
int num_parameters,
int start_position,
int end_position,
bool is_expression,
Type type,
bool has_duplicate_parameters)
: Expression(isolate),
name_(name),
@ -1738,7 +1744,8 @@ class FunctionLiteral: public Expression {
end_position_(end_position),
function_token_position_(RelocInfo::kNoPosition),
inferred_name_(HEAP->empty_string()),
is_expression_(is_expression),
is_expression_(type != DECLARATION),
is_anonymous_(type == ANONYMOUS_EXPRESSION),
pretenure_(false),
has_duplicate_parameters_(has_duplicate_parameters) {
}
@ -1753,6 +1760,7 @@ class FunctionLiteral: public Expression {
int start_position() const { return start_position_; }
int end_position() const { return end_position_; }
bool is_expression() const { return is_expression_; }
bool is_anonymous() const { return is_anonymous_; }
bool strict_mode() const;
int materialized_literal_count() { return materialized_literal_count_; }
@ -1797,6 +1805,7 @@ class FunctionLiteral: public Expression {
int function_token_position_;
Handle<String> inferred_name_;
bool is_expression_;
bool is_anonymous_;
bool pretenure_;
bool has_duplicate_parameters_;
};
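
The constructor now takes a three-valued Type where it previously took bool is_expression, and both is_expression_ and is_anonymous_ are derived from it. A self-contained distillation of that mapping (illustrative only; the enum names mirror the ones added above):

// Illustrative distillation of the ast.h change: a DECLARATION is neither an
// expression nor anonymous, a NAMED_EXPRESSION is an expression only, and an
// ANONYMOUS_EXPRESSION is both.
enum FunctionLiteralType {
  ANONYMOUS_EXPRESSION,
  NAMED_EXPRESSION,
  DECLARATION
};

struct FunctionLiteralKind {
  explicit FunctionLiteralKind(FunctionLiteralType type)
      : is_expression(type != DECLARATION),
        is_anonymous(type == ANONYMOUS_EXPRESSION) {}
  bool is_expression;
  bool is_anonymous;
};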

24
deps/v8/src/bootstrapper.cc

@ -199,6 +199,7 @@ class Genesis BASE_EMBEDDED {
// New context initialization. Used for creating a context from scratch.
void InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> empty_function);
void InitializeExperimentalGlobal();
// Installs the contents of the native .js files on the global objects.
// Used for creating a context from scratch.
void InstallNativeFunctions();
@ -1190,6 +1191,21 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
}
void Genesis::InitializeExperimentalGlobal() {
Isolate* isolate = this->isolate();
Handle<JSObject> global = Handle<JSObject>(global_context()->global());
// TODO(mstarzinger): Move this into Genesis::InitializeGlobal once we no
// longer need to live behind a flag, so WeakMap gets added to the snapshot.
if (FLAG_harmony_weakmaps) { // -- W e a k M a p
Handle<JSFunction> weakmap_fun =
InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
}
}
bool Genesis::CompileBuiltin(Isolate* isolate, int index) {
Vector<const char> name = Natives::GetScriptName(index);
Handle<String> source_code =
@ -1680,6 +1696,11 @@ bool Genesis::InstallExperimentalNatives() {
"native proxy.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
if (FLAG_harmony_weakmaps &&
strcmp(ExperimentalNatives::GetScriptName(i).start(),
"native weakmap.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
}
InstallExperimentalNativeFunctions();
@ -2169,7 +2190,8 @@ Genesis::Genesis(Isolate* isolate,
isolate->counters()->contexts_created_from_scratch()->Increment();
}
// Install experimental natives.
// Initialize experimental globals and install experimental natives.
InitializeExperimentalGlobal();
if (!InstallExperimentalNatives()) return;
result_ = global_context_;

3
deps/v8/src/codegen.cc

@ -169,7 +169,6 @@ void CodeGenerator::PrintCode(Handle<Code> code, CompilationInfo* info) {
#endif // ENABLE_DISASSEMBLER
}
static Vector<const char> kRegexp = CStrVector("regexp");
bool CodeGenerator::ShouldGenerateLog(Expression* type) {
ASSERT(type != NULL);
@ -179,7 +178,7 @@ bool CodeGenerator::ShouldGenerateLog(Expression* type) {
}
Handle<String> name = Handle<String>::cast(type->AsLiteral()->handle());
if (FLAG_log_regexp) {
if (name->IsEqualTo(kRegexp))
if (name->IsEqualTo(CStrVector("regexp")))
return true;
}
return false;

1
deps/v8/src/compiler.cc

@ -736,6 +736,7 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
function_info->set_start_position(lit->start_position());
function_info->set_end_position(lit->end_position());
function_info->set_is_expression(lit->is_expression());
function_info->set_is_anonymous(lit->is_anonymous());
function_info->set_is_toplevel(is_toplevel);
function_info->set_inferred_name(*lit->inferred_name());
function_info->SetThisPropertyAssignmentsInfo(

180
deps/v8/src/d8.cc

@ -26,8 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifdef V8_SHARED
#define USING_V8_SHARED
#ifdef USING_V8_SHARED // Defined when linking against shared lib on Windows.
#define V8_SHARED
#endif
#ifdef COMPRESS_STARTUP_DATA_BZ2
@ -37,15 +37,16 @@
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
#include <assert.h>
#include "../include/v8-testing.h"
#endif // USING_V8_SHARED
#endif // V8_SHARED
#include "d8.h"
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
#include "api.h"
#include "checks.h"
#include "d8-debug.h"
@ -53,20 +54,20 @@
#include "natives.h"
#include "platform.h"
#include "v8.h"
#endif // USING_V8_SHARED
#endif // V8_SHARED
#if !defined(_WIN32) && !defined(_WIN64)
#include <unistd.h> // NOLINT
#endif
#ifdef USING_V8_SHARED
#ifndef ASSERT
#define ASSERT(condition) assert(condition)
#endif // USING_V8_SHARED
#endif
namespace v8 {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
LineEditor *LineEditor::first_ = NULL;
const char* Shell::kHistoryFileName = ".d8_history";
@ -116,20 +117,20 @@ CounterCollection Shell::local_counters_;
CounterCollection* Shell::counters_ = &local_counters_;
i::Mutex* Shell::context_mutex_(i::OS::CreateMutex());
Persistent<Context> Shell::utility_context_;
#endif // USING_V8_SHARED
#endif // V8_SHARED
Persistent<Context> Shell::evaluation_context_;
ShellOptions Shell::options;
const char* Shell::kPrompt = "d8> ";
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
bool CounterMap::Match(void* key1, void* key2) {
const char* name1 = reinterpret_cast<const char*>(key1);
const char* name2 = reinterpret_cast<const char*>(key2);
return strcmp(name1, name2) == 0;
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
// Converts a V8 value to a C string.
@ -143,11 +144,11 @@ bool Shell::ExecuteString(Handle<String> source,
Handle<Value> name,
bool print_result,
bool report_exceptions) {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
bool FLAG_debugger = i::FLAG_debugger;
#else
bool FLAG_debugger = false;
#endif // USING_V8_SHARED
#endif // V8_SHARED
HandleScope handle_scope;
TryCatch try_catch;
options.script_executed = true;
@ -276,9 +277,9 @@ Handle<Value> Shell::CreateExternalArray(const Arguments& args,
String::New("Array constructor needs one parameter."));
}
static const int kMaxLength = 0x3fffffff;
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
ASSERT(kMaxLength == i::ExternalArray::kMaxLength);
#endif // USING_V8_SHARED
#endif // V8_SHARED
size_t length = 0;
if (args[0]->IsUint32()) {
length = args[0]->Uint32Value();
@ -378,9 +379,9 @@ Handle<Value> Shell::Yield(const Arguments& args) {
Handle<Value> Shell::Quit(const Arguments& args) {
int exit_code = args[0]->Int32Value();
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
OnExit();
#endif // USING_V8_SHARED
#endif // V8_SHARED
exit(exit_code);
return Undefined();
}
@ -429,7 +430,7 @@ void Shell::ReportException(v8::TryCatch* try_catch) {
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
Handle<Array> Shell::GetCompletions(Handle<String> text, Handle<String> full) {
HandleScope handle_scope;
Context::Scope context_scope(utility_context_);
@ -464,10 +465,10 @@ Handle<Value> Shell::DebugCommandToJSONRequest(Handle<String> command) {
return val;
}
#endif // ENABLE_DEBUGGER_SUPPORT
#endif // USING_V8_SHARED
#endif // V8_SHARED
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
int32_t* Counter::Bind(const char* name, bool is_histogram) {
int i;
for (i = 0; i < kMaxNameSize - 1 && name[i]; i++)
@ -615,7 +616,7 @@ void Shell::InstallUtilityScript() {
}
#endif // ENABLE_DEBUGGER_SUPPORT
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
#ifdef COMPRESS_STARTUP_DATA_BZ2
@ -682,11 +683,11 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
global_template->Set(String::New("lol_is_enabled"), Boolean::New(false));
#endif
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
Handle<ObjectTemplate> os_templ = ObjectTemplate::New();
AddOSMethods(os_templ);
global_template->Set(String::New("os"), os_templ);
#endif // USING_V8_SHARED
#endif // V8_SHARED
return global_template;
}
@ -702,7 +703,7 @@ void Shell::Initialize() {
}
#endif
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
Shell::counter_map_ = new CounterMap();
// Set up counters
if (i::StrLength(i::FLAG_map_counters) != 0)
@ -712,10 +713,10 @@ void Shell::Initialize() {
V8::SetCreateHistogramFunction(CreateHistogram);
V8::SetAddHistogramSampleFunction(AddHistogramSample);
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
if (options.test_shell) return;
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
Locker lock;
HandleScope scope;
Handle<ObjectTemplate> global_template = CreateGlobalTemplate();
@ -727,21 +728,21 @@ void Shell::Initialize() {
v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true);
}
#endif // ENABLE_DEBUGGER_SUPPORT
#endif // USING_V8_SHARED
#endif // V8_SHARED
}
Persistent<Context> Shell::CreateEvaluationContext() {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
// This needs to be a critical section since this is not thread-safe
i::ScopedLock lock(context_mutex_);
#endif // USING_V8_SHARED
#endif // V8_SHARED
// Initialize the global objects
Handle<ObjectTemplate> global_template = CreateGlobalTemplate();
Persistent<Context> context = Context::New(NULL, global_template);
Context::Scope scope(context);
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
i::JSArguments js_args = i::FLAG_js_arguments;
i::Handle<i::FixedArray> arguments_array =
FACTORY->NewFixedArray(js_args.argc());
@ -754,12 +755,12 @@ Persistent<Context> Shell::CreateEvaluationContext() {
FACTORY->NewJSArrayWithElements(arguments_array);
context->Global()->Set(String::New("arguments"),
Utils::ToLocal(arguments_jsarray));
#endif // USING_V8_SHARED
#endif // V8_SHARED
return context;
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
void Shell::OnExit() {
if (i::FLAG_dump_counters) {
printf("+----------------------------------------+-------------+\n");
@ -779,18 +780,34 @@ void Shell::OnExit() {
if (counters_file_ != NULL)
delete counters_file_;
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
static FILE* FOpen(const char* path, const char* mode) {
#if (defined(_WIN32) || defined(_WIN64))
FILE* result;
if (fopen_s(&result, path, mode) == 0) {
return result;
} else {
return NULL;
}
#else
FILE* file = fopen(path, mode);
if (file == NULL) return NULL;
struct stat file_stat;
if (fstat(fileno(file), &file_stat) != 0) return NULL;
bool is_regular_file = ((file_stat.st_mode & S_IFREG) != 0);
if (is_regular_file) return file;
fclose(file);
return NULL;
#endif
}
static char* ReadChars(const char* name, int* size_out) {
// Release the V8 lock while reading files.
v8::Unlocker unlocker(Isolate::GetCurrent());
#ifndef USING_V8_SHARED
FILE* file = i::OS::FOpen(name, "rb");
#else
// TODO(yangguo@chromium.org): reading from a directory hangs!
FILE* file = fopen(name, "rb");
#endif // USING_V8_SHARED
FILE* file = FOpen(name, "rb");
if (file == NULL) return NULL;
fseek(file, 0, SEEK_END);
@ -809,7 +826,7 @@ static char* ReadChars(const char* name, int* size_out) {
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
static char* ReadToken(char* data, char token) {
char* next = i::OS::StrChr(data, token);
if (next != NULL) {
@ -829,7 +846,7 @@ static char* ReadLine(char* data) {
static char* ReadWord(char* data) {
return ReadToken(data, ' ');
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
// Reads a file into a v8 string.
@ -848,7 +865,7 @@ void Shell::RunShell() {
Context::Scope context_scope(evaluation_context_);
HandleScope handle_scope;
Handle<String> name = String::New("(d8)");
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
LineEditor* editor = LineEditor::Get();
printf("V8 version %s [console: %s]\n", V8::GetVersion(), editor->name());
if (i::FLAG_debugger) {
@ -871,12 +888,12 @@ void Shell::RunShell() {
if (fgets(buffer, kBufferSize, stdin) == NULL) break;
ExecuteString(String::New(buffer), name, true, true);
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
printf("\n");
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
class ShellThread : public i::Thread {
public:
ShellThread(int no, i::Vector<const char> files)
@ -929,7 +946,7 @@ void ShellThread::Run() {
ptr = next_line;
}
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
void SourceGroup::ExitShell(int exit_code) {
@ -976,32 +993,16 @@ void SourceGroup::Execute() {
Handle<String> SourceGroup::ReadFile(const char* name) {
#ifndef USING_V8_SHARED
FILE* file = i::OS::FOpen(name, "rb");
#else
// TODO(yangguo@chromium.org): reading from a directory hangs!
FILE* file = fopen(name, "rb");
#endif // USING_V8_SHARED
if (file == NULL) return Handle<String>();
fseek(file, 0, SEEK_END);
int size = ftell(file);
rewind(file);
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
int size;
const char* chars = ReadChars(name, &size);
if (chars == NULL) return Handle<String>();
Handle<String> result = String::New(chars, size);
delete[] chars;
return result;
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
i::Thread::Options SourceGroup::GetThreadOptions() {
i::Thread::Options options;
options.name = "IsolateThread";
@ -1053,7 +1054,7 @@ void SourceGroup::WaitForThread() {
done_semaphore_->Wait();
}
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
bool Shell::SetOptions(int argc, char* argv[]) {
@ -1075,23 +1076,23 @@ bool Shell::SetOptions(int argc, char* argv[]) {
options.test_shell = true;
argv[i] = NULL;
} else if (strcmp(argv[i], "--preemption") == 0) {
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
options.use_preemption = true;
argv[i] = NULL;
#endif // USING_V8_SHARED
#endif // V8_SHARED
} else if (strcmp(argv[i], "--no-preemption") == 0) {
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
options.use_preemption = false;
argv[i] = NULL;
#endif // USING_V8_SHARED
#endif // V8_SHARED
} else if (strcmp(argv[i], "--preemption-interval") == 0) {
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
@ -1110,19 +1111,19 @@ bool Shell::SetOptions(int argc, char* argv[]) {
printf("Missing value for --preemption-interval\n");
return false;
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
} else if (strcmp(argv[i], "-f") == 0) {
// Ignore any -f flags for compatibility with other stand-alone
// JavaScript engines.
continue;
} else if (strcmp(argv[i], "--isolate") == 0) {
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#endif // USING_V8_SHARED
#endif // V8_SHARED
options.num_isolates++;
}
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
else if (strcmp(argv[i], "--dump-counters") == 0) {
printf("D8 with shared library does not include counters\n");
return false;
@ -1133,10 +1134,10 @@ bool Shell::SetOptions(int argc, char* argv[]) {
printf("Javascript debugger not included\n");
return false;
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
// Run parallel threads if we are not using --isolate
for (int i = 1; i < argc; i++) {
if (argv[i] == NULL) continue;
@ -1157,9 +1158,10 @@ bool Shell::SetOptions(int argc, char* argv[]) {
}
argv[i] = NULL;
options.parallel_files->Add(i::Vector<const char>(files, size));
delete[] files;
}
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
@ -1184,7 +1186,7 @@ bool Shell::SetOptions(int argc, char* argv[]) {
int Shell::RunMain(int argc, char* argv[]) {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
i::List<i::Thread*> threads(1);
if (options.parallel_files != NULL)
for (int i = 0; i < options.parallel_files->length(); i++) {
@ -1197,7 +1199,7 @@ int Shell::RunMain(int argc, char* argv[]) {
for (int i = 1; i < options.num_isolates; ++i) {
options.isolate_sources[i].StartExecuteInThread();
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
{ // NOLINT
Locker lock;
HandleScope scope;
@ -1213,17 +1215,17 @@ int Shell::RunMain(int argc, char* argv[]) {
context.Dispose();
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
// Start preemption if threads have been created and preemption is enabled.
if (options.parallel_files != NULL
&& threads.length() > 0
&& options.use_preemption) {
Locker::StartPreemption(options.preemption_interval);
}
#endif // USING_V8_SHARED
#endif // V8_SHARED
}
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
for (int i = 1; i < options.num_isolates; ++i) {
options.isolate_sources[i].WaitForThread();
}
@ -1236,7 +1238,7 @@ int Shell::RunMain(int argc, char* argv[]) {
}
OnExit();
#endif // USING_V8_SHARED
#endif // V8_SHARED
return 0;
}
@ -1264,14 +1266,14 @@ int Shell::Main(int argc, char* argv[]) {
}
#if !defined(USING_V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
// Run remote debugger if requested, but never on --test
if (i::FLAG_remote_debugger && !options.test_shell) {
InstallUtilityScript();
RunRemoteDebugger(i::FLAG_debugger_port);
return 0;
}
#endif // !USING_V8_SHARED && ENABLE_DEBUGGER_SUPPORT
#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT
// Run interactive shell if explicitly requested or if no script has been
// executed, but never on --test
@ -1279,9 +1281,9 @@ int Shell::Main(int argc, char* argv[]) {
if (( options.interactive_shell
|| !options.script_executed )
&& !options.test_shell ) {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
InstallUtilityScript();
#endif // USING_V8_SHARED
#endif // V8_SHARED
RunShell();
}

19
deps/v8/src/d8.gyp

@ -47,9 +47,17 @@
],
'conditions': [
[ 'component!="shared_library"', {
'dependencies': [ 'd8_js2c#host', ],
'sources': [ 'd8-debug.cc', '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', ],
'conditions': [
[ 'want_separate_host_toolset==1', {
'dependencies': [
'd8_js2c#host',
],
}, {
'dependencies': [
'd8_js2c',
],
}],
[ 'console=="readline"', {
'libraries': [ '-lreadline', ],
'sources': [ 'd8-readline.cc' ],
@ -68,13 +76,19 @@
{
'target_name': 'd8_js2c',
'type': 'none',
'toolsets': ['host'],
'variables': {
'js_files': [
'd8.js',
'macros.py',
],
},
'conditions': [
[ 'want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}]
],
'actions': [
{
'action_name': 'd8_js2c',
@ -90,6 +104,7 @@
'../tools/js2c.py',
'<@(_outputs)',
'D8',
'off', # compress startup data
'<@(js_files)'
],
},

48
deps/v8/src/d8.h

@ -29,22 +29,22 @@
#define V8_D8_H_
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
#include "v8.h"
#include "allocation.h"
#include "hashmap.h"
#else
#include "../include/v8.h"
#endif // USING_V8_SHARED
#endif // V8_SHARED
namespace v8 {
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
namespace i = v8::internal;
#endif // USING_V8_SHARED
#endif // V8_SHARED
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
// A single counter in a counter collection.
class Counter {
public:
@ -117,17 +117,17 @@ class CounterMap {
static bool Match(void* key1, void* key2);
i::HashMap hash_map_;
};
#endif // USING_V8_SHARED
#endif // V8_SHARED
class SourceGroup {
public:
SourceGroup() :
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
next_semaphore_(v8::internal::OS::CreateSemaphore(0)),
done_semaphore_(v8::internal::OS::CreateSemaphore(0)),
thread_(NULL),
#endif // USING_V8_SHARED
#endif // V8_SHARED
argv_(NULL),
begin_offset_(0),
end_offset_(0) { }
@ -141,7 +141,7 @@ class SourceGroup {
void Execute();
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
void StartExecuteInThread();
void WaitForThread();
@ -165,7 +165,7 @@ class SourceGroup {
i::Semaphore* next_semaphore_;
i::Semaphore* done_semaphore_;
i::Thread* thread_;
#endif // USING_V8_SHARED
#endif // V8_SHARED
void ExitShell(int exit_code);
Handle<String> ReadFile(const char* name);
@ -179,11 +179,11 @@ class SourceGroup {
class ShellOptions {
public:
ShellOptions() :
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
use_preemption(true),
preemption_interval(10),
parallel_files(NULL),
#endif // USING_V8_SHARED
#endif // V8_SHARED
script_executed(false),
last_run(true),
stress_opt(false),
@ -193,11 +193,11 @@ class ShellOptions {
num_isolates(1),
isolate_sources(NULL) { }
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
bool use_preemption;
int preemption_interval;
i::List< i::Vector<const char> >* parallel_files;
#endif // USING_V8_SHARED
#endif // V8_SHARED
bool script_executed;
bool last_run;
bool stress_opt;
@ -208,11 +208,11 @@ class ShellOptions {
SourceGroup* isolate_sources;
};
#ifdef USING_V8_SHARED
#ifdef V8_SHARED
class Shell {
#else
class Shell : public i::AllStatic {
#endif // USING_V8_SHARED
#endif // V8_SHARED
public:
static bool ExecuteString(Handle<String> source,
Handle<Value> name,
@ -225,7 +225,7 @@ class Shell : public i::AllStatic {
static int RunMain(int argc, char* argv[]);
static int Main(int argc, char* argv[]);
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
static Handle<Array> GetCompletions(Handle<String> text,
Handle<String> full);
static void OnExit();
@ -236,7 +236,7 @@ class Shell : public i::AllStatic {
size_t buckets);
static void AddHistogramSample(void* histogram, int sample);
static void MapCounters(const char* name);
#endif // USING_V8_SHARED
#endif // V8_SHARED
#ifdef ENABLE_DEBUGGER_SUPPORT
static Handle<Object> DebugMessageDetails(Handle<String> message);
@ -300,15 +300,15 @@ class Shell : public i::AllStatic {
static Handle<Value> RemoveDirectory(const Arguments& args);
static void AddOSMethods(Handle<ObjectTemplate> os_template);
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
static const char* kHistoryFileName;
#endif // USING_V8_SHARED
#endif // V8_SHARED
static const char* kPrompt;
static ShellOptions options;
private:
static Persistent<Context> evaluation_context_;
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
static Persistent<Context> utility_context_;
static CounterMap* counter_map_;
// We statically allocate a set of local counters to be used if we
@ -320,7 +320,7 @@ class Shell : public i::AllStatic {
static Counter* GetCounter(const char* name, bool is_histogram);
static void InstallUtilityScript();
#endif // USING_V8_SHARED
#endif // V8_SHARED
static void Initialize();
static void RunShell();
static bool SetOptions(int argc, char* argv[]);
@ -332,7 +332,7 @@ class Shell : public i::AllStatic {
};
#ifndef USING_V8_SHARED
#ifndef V8_SHARED
class LineEditor {
public:
enum Type { DUMB = 0, READLINE = 1 };
@ -352,7 +352,7 @@ class LineEditor {
LineEditor* next_;
static LineEditor* first_;
};
#endif // USING_V8_SHARED
#endif // V8_SHARED
} // namespace v8

4
deps/v8/src/debug.cc

@ -1965,7 +1965,7 @@ void Debug::AfterGarbageCollection() {
Debugger::Debugger(Isolate* isolate)
: debugger_access_(OS::CreateMutex()),
: debugger_access_(isolate->debugger_access()),
event_listener_(Handle<Object>()),
event_listener_data_(Handle<Object>()),
compiling_natives_(false),
@ -1987,8 +1987,6 @@ Debugger::Debugger(Isolate* isolate)
Debugger::~Debugger() {
delete debugger_access_;
debugger_access_ = 0;
delete dispatch_handler_access_;
dispatch_handler_access_ = 0;
delete command_received_;

4
deps/v8/src/deoptimizer.cc

@ -1183,11 +1183,11 @@ void TranslationBuffer::Add(int32_t value) {
int32_t TranslationIterator::Next() {
ASSERT(HasNext());
// Run through the bytes until we reach one with a least significant
// bit of zero (marks the end).
uint32_t bits = 0;
for (int i = 0; true; i += 7) {
ASSERT(HasNext());
uint8_t next = buffer_->get(index_++);
bits |= (next >> 1) << i;
if ((next & 1) == 0) break;
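The loop above decodes a variable-length unsigned integer: each byte carries seven payload bits in its upper bits, and its least significant bit is a continuation flag (1 means more bytes follow, 0 marks the last byte). The following standalone sketch models just that unsigned scheme with illustrative names; it does not reproduce whatever signed handling TranslationBuffer::Add performs.
#include <stdint.h>
#include <cassert>
#include <cstdio>
#include <vector>
// Encode |value| in the scheme above: 7 payload bits per byte, with the
// least significant bit set while more bytes follow and clear on the last one.
static void EncodeVarUint(uint32_t value, std::vector<uint8_t>* out) {
  do {
    uint8_t payload = static_cast<uint8_t>(value & 0x7F);
    value >>= 7;
    out->push_back(static_cast<uint8_t>((payload << 1) | (value != 0 ? 1 : 0)));
  } while (value != 0);
}
// Mirror of the decode loop shown above: shift each byte's upper seven bits
// into place and stop when the continuation bit is clear.
static uint32_t DecodeVarUint(const std::vector<uint8_t>& buffer, size_t* index) {
  uint32_t bits = 0;
  for (int shift = 0; true; shift += 7) {
    uint8_t next = buffer[(*index)++];
    bits |= static_cast<uint32_t>(next >> 1) << shift;
    if ((next & 1) == 0) break;
  }
  return bits;
}
int main() {
  std::vector<uint8_t> buffer;
  EncodeVarUint(300, &buffer);  // 300 needs two bytes: (44 | continuation), then 2
  size_t index = 0;
  uint32_t decoded = DecodeVarUint(buffer, &index);
  assert(decoded == 300);
  std::printf("decoded %u from %u bytes\n",
              decoded, static_cast<unsigned>(buffer.size()));
  return 0;
}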
@ -1438,6 +1438,7 @@ void SlotRef::ComputeSlotMappingForArguments(JavaScriptFrame* frame,
UNREACHABLE();
}
#ifdef ENABLE_DEBUGGER_SUPPORT
DeoptimizedFrameInfo::DeoptimizedFrameInfo(
Deoptimizer* deoptimizer, int frame_index) {
@ -1467,5 +1468,6 @@ void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}
#endif // ENABLE_DEBUGGER_SUPPORT
} } // namespace v8::internal

6
deps/v8/src/deoptimizer.h

@ -317,7 +317,7 @@ class Deoptimizer : public Malloced {
List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;
static int table_entry_size_;
static const int table_entry_size_;
friend class FrameDescription;
friend class DeoptimizingCodeListNode;
@ -497,9 +497,7 @@ class TranslationIterator BASE_EMBEDDED {
int32_t Next();
bool HasNext() const { return index_ >= 0; }
void Done() { index_ = -1; }
bool HasNext() const { return index_ < buffer_->length(); }
void Skip(int n) {
for (int i = 0; i < n; i++) Next();

536
deps/v8/src/elements.cc

@ -0,0 +1,536 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "objects.h"
#include "elements.h"
namespace v8 {
namespace internal {
ElementsAccessor** ElementsAccessor::elements_accessors_;
bool HasKey(FixedArray* array, Object* key) {
int len0 = array->length();
for (int i = 0; i < len0; i++) {
Object* element = array->get(i);
if (element->IsSmi() && element == key) return true;
if (element->IsString() &&
key->IsString() && String::cast(element)->Equals(String::cast(key))) {
return true;
}
}
return false;
}
// Base class for element handler implementations. Contains the
// common logic for objects with different ElementsKinds.
// Subclasses must specialize the methods for which the element
// implementation differs from the base class implementation.
//
// This class is intended to be used in the following way:
//
// class SomeElementsAccessor :
// public ElementsAccessorBase<SomeElementsAccessor,
// BackingStoreClass> {
// ...
// }
//
// This is an example of the Curiously Recurring Template Pattern (see
// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
// specialization of SomeElementsAccessor methods).
template <typename ElementsAccessorSubclass, typename BackingStoreClass>
class ElementsAccessorBase : public ElementsAccessor {
public:
ElementsAccessorBase() { }
virtual MaybeObject* GetWithReceiver(JSObject* obj,
Object* receiver,
uint32_t index) {
if (index < ElementsAccessorSubclass::GetLength(obj)) {
BackingStoreClass* backing_store =
ElementsAccessorSubclass::GetBackingStore(obj);
return backing_store->get(index);
}
return obj->GetHeap()->the_hole_value();
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) = 0;
virtual MaybeObject* AddJSArrayKeysToFixedArray(JSArray* other,
FixedArray* keys) {
int len0 = keys->length();
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
for (int i = 0; i < len0; i++) {
ASSERT(keys->get(i)->IsString() || keys->get(i)->IsNumber());
}
}
#endif
int len1 = ElementsAccessorSubclass::GetCapacity(other);
// Optimize if 'other' is empty.
// We cannot optimize if 'this' is empty, as other may have holes
// or non-keys.
if (len1 == 0) return keys;
// Compute how many elements are not in other.
int extra = 0;
for (int y = 0; y < len1; y++) {
Object* value;
MaybeObject* maybe_value =
ElementsAccessorSubclass::GetElementAtCapacityIndex(other, y);
if (!maybe_value->ToObject(&value)) return maybe_value;
if (!value->IsTheHole() && !HasKey(keys, value)) extra++;
}
if (extra == 0) return keys;
// Allocate the result
FixedArray* result;
MaybeObject* maybe_obj =
other->GetHeap()->AllocateFixedArray(len0 + extra);
if (!maybe_obj->To<FixedArray>(&result)) return maybe_obj;
// Fill in the content
{
AssertNoAllocation no_gc;
WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
for (int i = 0; i < len0; i++) {
Object* e = keys->get(i);
ASSERT(e->IsString() || e->IsNumber());
result->set(i, e, mode);
}
}
// Fill in the extra keys.
int index = 0;
for (int y = 0; y < len1; y++) {
MaybeObject* maybe_value =
ElementsAccessorSubclass::GetElementAtCapacityIndex(other, y);
Object* value;
if (!maybe_value->ToObject(&value)) return maybe_value;
if (!value->IsTheHole() && !HasKey(keys, value)) {
ASSERT(value->IsString() || value->IsNumber());
result->set(len0 + index, value);
index++;
}
}
ASSERT(extra == index);
return result;
}
static uint32_t GetCapacity(JSObject* obj) {
return ElementsAccessorSubclass::GetBackingStore(obj)->length();
}
static MaybeObject* GetElementAtCapacityIndex(JSObject* obj, int index) {
BackingStoreClass* backing_store =
ElementsAccessorSubclass::GetBackingStore(obj);
return backing_store->get(index);
}
protected:
static BackingStoreClass* GetBackingStore(JSObject* obj) {
return BackingStoreClass::cast(obj->elements());
}
static uint32_t GetLength(JSObject* obj) {
return ElementsAccessorSubclass::GetBackingStore(obj)->length();
}
private:
DISALLOW_COPY_AND_ASSIGN(ElementsAccessorBase);
};
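As the comment above notes, the base class receives its concrete subclass as a template parameter, so calls such as ElementsAccessorSubclass::GetBackingStore() bind at compile time rather than through a vtable. A minimal, self-contained illustration of that CRTP dispatch, using invented names that are not part of V8:
#include <cstdio>
// Shared logic lives in the base; it calls into the subclass statically.
template <typename Subclass, typename Backing>
class AccessorBase {
 public:
  int GetChecked(const Backing& store, int index) const {
    // Subclass::Length() resolves at compile time -- no virtual dispatch.
    if (index < Subclass::Length(store)) return Subclass::Get(store, index);
    return -1;  // stand-in for "the hole"
  }
};
struct ArrayBacking { int data[4]; };
// The Curiously Recurring Template Pattern: the subclass names itself
// as the first template argument of its own base class.
class ArrayAccessor : public AccessorBase<ArrayAccessor, ArrayBacking> {
 public:
  static int Length(const ArrayBacking&) { return 4; }
  static int Get(const ArrayBacking& store, int index) { return store.data[index]; }
};
int main() {
  ArrayBacking backing = {{10, 20, 30, 40}};
  ArrayAccessor accessor;
  std::printf("%d %d\n", accessor.GetChecked(backing, 2),   // 30
              accessor.GetChecked(backing, 9));             // -1 (out of range)
  return 0;
}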
class FastElementsAccessor
: public ElementsAccessorBase<FastElementsAccessor, FixedArray> {
public:
static MaybeObject* DeleteCommon(JSObject* obj,
uint32_t index) {
ASSERT(obj->HasFastElements() || obj->HasFastArgumentsElements());
Heap* heap = obj->GetHeap();
FixedArray* backing_store = FixedArray::cast(obj->elements());
if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
backing_store = FixedArray::cast(backing_store->get(1));
} else {
Object* writable;
MaybeObject* maybe = obj->EnsureWritableFastElements();
if (!maybe->ToObject(&writable)) return maybe;
backing_store = FixedArray::cast(writable);
}
uint32_t length = static_cast<uint32_t>(
obj->IsJSArray()
? Smi::cast(JSArray::cast(obj)->length())->value()
: backing_store->length());
if (index < length) {
backing_store->set_the_hole(index);
// If an old space backing store is larger than a certain size and
// has too few used values, normalize it.
// To avoid doing the check on every delete, we require at least
// one hole adjacent to the value being deleted.
Object* hole = heap->the_hole_value();
const int kMinLengthForSparsenessCheck = 64;
if (backing_store->length() >= kMinLengthForSparsenessCheck &&
!heap->InNewSpace(backing_store) &&
((index > 0 && backing_store->get(index - 1) == hole) ||
(index + 1 < length && backing_store->get(index + 1) == hole))) {
int num_used = 0;
for (int i = 0; i < backing_store->length(); ++i) {
if (backing_store->get(i) != hole) ++num_used;
// Bail out early if more than 1/4 is used.
if (4 * num_used > backing_store->length()) break;
}
if (4 * num_used <= backing_store->length()) {
MaybeObject* result = obj->NormalizeElements();
if (result->IsFailure()) return result;
}
}
}
return heap->true_value();
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
return DeleteCommon(obj, index);
}
};
class FastDoubleElementsAccessor
: public ElementsAccessorBase<FastDoubleElementsAccessor,
FixedDoubleArray> {
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
int length = obj->IsJSArray()
? Smi::cast(JSArray::cast(obj)->length())->value()
: FixedDoubleArray::cast(obj->elements())->length();
if (index < static_cast<uint32_t>(length)) {
FixedDoubleArray::cast(obj->elements())->set_the_hole(index);
}
return obj->GetHeap()->true_value();
}
};
// Super class for all external element arrays.
template<typename ExternalElementsAccessorSubclass,
typename ExternalArray>
class ExternalElementsAccessor
: public ElementsAccessorBase<ExternalElementsAccessorSubclass,
ExternalArray> {
public:
virtual MaybeObject* GetWithReceiver(JSObject* obj,
Object* receiver,
uint32_t index) {
if (index < ExternalElementsAccessorSubclass::GetLength(obj)) {
ExternalArray* backing_store =
ExternalElementsAccessorSubclass::GetBackingStore(obj);
return backing_store->get(index);
} else {
return obj->GetHeap()->undefined_value();
}
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
// External arrays always ignore deletes.
return obj->GetHeap()->true_value();
}
};
class ExternalByteElementsAccessor
: public ExternalElementsAccessor<ExternalByteElementsAccessor,
ExternalByteArray> {
};
class ExternalUnsignedByteElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
ExternalUnsignedByteArray> {
};
class ExternalShortElementsAccessor
: public ExternalElementsAccessor<ExternalShortElementsAccessor,
ExternalShortArray> {
};
class ExternalUnsignedShortElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
ExternalUnsignedShortArray> {
};
class ExternalIntElementsAccessor
: public ExternalElementsAccessor<ExternalIntElementsAccessor,
ExternalIntArray> {
};
class ExternalUnsignedIntElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
ExternalUnsignedIntArray> {
};
class ExternalFloatElementsAccessor
: public ExternalElementsAccessor<ExternalFloatElementsAccessor,
ExternalFloatArray> {
};
class ExternalDoubleElementsAccessor
: public ExternalElementsAccessor<ExternalDoubleElementsAccessor,
ExternalDoubleArray> {
};
class PixelElementsAccessor
: public ExternalElementsAccessor<PixelElementsAccessor,
ExternalPixelArray> {
};
class DictionaryElementsAccessor
: public ElementsAccessorBase<DictionaryElementsAccessor,
NumberDictionary> {
public:
static MaybeObject* GetNumberDictionaryElement(
JSObject* obj,
Object* receiver,
NumberDictionary* backing_store,
uint32_t index) {
int entry = backing_store->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
Object* element = backing_store->ValueAt(entry);
PropertyDetails details = backing_store->DetailsAt(entry);
if (details.type() == CALLBACKS) {
return obj->GetElementWithCallback(receiver,
element,
index,
obj);
} else {
return element;
}
}
return obj->GetHeap()->the_hole_value();
}
static MaybeObject* DeleteCommon(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
Isolate* isolate = obj->GetIsolate();
Heap* heap = isolate->heap();
FixedArray* backing_store = FixedArray::cast(obj->elements());
bool is_arguments =
(obj->GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
if (is_arguments) {
backing_store = FixedArray::cast(backing_store->get(1));
}
NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
int entry = dictionary->FindEntry(index);
if (entry != NumberDictionary::kNotFound) {
Object* result = dictionary->DeleteProperty(entry, mode);
if (result == heap->true_value()) {
MaybeObject* maybe_elements = dictionary->Shrink(index);
FixedArray* new_elements = NULL;
if (!maybe_elements->To(&new_elements)) {
return maybe_elements;
}
if (is_arguments) {
FixedArray::cast(obj->elements())->set(1, new_elements);
} else {
obj->set_elements(new_elements);
}
}
if (mode == JSObject::STRICT_DELETION &&
result == heap->false_value()) {
// In strict mode, attempting to delete a non-configurable property
// throws an exception.
HandleScope scope(isolate);
Handle<Object> holder(obj);
Handle<Object> name = isolate->factory()->NewNumberFromUint(index);
Handle<Object> args[2] = { name, holder };
Handle<Object> error =
isolate->factory()->NewTypeError("strict_delete_property",
HandleVector(args, 2));
return isolate->Throw(*error);
}
}
return heap->true_value();
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
return DeleteCommon(obj, index, mode);
}
virtual MaybeObject* GetWithReceiver(JSObject* obj,
Object* receiver,
uint32_t index) {
return GetNumberDictionaryElement(obj,
receiver,
obj->element_dictionary(),
index);
}
static uint32_t GetCapacity(JSObject* obj) {
return obj->element_dictionary()->Capacity();
}
static MaybeObject* GetElementAtCapacityIndex(JSObject* obj, int index) {
NumberDictionary* dict = obj->element_dictionary();
if (dict->IsKey(dict->KeyAt(index))) {
return dict->ValueAt(index);
} else {
return obj->GetHeap()->the_hole_value();
}
}
};
class NonStrictArgumentsElementsAccessor
: public ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
FixedArray> {
public:
virtual MaybeObject* GetWithReceiver(JSObject* obj,
Object* receiver,
uint32_t index) {
FixedArray* parameter_map = GetBackingStore(obj);
uint32_t length = parameter_map->length();
Object* probe =
(index < length - 2) ? parameter_map->get(index + 2) : NULL;
if (probe != NULL && !probe->IsTheHole()) {
Context* context = Context::cast(parameter_map->get(0));
int context_index = Smi::cast(probe)->value();
ASSERT(!context->get(context_index)->IsTheHole());
return context->get(context_index);
} else {
// Object is not mapped, defer to the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
if (arguments->IsDictionary()) {
return DictionaryElementsAccessor::GetNumberDictionaryElement(
obj,
receiver,
NumberDictionary::cast(arguments),
index);
} else if (index < static_cast<uint32_t>(arguments->length())) {
return arguments->get(index);
}
}
return obj->GetHeap()->the_hole_value();
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) {
FixedArray* parameter_map = FixedArray::cast(obj->elements());
uint32_t length = parameter_map->length();
Object* probe =
index < (length - 2) ? parameter_map->get(index + 2) : NULL;
if (probe != NULL && !probe->IsTheHole()) {
// TODO(kmillikin): We could check if this was the last aliased
// parameter, and revert to normal elements in that case. That
// would enable GC of the context.
parameter_map->set_the_hole(index + 2);
} else {
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
if (arguments->IsDictionary()) {
return DictionaryElementsAccessor::DeleteCommon(obj, index, mode);
} else {
return FastElementsAccessor::DeleteCommon(obj, index);
}
}
return obj->GetHeap()->true_value();
}
static uint32_t GetCapacity(JSObject* obj) {
// TODO(danno): Return the max of the parameter map length and the
// backing store capacity.
return 0;
}
static MaybeObject* GetElementAtCapacityIndex(JSObject* obj, int index) {
// TODO(danno): Return either the value from the parameter map or the
// backing store value at index.
return obj->GetHeap()->the_hole_value();
}
};
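Here the backing store is a parameter map: slot 0 holds the context, slot 1 an ordinary arguments store, and slot index + 2 either a context slot number (the element is still aliased to a formal parameter) or the hole. A simplified standalone model of that lookup, with hypothetical fixed-size types and without the dictionary-mode fallback shown above:
#include <cstdio>
static const int kHole = -1;  // stand-in for V8's the-hole sentinel
struct ParameterMap {
  int context[3];    // slot 0 in the real layout holds the context
  int arguments[3];  // slot 1 holds an ordinary arguments store
  int mapped[3];     // slots 2.. hold a context slot index or kHole
};
// Mirrors GetWithReceiver above: try the mapping first, then fall back
// to the plain arguments store.
static int GetArgument(const ParameterMap& map, int index, bool* found) {
  *found = true;
  if (index < 3 && map.mapped[index] != kHole) {
    return map.context[map.mapped[index]];  // still aliased to a formal
  }
  if (index < 3) {
    return map.arguments[index];            // unmapped, stored directly
  }
  *found = false;                           // out of range: the hole
  return 0;
}
int main() {
  ParameterMap map = {
    { 7, 8, 9 },        // context slots
    { 100, 200, 300 },  // arguments store
    { 2, kHole, 0 }     // arg 0 -> context[2], arg 1 unmapped, arg 2 -> context[0]
  };
  bool found;
  std::printf("%d %d %d\n",
              GetArgument(map, 0, &found),   // 9, read through the context
              GetArgument(map, 1, &found),   // 200, read from the arguments store
              GetArgument(map, 2, &found));  // 7, read through the context
  return 0;
}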
void ElementsAccessor::InitializeOncePerProcess() {
static struct ConcreteElementsAccessors {
FastElementsAccessor fast_elements_handler;
FastDoubleElementsAccessor fast_double_elements_handler;
DictionaryElementsAccessor dictionary_elements_handler;
NonStrictArgumentsElementsAccessor non_strict_arguments_elements_handler;
ExternalByteElementsAccessor byte_elements_handler;
ExternalUnsignedByteElementsAccessor unsigned_byte_elements_handler;
ExternalShortElementsAccessor short_elements_handler;
ExternalUnsignedShortElementsAccessor unsigned_short_elements_handler;
ExternalIntElementsAccessor int_elements_handler;
ExternalUnsignedIntElementsAccessor unsigned_int_elements_handler;
ExternalFloatElementsAccessor float_elements_handler;
ExternalDoubleElementsAccessor double_elements_handler;
PixelElementsAccessor pixel_elements_handler;
} element_accessors;
static ElementsAccessor* accessor_array[] = {
&element_accessors.fast_elements_handler,
&element_accessors.fast_double_elements_handler,
&element_accessors.dictionary_elements_handler,
&element_accessors.non_strict_arguments_elements_handler,
&element_accessors.byte_elements_handler,
&element_accessors.unsigned_byte_elements_handler,
&element_accessors.short_elements_handler,
&element_accessors.unsigned_short_elements_handler,
&element_accessors.int_elements_handler,
&element_accessors.unsigned_int_elements_handler,
&element_accessors.float_elements_handler,
&element_accessors.double_elements_handler,
&element_accessors.pixel_elements_handler
};
elements_accessors_ = accessor_array;
}
} } // namespace v8::internal

69
deps/v8/src/elements.h

@ -0,0 +1,69 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_ELEMENTS_H_
#define V8_ELEMENTS_H_
#include "objects.h"
namespace v8 {
namespace internal {
// Abstract base class for handlers that can operate on objects with
// differing ElementsKinds.
class ElementsAccessor {
public:
ElementsAccessor() { }
virtual ~ElementsAccessor() { }
virtual MaybeObject* GetWithReceiver(JSObject* obj,
Object* receiver,
uint32_t index) = 0;
virtual MaybeObject* Delete(JSObject* obj,
uint32_t index,
JSReceiver::DeleteMode mode) = 0;
virtual MaybeObject* AddJSArrayKeysToFixedArray(JSArray* other,
FixedArray* keys) = 0;
// Returns a shared ElementsAccessor for the specified ElementsKind.
static ElementsAccessor* ForKind(JSObject::ElementsKind elements_kind) {
ASSERT(elements_kind < JSObject::kElementsKindCount);
return elements_accessors_[elements_kind];
}
static void InitializeOncePerProcess();
private:
static ElementsAccessor** elements_accessors_;
DISALLOW_COPY_AND_ASSIGN(ElementsAccessor);
};
} } // namespace v8::internal
#endif // V8_ELEMENTS_H_
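ForKind() indexes the statically allocated accessor table that InitializeOncePerProcess() sets up in elements.cc, so callers can dispatch on an object's ElementsKind without a switch. A compact standalone sketch of that table-of-singletons scheme, under simplified and invented types rather than V8's:
#include <cassert>
#include <cstddef>
#include <cstdio>
enum Kind { kFast = 0, kDictionary = 1, kKindCount = 2 };
class Accessor {
 public:
  virtual ~Accessor() {}
  virtual const char* Name() const = 0;
  // Shared lookup: one statically allocated accessor per kind.
  static Accessor* ForKind(Kind kind) {
    assert(kind < kKindCount);
    return accessors_[kind];
  }
  static void InitializeOncePerProcess();
 private:
  static Accessor** accessors_;
};
class FastAccessor : public Accessor {
 public:
  virtual const char* Name() const { return "fast"; }
};
class DictionaryAccessor : public Accessor {
 public:
  virtual const char* Name() const { return "dictionary"; }
};
Accessor** Accessor::accessors_ = NULL;
void Accessor::InitializeOncePerProcess() {
  // Function-local statics give the singletons static storage duration,
  // mirroring the struct of handlers in elements.cc above.
  static FastAccessor fast;
  static DictionaryAccessor dictionary;
  static Accessor* table[kKindCount] = { &fast, &dictionary };
  accessors_ = table;
}
int main() {
  Accessor::InitializeOncePerProcess();
  std::printf("%s\n", Accessor::ForKind(kDictionary)->Name());  // prints "dictionary"
  return 0;
}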

2
deps/v8/src/execution.cc

@ -132,7 +132,7 @@ static Handle<Object> Invoke(bool construct,
if (*has_pending_exception) {
isolate->ReportPendingMessages();
if (isolate->pending_exception() == Failure::OutOfMemoryException()) {
if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) {
if (!isolate->ignore_out_of_memory()) {
V8::FatalProcessOutOfMemory("JS", true);
}
}

1
deps/v8/src/flag-definitions.h

@ -98,6 +98,7 @@ private:
// Flags for experimental language features.
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_weakmaps, false, "enable harmony weak maps")
// Flags for experimental implementation features.
DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")

43
deps/v8/src/heap.cc

@ -438,7 +438,9 @@ void Heap::GarbageCollectionEpilogue() {
#if defined(DEBUG)
ReportStatisticsAfterGC();
#endif // DEBUG
#ifdef ENABLE_DEBUGGER_SUPPORT
isolate_->debug()->AfterGarbageCollection();
#endif // ENABLE_DEBUGGER_SUPPORT
}
@ -1292,6 +1294,10 @@ class ScavengingVisitor : public StaticVisitorBase {
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<SharedFunctionInfo::kSize>);
table_.Register(kVisitJSWeakMap,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
@ -2393,40 +2399,41 @@ MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
Object* result;
{ MaybeObject* maybe_result =
Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
SharedFunctionInfo* share;
MaybeObject* maybe = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
if (!maybe->To<SharedFunctionInfo>(&share)) return maybe;
SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
// Set pointer fields.
share->set_name(name);
Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
share->set_code(illegal);
share->set_scope_info(SerializedScopeInfo::Empty());
Code* construct_stub = isolate_->builtins()->builtin(
Builtins::kJSConstructStubGeneric);
Code* construct_stub =
isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
share->set_construct_stub(construct_stub);
share->set_expected_nof_properties(0);
share->set_length(0);
share->set_formal_parameter_count(0);
share->set_instance_class_name(Object_symbol());
share->set_function_data(undefined_value());
share->set_script(undefined_value());
share->set_start_position_and_type(0);
share->set_debug_info(undefined_value());
share->set_inferred_name(empty_string());
share->set_compiler_hints(0);
share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
share->set_initial_map(undefined_value());
share->set_this_property_assignments_count(0);
share->set_this_property_assignments(undefined_value());
share->set_opt_count(0);
share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
share->set_formal_parameter_count(0);
share->set_expected_nof_properties(0);
share->set_num_literals(0);
share->set_start_position_and_type(0);
share->set_end_position(0);
share->set_function_token_position(0);
share->set_native(false);
return result;
// All compiler hints default to false or 0.
share->set_compiler_hints(0);
share->set_this_property_assignments_count(0);
share->set_opt_count(0);
return share;
}

1
deps/v8/src/heap.h

@ -1646,6 +1646,7 @@ class Heap {
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
friend class StaticMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);

17
deps/v8/src/hydrogen-instructions.cc

@ -862,10 +862,19 @@ void HInstanceOf::PrintDataTo(StringStream* stream) {
Range* HValue::InferRange() {
// Untagged integer32 cannot be -0, all other representations can.
Range* result = new Range();
result->set_can_be_minus_zero(!representation().IsInteger32());
return result;
if (representation().IsTagged()) {
// Tagged values are always in int32 range when converted to integer,
// but they can contain -0.
Range* result = new Range();
result->set_can_be_minus_zero(true);
return result;
} else if (representation().IsNone()) {
return NULL;
} else {
// Untagged integer32 cannot be -0 and we don't compute ranges for
// untagged doubles.
return new Range();
}
}

4
deps/v8/src/ia32/deoptimizer-ia32.cc

@ -37,7 +37,7 @@
namespace v8 {
namespace internal {
int Deoptimizer::table_entry_size_ = 10;
const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
@ -601,8 +601,6 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
if (output_count_ - 1 == frame_index) iterator->Done();
}

19
deps/v8/src/ia32/disasm-ia32.cc

@ -54,7 +54,7 @@ struct ByteMnemonic {
};
static ByteMnemonic two_operands_instr[] = {
static const ByteMnemonic two_operands_instr[] = {
{0x03, "add", REG_OPER_OP_ORDER},
{0x09, "or", OPER_REG_OP_ORDER},
{0x0B, "or", REG_OPER_OP_ORDER},
@ -79,7 +79,7 @@ static ByteMnemonic two_operands_instr[] = {
};
static ByteMnemonic zero_operands_instr[] = {
static const ByteMnemonic zero_operands_instr[] = {
{0xC3, "ret", UNSET_OP_ORDER},
{0xC9, "leave", UNSET_OP_ORDER},
{0x90, "nop", UNSET_OP_ORDER},
@ -98,14 +98,14 @@ static ByteMnemonic zero_operands_instr[] = {
};
static ByteMnemonic call_jump_instr[] = {
static const ByteMnemonic call_jump_instr[] = {
{0xE8, "call", UNSET_OP_ORDER},
{0xE9, "jmp", UNSET_OP_ORDER},
{-1, "", UNSET_OP_ORDER}
};
static ByteMnemonic short_immediate_instr[] = {
static const ByteMnemonic short_immediate_instr[] = {
{0x05, "add", UNSET_OP_ORDER},
{0x0D, "or", UNSET_OP_ORDER},
{0x15, "adc", UNSET_OP_ORDER},
@ -117,7 +117,7 @@ static ByteMnemonic short_immediate_instr[] = {
};
static const char* jump_conditional_mnem[] = {
static const char* const jump_conditional_mnem[] = {
/*0*/ "jo", "jno", "jc", "jnc",
/*4*/ "jz", "jnz", "jna", "ja",
/*8*/ "js", "jns", "jpe", "jpo",
@ -125,7 +125,7 @@ static const char* jump_conditional_mnem[] = {
};
static const char* set_conditional_mnem[] = {
static const char* const set_conditional_mnem[] = {
/*0*/ "seto", "setno", "setc", "setnc",
/*4*/ "setz", "setnz", "setna", "seta",
/*8*/ "sets", "setns", "setpe", "setpo",
@ -133,7 +133,7 @@ static const char* set_conditional_mnem[] = {
};
static const char* conditional_move_mnem[] = {
static const char* const conditional_move_mnem[] = {
/*0*/ "cmovo", "cmovno", "cmovc", "cmovnc",
/*4*/ "cmovz", "cmovnz", "cmovna", "cmova",
/*8*/ "cmovs", "cmovns", "cmovpe", "cmovpo",
@ -169,7 +169,7 @@ class InstructionTable {
InstructionDesc instructions_[256];
void Clear();
void Init();
void CopyTable(ByteMnemonic bm[], InstructionType type);
void CopyTable(const ByteMnemonic bm[], InstructionType type);
void SetTableRange(InstructionType type,
byte start,
byte end,
@ -208,7 +208,8 @@ void InstructionTable::Init() {
}
void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
void InstructionTable::CopyTable(const ByteMnemonic bm[],
InstructionType type) {
for (int i = 0; bm[i].b >= 0; i++) {
InstructionDesc* id = &instructions_[bm[i].b];
id->mnem = bm[i].mnem;

145
deps/v8/src/isolate.cc

@ -76,6 +76,10 @@ int ThreadId::GetCurrentThreadId() {
ThreadLocalTop::ThreadLocalTop() {
InitializeInternal();
// This flag may be set using v8::V8::IgnoreOutOfMemoryException()
// before an isolate is initialized. The initialize methods below do
// not touch it to preserve its value.
ignore_out_of_memory_ = false;
}
@ -382,7 +386,6 @@ void Isolate::EnsureDefaultIsolate() {
if (Thread::GetThreadLocal(isolate_key_) == NULL) {
Thread::SetThreadLocal(isolate_key_, default_isolate_);
}
CHECK(default_isolate_->PreInit());
}
@ -654,6 +657,7 @@ void Isolate::PrintStack() {
incomplete_message_ = &accumulator;
PrintStack(&accumulator);
accumulator.OutputToStdOut();
InitializeLoggingAndCounters();
accumulator.Log();
incomplete_message_ = NULL;
stack_trace_nesting_level_ = 0;
@ -1375,11 +1379,15 @@ Isolate::Isolate()
bootstrapper_(NULL),
runtime_profiler_(NULL),
compilation_cache_(NULL),
counters_(new Counters()),
counters_(NULL),
code_range_(NULL),
// Must be initialized early to allow v8::SetResourceConstraints calls.
break_access_(OS::CreateMutex()),
logger_(new Logger()),
stats_table_(new StatsTable()),
debugger_initialized_(false),
// Must be initialized early to allow v8::Debug calls.
debugger_access_(OS::CreateMutex()),
logger_(NULL),
stats_table_(NULL),
stub_cache_(NULL),
deoptimizer_data_(NULL),
capture_stack_trace_for_uncaught_exceptions_(false),
@ -1510,7 +1518,7 @@ void Isolate::Deinit() {
logger_->TearDown();
// The default isolate is re-initializable due to legacy API.
state_ = PREINITIALIZED;
state_ = UNINITIALIZED;
}
}
@ -1592,58 +1600,6 @@ Isolate::~Isolate() {
}
bool Isolate::PreInit() {
if (state_ != UNINITIALIZED) return true;
TRACE_ISOLATE(preinit);
ASSERT(Isolate::Current() == this);
#ifdef ENABLE_DEBUGGER_SUPPORT
debug_ = new Debug(this);
debugger_ = new Debugger(this);
#endif
memory_allocator_ = new MemoryAllocator();
memory_allocator_->isolate_ = this;
code_range_ = new CodeRange();
code_range_->isolate_ = this;
// Safe after setting Heap::isolate_, initializing StackGuard and
// ensuring that Isolate::Current() == this.
heap_.SetStackLimits();
#ifdef DEBUG
DisallowAllocationFailure disallow_allocation_failure;
#endif
#define C(name) isolate_addresses_[Isolate::k_##name] = \
reinterpret_cast<Address>(name());
ISOLATE_ADDRESS_LIST(C)
#undef C
string_tracker_ = new StringTracker();
string_tracker_->isolate_ = this;
compilation_cache_ = new CompilationCache(this);
transcendental_cache_ = new TranscendentalCache();
keyed_lookup_cache_ = new KeyedLookupCache();
context_slot_cache_ = new ContextSlotCache();
descriptor_lookup_cache_ = new DescriptorLookupCache();
unicode_cache_ = new UnicodeCache();
pc_to_code_cache_ = new PcToCodeCache(this);
write_input_buffer_ = new StringInputBuffer();
global_handles_ = new GlobalHandles(this);
bootstrapper_ = new Bootstrapper();
handle_scope_implementer_ = new HandleScopeImplementer(this);
stub_cache_ = new StubCache(this);
ast_sentinels_ = new AstSentinels();
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
state_ = PREINITIALIZED;
return true;
}
void Isolate::InitializeThreadLocal() {
thread_local_top_.isolate_ = this;
thread_local_top_.Initialize();
@ -1680,19 +1636,71 @@ void Isolate::PropagatePendingExceptionToExternalTryCatch() {
}
void Isolate::InitializeLoggingAndCounters() {
if (logger_ == NULL) {
logger_ = new Logger;
}
if (counters_ == NULL) {
counters_ = new Counters;
}
}
void Isolate::InitializeDebugger() {
#ifdef ENABLE_DEBUGGER_SUPPORT
ScopedLock lock(debugger_access_);
if (NoBarrier_Load(&debugger_initialized_)) return;
InitializeLoggingAndCounters();
debug_ = new Debug(this);
debugger_ = new Debugger(this);
Release_Store(&debugger_initialized_, true);
#endif
}
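InitializeDebugger() follows the double-checked initialization pattern: an unsynchronized fast-path load of debugger_initialized_, a mutex around the slow path, and a release store only after the debugger components are fully constructed, so a reader that observes the flag also observes the initialized pointers. A rough standalone equivalent using std::atomic and std::mutex (an assumption of this sketch, which also uses an acquire load on the fast path, stricter than the NoBarrier_Load above):
#include <atomic>
#include <cstddef>
#include <cstdio>
#include <mutex>
class Service {
 public:
  void Use() const { std::printf("using service\n"); }
};
class Host {
 public:
  Host() : initialized_(false), service_(NULL) {}
  Service* service() {
    // Fast path: the acquire load pairs with the release store below, so a
    // thread that sees initialized_ == true also sees a valid service_.
    if (!initialized_.load(std::memory_order_acquire)) InitializeService();
    return service_;
  }
 private:
  void InitializeService() {
    std::lock_guard<std::mutex> lock(mutex_);  // slow path is serialized
    if (initialized_.load(std::memory_order_relaxed)) return;  // lost the race
    service_ = new Service();                                  // construct fully first...
    initialized_.store(true, std::memory_order_release);       // ...then publish
  }
  std::mutex mutex_;
  std::atomic<bool> initialized_;
  Service* service_;
};
int main() {
  Host host;
  host.service()->Use();
  return 0;
}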
bool Isolate::Init(Deserializer* des) {
ASSERT(state_ != INITIALIZED);
ASSERT(Isolate::Current() == this);
TRACE_ISOLATE(init);
bool create_heap_objects = des == NULL;
#ifdef DEBUG
// The initialization process does not handle memory exhaustion.
DisallowAllocationFailure disallow_allocation_failure;
#endif
if (state_ == UNINITIALIZED && !PreInit()) return false;
InitializeLoggingAndCounters();
InitializeDebugger();
memory_allocator_ = new MemoryAllocator(this);
code_range_ = new CodeRange(this);
// Safe after setting Heap::isolate_, initializing StackGuard and
// ensuring that Isolate::Current() == this.
heap_.SetStackLimits();
#define C(name) isolate_addresses_[Isolate::k_##name] = \
reinterpret_cast<Address>(name());
ISOLATE_ADDRESS_LIST(C)
#undef C
string_tracker_ = new StringTracker();
string_tracker_->isolate_ = this;
compilation_cache_ = new CompilationCache(this);
transcendental_cache_ = new TranscendentalCache();
keyed_lookup_cache_ = new KeyedLookupCache();
context_slot_cache_ = new ContextSlotCache();
descriptor_lookup_cache_ = new DescriptorLookupCache();
unicode_cache_ = new UnicodeCache();
pc_to_code_cache_ = new PcToCodeCache(this);
write_input_buffer_ = new StringInputBuffer();
global_handles_ = new GlobalHandles(this);
bootstrapper_ = new Bootstrapper();
handle_scope_implementer_ = new HandleScopeImplementer(this);
stub_cache_ = new StubCache(this);
ast_sentinels_ = new AstSentinels();
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
// Enable logging before setting up the heap
logger_->Setup();
@ -1715,7 +1723,8 @@ bool Isolate::Init(Deserializer* des) {
stack_guard_.InitThread(lock);
}
// Setup the object heap
// Setup the object heap.
const bool create_heap_objects = (des == NULL);
ASSERT(!heap_.HasBeenSetup());
if (!heap_.Setup(create_heap_objects)) {
V8::SetFatalError();
@ -1775,6 +1784,16 @@ bool Isolate::Init(Deserializer* des) {
}
// Initialized lazily to allow early
// v8::V8::SetAddHistogramSampleFunction calls.
StatsTable* Isolate::stats_table() {
if (stats_table_ == NULL) {
stats_table_ = new StatsTable;
}
return stats_table_;
}
void Isolate::Enter() {
Isolate* current_isolate = NULL;
PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
@ -1814,8 +1833,6 @@ void Isolate::Enter() {
SetIsolateThreadLocals(this, data);
CHECK(PreInit());
// In case it's the first time some thread enters the isolate.
set_thread_id(data->thread_id());
}

54
deps/v8/src/isolate.h

@ -256,6 +256,9 @@ class ThreadLocalTop BASE_EMBEDDED {
// Call back function to report unsafe JS accesses.
v8::FailedAccessCheckCallback failed_access_check_callback_;
// Whether out of memory exceptions should be ignored.
bool ignore_out_of_memory_;
private:
void InitializeInternal();
@ -446,6 +449,13 @@ class Isolate {
return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
}
// Usually called by Init(), but can be called early e.g. to allow
// testing components that require logging but not the whole
// isolate.
//
// Safe to call more than once.
void InitializeLoggingAndCounters();
bool Init(Deserializer* des);
bool IsInitialized() { return state_ == INITIALIZED; }
@ -498,10 +508,12 @@ class Isolate {
// switched to non-legacy behavior).
static void EnterDefaultIsolate();
// Debug.
// Mutex for serializing access to break control structures.
Mutex* break_access() { return break_access_; }
// Mutex for serializing access to debugger.
Mutex* debugger_access() { return debugger_access_; }
Address get_address_from_id(AddressId id);
// Access to top context (where the current function object was created).
@ -661,6 +673,12 @@ class Isolate {
// Tells whether the current context has experienced an out of memory
// exception.
bool is_out_of_memory();
bool ignore_out_of_memory() {
return thread_local_top_.ignore_out_of_memory_;
}
void set_ignore_out_of_memory(bool value) {
thread_local_top_.ignore_out_of_memory_ = value;
}
void PrintCurrentStackTrace(FILE* out);
void PrintStackTrace(FILE* out, char* thread_data);
@ -769,14 +787,24 @@ class Isolate {
#undef GLOBAL_CONTEXT_FIELD_ACCESSOR
Bootstrapper* bootstrapper() { return bootstrapper_; }
Counters* counters() { return counters_; }
Counters* counters() {
// Call InitializeLoggingAndCounters() if logging is needed before
// the isolate is fully initialized.
ASSERT(counters_ != NULL);
return counters_;
}
CodeRange* code_range() { return code_range_; }
RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
CompilationCache* compilation_cache() { return compilation_cache_; }
Logger* logger() { return logger_; }
Logger* logger() {
// Call InitializeLoggingAndCounters() if logging is needed before
// the isolate is fully initialized.
ASSERT(logger_ != NULL);
return logger_;
}
StackGuard* stack_guard() { return &stack_guard_; }
Heap* heap() { return &heap_; }
StatsTable* stats_table() { return stats_table_; }
StatsTable* stats_table();
StubCache* stub_cache() { return stub_cache_; }
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
@ -877,8 +905,14 @@ class Isolate {
void PreallocatedStorageInit(size_t size);
#ifdef ENABLE_DEBUGGER_SUPPORT
Debugger* debugger() { return debugger_; }
Debug* debug() { return debug_; }
Debugger* debugger() {
if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
return debugger_;
}
Debug* debug() {
if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
return debug_;
}
#endif
inline bool DebuggerHasBreakPoints();
@ -1010,8 +1044,6 @@ class Isolate {
static Isolate* default_isolate_;
static ThreadDataTable* thread_data_table_;
bool PreInit();
void Deinit();
static void SetIsolateThreadLocals(Isolate* isolate,
@ -1019,7 +1051,6 @@ class Isolate {
enum State {
UNINITIALIZED, // Some components may not have been allocated.
PREINITIALIZED, // Components have been allocated but not initialized.
INITIALIZED // All components are fully initialized.
};
@ -1063,6 +1094,8 @@ class Isolate {
void PropagatePendingExceptionToExternalTryCatch();
void InitializeDebugger();
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.
@ -1076,6 +1109,8 @@ class Isolate {
Counters* counters_;
CodeRange* code_range_;
Mutex* break_access_;
Atomic32 debugger_initialized_;
Mutex* debugger_access_;
Heap heap_;
Logger* logger_;
StackGuard stack_guard_;
@ -1165,6 +1200,7 @@ class Isolate {
friend class Simulator;
friend class StackGuard;
friend class ThreadId;
friend class TestMemoryAllocatorScope;
friend class v8::Isolate;
friend class v8::Locker;
friend class v8::Unlocker;

4
deps/v8/src/json-parser.h

@ -458,12 +458,12 @@ Handle<String> JsonParser<seq_ascii>::SlowScanJsonString(
String::WriteToFlat(*prefix, dest, start, end);
while (c0_ != '"') {
// Check for control character (0x00-0x1f) or unterminated string (<0).
if (c0_ < 0x20) return Handle<String>::null();
if (count >= length) {
// We need to create a longer sequential string for the result.
return SlowScanJsonString<StringType, SinkChar>(seq_str, 0, count);
}
// Check for control character (0x00-0x1f) or unterminated string (<0).
if (c0_ < 0x20) return Handle<String>::null();
if (c0_ != '\\') {
// If the sink can contain UC16 characters, or source_ contains only
// ASCII characters, there's no need to test whether we can store the

2
deps/v8/src/jsregexp.cc

@ -267,7 +267,7 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
seq_sub->ToUC16Vector(),
needle->ToUC16Vector(),
index)));
if (index == -1) return FACTORY->null_value();
if (index == -1) return isolate->factory()->null_value();
}
ASSERT(last_match_info->HasFastElements());

2
deps/v8/src/log-utils.cc

@ -34,7 +34,7 @@ namespace v8 {
namespace internal {
const char* Log::kLogToTemporaryFile = "&";
const char* const Log::kLogToTemporaryFile = "&";
Log::Log(Logger* logger)

2
deps/v8/src/log-utils.h

@ -59,7 +59,7 @@ class Log {
// This mode is only used in tests, as temporary files are automatically
// deleted on close and thus can't be accessed afterwards.
static const char* kLogToTemporaryFile;
static const char* const kLogToTemporaryFile;
private:
explicit Log(Logger* logger);

107
deps/v8/src/mark-compact.cc

@ -64,13 +64,15 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
live_bytes_(0),
#endif
heap_(NULL),
code_flusher_(NULL) { }
code_flusher_(NULL),
encountered_weak_maps_(NULL) { }
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
ASSERT(state_ == PREPARE_GC);
ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
// Prepare has selected whether to compact the old generation or not.
// Tell the tracer.
@ -80,6 +82,8 @@ void MarkCompactCollector::CollectGarbage() {
if (FLAG_collect_maps) ClearNonLiveTransitions();
ClearWeakMaps();
SweepLargeObjectSpace();
if (IsCompacting()) {
@ -407,6 +411,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
@ -556,6 +562,34 @@ class StaticMarkingVisitor : public StaticVisitorBase {
StructBodyDescriptor,
void> StructObjectVisitor;
static void VisitJSWeakMap(Map* map, HeapObject* object) {
MarkCompactCollector* collector = map->heap()->mark_compact_collector();
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
// Enqueue weak map in linked list of encountered weak maps.
ASSERT(weak_map->next() == Smi::FromInt(0));
weak_map->set_next(collector->encountered_weak_maps());
collector->set_encountered_weak_maps(weak_map);
// Skip visiting the backing hash table containing the mappings.
int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
map->heap(),
object,
JSWeakMap::BodyDescriptor::kStartOffset,
JSWeakMap::kTableOffset);
BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
map->heap(),
object,
JSWeakMap::kTableOffset + kPointerSize,
object_size);
// Mark the backing hash table without pushing it on the marking stack.
ASSERT(!weak_map->unchecked_table()->IsMarked());
ASSERT(weak_map->unchecked_table()->map()->IsMarked());
collector->SetMark(weak_map->unchecked_table());
}
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
map->heap());
@ -1369,20 +1403,26 @@ void MarkCompactCollector::MarkImplicitRefGroups() {
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
while (!marking_stack_.is_empty()) {
HeapObject* object = marking_stack_.Pop();
ASSERT(object->IsHeapObject());
ASSERT(heap()->Contains(object));
ASSERT(object->IsMarked());
ASSERT(!object->IsOverflowed());
// Because the object is marked, we have to recover the original map
// pointer and use it to mark the object's body.
MapWord map_word = object->map_word();
map_word.ClearMark();
Map* map = map_word.ToMap();
MarkObject(map);
while (!marking_stack_.is_empty()) {
HeapObject* object = marking_stack_.Pop();
ASSERT(object->IsHeapObject());
ASSERT(heap()->Contains(object));
ASSERT(object->IsMarked());
ASSERT(!object->IsOverflowed());
StaticMarkingVisitor::IterateBody(map, object);
// Because the object is marked, we have to recover the original map
// pointer and use it to mark the object's body.
MapWord map_word = object->map_word();
map_word.ClearMark();
Map* map = map_word.ToMap();
MarkObject(map);
StaticMarkingVisitor::IterateBody(map, object);
}
// Process encountered weak maps, mark objects only reachable by those
// weak maps and repeat until fix-point is reached.
ProcessWeakMaps();
}
}
@ -1735,6 +1775,45 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
}
}
void MarkCompactCollector::ProcessWeakMaps() {
Object* weak_map_obj = encountered_weak_maps();
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
ObjectHashTable* table = weak_map->unchecked_table();
for (int i = 0; i < table->Capacity(); i++) {
if (HeapObject::cast(table->KeyAt(i))->IsMarked()) {
Object* value = table->get(table->EntryToValueIndex(i));
StaticMarkingVisitor::MarkObjectByPointer(heap(), &value);
table->set_unchecked(heap(),
table->EntryToValueIndex(i),
value,
UPDATE_WRITE_BARRIER);
}
}
weak_map_obj = weak_map->next();
}
}
void MarkCompactCollector::ClearWeakMaps() {
Object* weak_map_obj = encountered_weak_maps();
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
ObjectHashTable* table = weak_map->unchecked_table();
for (int i = 0; i < table->Capacity(); i++) {
if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) {
table->RemoveEntry(i, heap());
}
}
weak_map_obj = weak_map->next();
weak_map->set_next(Smi::FromInt(0));
}
set_encountered_weak_maps(Smi::FromInt(0));
}
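Together with the ProcessWeakMaps() call in EmptyMarkingStack(), this gives weak maps ephemeron-like semantics: a value stays alive only while its key is alive, marking re-examines the encountered weak maps until nothing new becomes reachable, and entries with dead keys are then dropped. A compact standalone model of that two-phase scheme, with hypothetical types and no write barriers or marking-stack overflow handling:
#include <cstdio>
#include <utility>
#include <vector>
struct Obj { bool marked; Obj() : marked(false) {} };
struct WeakMap {
  std::vector<std::pair<Obj*, Obj*> > entries;  // key -> value
};
// Mark values whose keys are already marked; report whether anything new
// was marked so the caller can iterate to a fix-point.
static bool ProcessWeakMaps(std::vector<WeakMap*>& maps) {
  bool marked_something = false;
  for (size_t m = 0; m < maps.size(); m++) {
    for (size_t i = 0; i < maps[m]->entries.size(); i++) {
      Obj* key = maps[m]->entries[i].first;
      Obj* value = maps[m]->entries[i].second;
      if (key->marked && !value->marked) {
        value->marked = true;  // value becomes reachable through its live key
        marked_something = true;
      }
    }
  }
  return marked_something;
}
// After marking has reached a fix-point, drop entries whose key is dead.
static void ClearWeakMaps(std::vector<WeakMap*>& maps) {
  for (size_t m = 0; m < maps.size(); m++) {
    std::vector<std::pair<Obj*, Obj*> > live;
    for (size_t i = 0; i < maps[m]->entries.size(); i++) {
      if (maps[m]->entries[i].first->marked) live.push_back(maps[m]->entries[i]);
    }
    maps[m]->entries.swap(live);
  }
}
int main() {
  Obj live_key, dead_key, v1, v2;
  live_key.marked = true;  // reachable from a root
  WeakMap map;
  map.entries.push_back(std::make_pair(&live_key, &v1));
  map.entries.push_back(std::make_pair(&dead_key, &v2));
  std::vector<WeakMap*> maps(1, &map);
  while (ProcessWeakMaps(maps)) { /* a real GC also drains the marking stack here */ }
  ClearWeakMaps(maps);
  std::printf("entries kept: %d, v1 marked: %d, v2 marked: %d\n",
              static_cast<int>(map.entries.size()),
              static_cast<int>(v1.marked), static_cast<int>(v2.marked));
  return 0;
}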
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map

16
deps/v8/src/mark-compact.h

@ -193,6 +193,11 @@ class MarkCompactCollector {
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
void EnableCodeFlushing(bool enable);
inline Object* encountered_weak_maps() { return encountered_weak_maps_; }
inline void set_encountered_weak_maps(Object* weak_map) {
encountered_weak_maps_ = weak_map;
}
private:
MarkCompactCollector();
~MarkCompactCollector();
@ -329,6 +334,16 @@ class MarkCompactCollector {
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
// Mark all values associated with reachable keys in weak maps encountered
// so far. This might push new objects or even new weak maps onto the
// marking stack.
void ProcessWeakMaps();
// After all reachable objects have been marked, those weak map entries
// with an unreachable key are removed from all encountered weak maps.
// The linked list of all encountered weak maps is destroyed.
void ClearWeakMaps();
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection, or else computing and encoding
@ -499,6 +514,7 @@ class MarkCompactCollector {
Heap* heap_;
MarkingStack marking_stack_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
friend class Heap;
friend class OverflowedObjectsScanner;

1
deps/v8/src/messages.js

@ -201,6 +201,7 @@ function FormatMessage(message) {
proxy_prop_not_configurable: ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"],
proxy_non_object_prop_names: ["Trap ", "%1", " returned non-object ", "%0"],
proxy_repeated_prop_name: ["Trap ", "%1", " returned repeated property name ", "%2"],
invalid_weakmap_key: ["Invalid value used as weak map key"],
// RangeError
invalid_array_length: ["Invalid array length"],
stack_overflow: ["Maximum call stack size exceeded"],

2
deps/v8/src/mips/deoptimizer-mips.cc

@ -39,7 +39,7 @@ namespace v8 {
namespace internal {
int Deoptimizer::table_entry_size_ = 10;
const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {

13
deps/v8/src/objects-debug.cc

@ -153,6 +153,9 @@ void HeapObject::HeapObjectVerify() {
case JS_ARRAY_TYPE:
JSArray::cast(this)->JSArrayVerify();
break;
case JS_WEAK_MAP_TYPE:
JSWeakMap::cast(this)->JSWeakMapVerify();
break;
case JS_REGEXP_TYPE:
JSRegExp::cast(this)->JSRegExpVerify();
break;
@ -313,7 +316,7 @@ void FixedArray::FixedArrayVerify() {
void FixedDoubleArray::FixedDoubleArrayVerify() {
for (int i = 0; i < length(); i++) {
if (!is_the_hole(i)) {
double value = get(i);
double value = get_scalar(i);
ASSERT(!isnan(value) ||
(BitCast<uint64_t>(value) ==
BitCast<uint64_t>(canonical_not_the_hole_nan_as_double())));
@ -453,6 +456,14 @@ void JSArray::JSArrayVerify() {
}
void JSWeakMap::JSWeakMapVerify() {
CHECK(IsJSWeakMap());
JSObjectVerify();
VerifyHeapPointer(table());
ASSERT(table()->IsHashTable());
}
void JSRegExp::JSRegExpVerify() {
JSObjectVerify();
ASSERT(data()->IsUndefined() || data()->IsFixedArray());

139
deps/v8/src/objects-inl.h

@ -35,6 +35,7 @@
#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
#include "elements.h"
#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
@ -480,6 +481,12 @@ bool Object::IsJSFunctionProxy() {
}
bool Object::IsJSWeakMap() {
return Object::IsJSObject() &&
HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE;
}
bool Object::IsJSContextExtensionObject() {
return IsHeapObject()
&& (HeapObject::cast(this)->map()->instance_type() ==
@ -1416,6 +1423,8 @@ int JSObject::GetHeaderSize() {
return JSValue::kSize;
case JS_ARRAY_TYPE:
return JSValue::kSize;
case JS_WEAK_MAP_TYPE:
return JSWeakMap::kSize;
case JS_REGEXP_TYPE:
return JSValue::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@ -1603,6 +1612,7 @@ Object* FixedArray::get(int index) {
void FixedArray::set(int index, Smi* value) {
ASSERT(map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < this->length());
ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
int offset = kHeaderSize + index * kPointerSize;
WRITE_FIELD(this, offset, value);
@ -1635,7 +1645,7 @@ inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
}
double FixedDoubleArray::get(int index) {
double FixedDoubleArray::get_scalar(int index) {
ASSERT(map() != HEAP->fixed_cow_array_map() &&
map() != HEAP->fixed_array_map());
ASSERT(index >= 0 && index < this->length());
@ -1645,6 +1655,15 @@ double FixedDoubleArray::get(int index) {
}
MaybeObject* FixedDoubleArray::get(int index) {
if (is_the_hole(index)) {
return GetHeap()->the_hole_value();
} else {
return GetHeap()->NumberFromDouble(get_scalar(index));
}
}
void FixedDoubleArray::set(int index, double value) {
ASSERT(map() != HEAP->fixed_cow_array_map() &&
map() != HEAP->fixed_array_map());
@ -2066,6 +2085,7 @@ CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(ExternalArray)
@ -2369,13 +2389,18 @@ uint8_t* ExternalPixelArray::external_pixel_pointer() {
}
uint8_t ExternalPixelArray::get(int index) {
uint8_t ExternalPixelArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = external_pixel_pointer();
return ptr[index];
}
MaybeObject* ExternalPixelArray::get(int index) {
return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
void ExternalPixelArray::set(int index, uint8_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = external_pixel_pointer();
@ -2395,13 +2420,18 @@ void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
}
int8_t ExternalByteArray::get(int index) {
int8_t ExternalByteArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int8_t* ptr = static_cast<int8_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalByteArray::get(int index) {
return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
void ExternalByteArray::set(int index, int8_t value) {
ASSERT((index >= 0) && (index < this->length()));
int8_t* ptr = static_cast<int8_t*>(external_pointer());
@ -2409,13 +2439,18 @@ void ExternalByteArray::set(int index, int8_t value) {
}
uint8_t ExternalUnsignedByteArray::get(int index) {
uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalUnsignedByteArray::get(int index) {
return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
void ExternalUnsignedByteArray::set(int index, uint8_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
@ -2423,13 +2458,18 @@ void ExternalUnsignedByteArray::set(int index, uint8_t value) {
}
int16_t ExternalShortArray::get(int index) {
int16_t ExternalShortArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int16_t* ptr = static_cast<int16_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalShortArray::get(int index) {
return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
void ExternalShortArray::set(int index, int16_t value) {
ASSERT((index >= 0) && (index < this->length()));
int16_t* ptr = static_cast<int16_t*>(external_pointer());
@ -2437,13 +2477,18 @@ void ExternalShortArray::set(int index, int16_t value) {
}
uint16_t ExternalUnsignedShortArray::get(int index) {
uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalUnsignedShortArray::get(int index) {
return Smi::FromInt(static_cast<int>(get_scalar(index)));
}
void ExternalUnsignedShortArray::set(int index, uint16_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
@ -2451,13 +2496,18 @@ void ExternalUnsignedShortArray::set(int index, uint16_t value) {
}
int32_t ExternalIntArray::get(int index) {
int32_t ExternalIntArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int32_t* ptr = static_cast<int32_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalIntArray::get(int index) {
return GetHeap()->NumberFromInt32(get_scalar(index));
}
void ExternalIntArray::set(int index, int32_t value) {
ASSERT((index >= 0) && (index < this->length()));
int32_t* ptr = static_cast<int32_t*>(external_pointer());
@ -2465,13 +2515,18 @@ void ExternalIntArray::set(int index, int32_t value) {
}
uint32_t ExternalUnsignedIntArray::get(int index) {
uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalUnsignedIntArray::get(int index) {
return GetHeap()->NumberFromUint32(get_scalar(index));
}
void ExternalUnsignedIntArray::set(int index, uint32_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
@ -2479,13 +2534,18 @@ void ExternalUnsignedIntArray::set(int index, uint32_t value) {
}
float ExternalFloatArray::get(int index) {
float ExternalFloatArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
float* ptr = static_cast<float*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalFloatArray::get(int index) {
return GetHeap()->NumberFromDouble(get_scalar(index));
}
void ExternalFloatArray::set(int index, float value) {
ASSERT((index >= 0) && (index < this->length()));
float* ptr = static_cast<float*>(external_pointer());
@ -2493,13 +2553,18 @@ void ExternalFloatArray::set(int index, float value) {
}
double ExternalDoubleArray::get(int index) {
double ExternalDoubleArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
double* ptr = static_cast<double*>(external_pointer());
return ptr[index];
}
MaybeObject* ExternalDoubleArray::get(int index) {
return GetHeap()->NumberFromDouble(get_scalar(index));
}
void ExternalDoubleArray::set(int index, double value) {
ASSERT((index >= 0) && (index < this->length()));
double* ptr = static_cast<double*>(external_pointer());
@ -3469,35 +3534,14 @@ void SharedFunctionInfo::set_optimization_disabled(bool disable) {
}
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
strict_mode,
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, strict_mode,
kStrictModeFunction)
bool SharedFunctionInfo::native() {
return BooleanBit::get(compiler_hints(), kNative);
}
void SharedFunctionInfo::set_native(bool value) {
set_compiler_hints(BooleanBit::set(compiler_hints(),
kNative,
value));
}
bool SharedFunctionInfo::bound() {
return BooleanBit::get(compiler_hints(), kBoundFunction);
}
void SharedFunctionInfo::set_bound(bool value) {
set_compiler_hints(BooleanBit::set(compiler_hints(),
kBoundFunction,
value));
}
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
name_should_print_as_anonymous,
kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
@ -3796,6 +3840,15 @@ ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, padding, Object, kPaddingOffset)
ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
ObjectHashTable* JSWeakMap::unchecked_table() {
return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
}
Address Foreign::address() {
return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
}
@ -3981,6 +4034,11 @@ JSObject::ElementsKind JSObject::GetElementsKind() {
}
ElementsAccessor* JSObject::GetElementsAccessor() {
return ElementsAccessor::ForKind(GetElementsKind());
}
bool JSObject::HasFastElements() {
return GetElementsKind() == FAST_ELEMENTS;
}
@ -4399,6 +4457,11 @@ MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
}
void ObjectHashTable::RemoveEntry(int entry) {
RemoveEntry(entry, GetHeap());
}
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:

34
deps/v8/src/objects-printer.cc

@ -151,6 +151,9 @@ void HeapObject::HeapObjectPrint(FILE* out) {
case JS_PROXY_TYPE:
JSProxy::cast(this)->JSProxyPrint(out);
break;
case JS_WEAK_MAP_TYPE:
JSWeakMap::cast(this)->JSWeakMapPrint(out);
break;
case FOREIGN_TYPE:
Foreign::cast(this)->ForeignPrint(out);
break;
@ -289,7 +292,7 @@ void JSObject::PrintElements(FILE* out) {
if (p->is_the_hole(i)) {
PrintF(out, " %d: <the hole>", i);
} else {
PrintF(out, " %d: %g", i, p->get(i));
PrintF(out, " %d: %g", i, p->get_scalar(i));
}
PrintF(out, "\n");
}
@ -298,14 +301,14 @@ void JSObject::PrintElements(FILE* out) {
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* p = ExternalPixelArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, p->get(i));
PrintF(out, " %d: %d\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_BYTE_ELEMENTS: {
ExternalByteArray* p = ExternalByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@ -313,14 +316,14 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedByteArray* p =
ExternalUnsignedByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_SHORT_ELEMENTS: {
ExternalShortArray* p = ExternalShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@ -328,14 +331,14 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedShortArray* p =
ExternalUnsignedShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_INT_ELEMENTS: {
ExternalIntArray* p = ExternalIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@ -343,21 +346,21 @@ void JSObject::PrintElements(FILE* out) {
ExternalUnsignedIntArray* p =
ExternalUnsignedIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_FLOAT_ELEMENTS: {
ExternalFloatArray* p = ExternalFloatArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %f\n", i, p->get(i));
PrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_DOUBLE_ELEMENTS: {
ExternalDoubleArray* p = ExternalDoubleArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
PrintF(out, " %d: %f\n", i, p->get(i));
PrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
@ -431,6 +434,7 @@ static const char* TypeToString(InstanceType type) {
case CODE_TYPE: return "CODE";
case JS_ARRAY_TYPE: return "JS_ARRAY";
case JS_PROXY_TYPE: return "JS_PROXY";
case JS_WEAK_MAP_TYPE: return "JS_WEAK_MAP";
case JS_REGEXP_TYPE: return "JS_REGEXP";
case JS_VALUE_TYPE: return "JS_VALUE";
case JS_GLOBAL_OBJECT_TYPE: return "JS_GLOBAL_OBJECT";
@ -584,6 +588,16 @@ void JSProxy::JSProxyPrint(FILE* out) {
}
void JSWeakMap::JSWeakMapPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSWeakMap");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
PrintF(out, " - number of elements = %d\n", table()->NumberOfElements());
PrintF(out, " - table = ");
table()->ShortPrint(out);
PrintF(out, "\n");
}
void JSFunction::JSFunctionPrint(FILE* out) {
HeapObject::PrintHeader(out, "Function");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));

3
deps/v8/src/objects-visiting.cc

@ -88,6 +88,9 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case JS_WEAK_MAP_TYPE:
return kVisitJSWeakMap;
case JS_REGEXP_TYPE:
return kVisitJSRegExp;

13
deps/v8/src/objects-visiting.h

@ -121,6 +121,7 @@ class StaticVisitorBase : public AllStatic {
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitJSFunction,
kVisitJSWeakMap,
kVisitJSRegExp,
kVisitorIdCount,
@ -317,7 +318,9 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
table_.Register(kVisitJSRegExp, &VisitJSRegExp);
table_.Register(kVisitJSWeakMap, &VisitJSObject);
table_.Register(kVisitJSRegExp, &VisitJSObject);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
@ -356,15 +359,15 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return FixedDoubleArray::SizeFor(length);
}
static inline int VisitJSObject(Map* map, HeapObject* object) {
return JSObjectVisitor::Visit(map, object);
}
static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
return SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
}
static inline int VisitJSRegExp(Map* map, HeapObject* object) {
return JSObjectVisitor::Visit(map, object);
}
static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
return SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());

852
deps/v8/src/objects.cc

File diff suppressed because it is too large

158
deps/v8/src/objects.h

@ -51,6 +51,7 @@
// - JSReceiver (suitable for property access)
// - JSObject
// - JSArray
// - JSWeakMap
// - JSRegExp
// - JSFunction
// - GlobalObject
@ -71,17 +72,19 @@
// - ExternalIntArray
// - ExternalUnsignedIntArray
// - ExternalFloatArray
// - FixedArray
// - DescriptorArray
// - HashTable
// - Dictionary
// - SymbolTable
// - CompilationCacheTable
// - CodeCacheHashTable
// - MapCache
// - Context
// - JSFunctionResultCache
// - SerializedScopeInfo
// - FixedArrayBase
// - FixedArray
// - DescriptorArray
// - HashTable
// - Dictionary
// - SymbolTable
// - CompilationCacheTable
// - CodeCacheHashTable
// - MapCache
// - Context
// - JSFunctionResultCache
// - SerializedScopeInfo
// - FixedDoubleArray
// - String
// - SeqString
// - SeqAsciiString
@ -331,6 +334,7 @@ static const int kVariableSizeSentinel = 0;
V(JS_GLOBAL_PROXY_TYPE) \
V(JS_ARRAY_TYPE) \
V(JS_PROXY_TYPE) \
V(JS_WEAK_MAP_TYPE) \
V(JS_REGEXP_TYPE) \
\
V(JS_FUNCTION_TYPE) \
@ -568,6 +572,7 @@ enum InstanceType {
JS_GLOBAL_PROXY_TYPE,
JS_ARRAY_TYPE,
JS_PROXY_TYPE,
JS_WEAK_MAP_TYPE,
JS_REGEXP_TYPE, // LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ -630,8 +635,10 @@ enum CompareResult {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
class ElementsAccessor;
class StringStream;
class ObjectVisitor;
class DictionaryElementsAccessor;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@ -748,6 +755,7 @@ class MaybeObject BASE_EMBEDDED {
V(JSArray) \
V(JSProxy) \
V(JSFunctionProxy) \
V(JSWeakMap) \
V(JSRegExp) \
V(HashTable) \
V(Dictionary) \
@ -1488,6 +1496,7 @@ class JSObject: public JSReceiver {
inline void initialize_elements();
MUST_USE_RESULT inline MaybeObject* ResetElements();
inline ElementsKind GetElementsKind();
inline ElementsAccessor* GetElementsAccessor();
inline bool HasFastElements();
inline bool HasFastDoubleElements();
inline bool HasDictionaryElements();
@ -1730,14 +1739,8 @@ class JSObject: public JSReceiver {
// Returns the index'th element.
// The undefined object if index is out of bounds.
MaybeObject* GetElementWithReceiver(Object* receiver, uint32_t index);
MaybeObject* GetElementWithInterceptor(Object* receiver, uint32_t index);
// Get external element value at index if there is one and undefined
// otherwise. Can return a failure if allocation of a heap number
// failed.
MaybeObject* GetExternalElement(uint32_t index);
// Replace the elements' backing store with fast elements of the given
// capacity. Update the length for JSArrays. Returns the new backing
// store.
@ -2001,6 +2004,8 @@ class JSObject: public JSReceiver {
};
private:
friend class DictionaryElementsAccessor;
MUST_USE_RESULT MaybeObject* GetElementWithCallback(Object* receiver,
Object* structure,
uint32_t index,
@ -2021,14 +2026,10 @@ class JSObject: public JSReceiver {
StrictModeFlag strict_mode,
bool check_prototype);
MaybeObject* GetElementPostInterceptor(Object* receiver, uint32_t index);
MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name,
DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeletePropertyWithInterceptor(String* name);
MUST_USE_RESULT MaybeObject* DeleteElementPostInterceptor(uint32_t index,
DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeleteElementWithInterceptor(uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteFastElement(uint32_t index);
@ -2092,6 +2093,7 @@ class FixedArray: public FixedArrayBase {
inline Object* get(int index);
// Setter that uses write barrier.
inline void set(int index, Object* value);
inline bool is_the_hole(int index);
// Setter that doesn't need write barrier).
inline void set(int index, Smi* value);
@ -2126,10 +2128,6 @@ class FixedArray: public FixedArrayBase {
// Compute the union of this and other.
MUST_USE_RESULT MaybeObject* UnionOfKeys(FixedArray* other);
// Compute the union of this and other.
MUST_USE_RESULT MaybeObject* UnionOfDoubleKeys(
FixedDoubleArray* other);
// Copy a sub array from the receiver to dest.
void CopyTo(int pos, FixedArray* dest, int dest_pos, int len);
@ -2197,7 +2195,8 @@ class FixedDoubleArray: public FixedArrayBase {
inline void Initialize(NumberDictionary* from);
// Setter and getter for elements.
inline double get(int index);
inline double get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, double value);
inline void set_the_hole(int index);
@ -2805,7 +2804,7 @@ class Dictionary: public HashTable<Shape, Key> {
PropertyAttributes filter,
SortMode sort_mode);
// Fill in details for properties into storage.
void CopyKeysTo(FixedArray* storage, SortMode sort_mode);
void CopyKeysTo(FixedArray* storage, int index, SortMode sort_mode);
// Accessors for next enumeration index.
void SetNextEnumerationIndex(int index) {
@ -2979,8 +2978,16 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape, JSObject*> {
MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
private:
friend class MarkCompactCollector;
void AddEntry(int entry, JSObject* key, Object* value);
void RemoveEntry(int entry);
void RemoveEntry(int entry, Heap* heap);
inline void RemoveEntry(int entry);
// Returns the index to the value of an entry.
static inline int EntryToValueIndex(int entry) {
return EntryToIndex(entry) + 1;
}
};
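EntryToValueIndex() above hints at how the table is laid out: an entry's key and value occupy adjacent slots of one flat array. A simplified standalone sketch of that layout (illustrative only; the real HashTable also reserves header slots before the first entry):

#include <vector>

// Entries stored as [key0, value0, key1, value1, ...].
struct FlatHashTableLayout {
  std::vector<int> slots;

  static int EntryToIndex(int entry) { return entry * 2; }
  static int EntryToValueIndex(int entry) { return EntryToIndex(entry) + 1; }

  int KeyAt(int entry) const { return slots[EntryToIndex(entry)]; }
  int ValueAt(int entry) const { return slots[EntryToValueIndex(entry)]; }
};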
@ -3129,6 +3136,8 @@ class ExternalArray: public HeapObject {
inline int length();
inline void set_length(int value);
inline bool is_the_hole(int index) { return false; }
// [external_pointer]: The pointer to the external memory area backing this
// external array.
DECL_ACCESSORS(external_pointer, void) // Pointer to the data store.
@ -3164,7 +3173,8 @@ class ExternalPixelArray: public ExternalArray {
inline uint8_t* external_pixel_pointer();
// Setter and getter.
inline uint8_t get(int index);
inline uint8_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber and
@ -3192,7 +3202,8 @@ class ExternalPixelArray: public ExternalArray {
class ExternalByteArray: public ExternalArray {
public:
// Setter and getter.
inline int8_t get(int index);
inline int8_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, int8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3220,7 +3231,8 @@ class ExternalByteArray: public ExternalArray {
class ExternalUnsignedByteArray: public ExternalArray {
public:
// Setter and getter.
inline uint8_t get(int index);
inline uint8_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3248,7 +3260,8 @@ class ExternalUnsignedByteArray: public ExternalArray {
class ExternalShortArray: public ExternalArray {
public:
// Setter and getter.
inline int16_t get(int index);
inline int16_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, int16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3276,7 +3289,8 @@ class ExternalShortArray: public ExternalArray {
class ExternalUnsignedShortArray: public ExternalArray {
public:
// Setter and getter.
inline uint16_t get(int index);
inline uint16_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, uint16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3304,7 +3318,8 @@ class ExternalUnsignedShortArray: public ExternalArray {
class ExternalIntArray: public ExternalArray {
public:
// Setter and getter.
inline int32_t get(int index);
inline int32_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, int32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3332,7 +3347,8 @@ class ExternalIntArray: public ExternalArray {
class ExternalUnsignedIntArray: public ExternalArray {
public:
// Setter and getter.
inline uint32_t get(int index);
inline uint32_t get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, uint32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3360,7 +3376,8 @@ class ExternalUnsignedIntArray: public ExternalArray {
class ExternalFloatArray: public ExternalArray {
public:
// Setter and getter.
inline float get(int index);
inline float get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, float value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -3388,7 +3405,8 @@ class ExternalFloatArray: public ExternalArray {
class ExternalDoubleArray: public ExternalArray {
public:
// Setter and getter.
inline double get(int index);
inline double get_scalar(int index);
inline MaybeObject* get(int index);
inline void set(int index, double value);
// This accessor applies the correct conversion from Smi, HeapNumber
@ -4644,12 +4662,10 @@ class SharedFunctionInfo: public HeapObject {
inline void set_end_position(int end_position);
// Is this function a function expression in the source code.
inline bool is_expression();
inline void set_is_expression(bool value);
DECL_BOOLEAN_ACCESSORS(is_expression)
// Is this function a top-level function (scripts, evals).
inline bool is_toplevel();
inline void set_is_toplevel(bool value);
DECL_BOOLEAN_ACCESSORS(is_toplevel)
// Bit field containing various information collected by the compiler to
// drive optimization.
@ -4705,13 +4721,21 @@ class SharedFunctionInfo: public HeapObject {
// These need special treatment in .call and .apply since

// null passed as the receiver should not be translated to the
// global object.
inline bool native();
inline void set_native(bool value);
DECL_BOOLEAN_ACCESSORS(native)
// Indicates that the function was created by the Function function.
// Though it's anonymous, toString should treat it as if it had the name
// "anonymous". We don't set the name itself so that the system does not
// see a binding for it.
DECL_BOOLEAN_ACCESSORS(name_should_print_as_anonymous)
// Indicates whether the function is a bound function created using
// the bind function.
inline bool bound();
inline void set_bound(bool value);
DECL_BOOLEAN_ACCESSORS(bound)
// Indicates that the function is anonymous (the name field can be set
// through the API, which does not change this flag).
DECL_BOOLEAN_ACCESSORS(is_anonymous)
// Indicates whether or not the code in the shared function support
// deoptimization.
@ -4893,7 +4917,6 @@ class SharedFunctionInfo: public HeapObject {
// Bit positions in compiler_hints.
static const int kCodeAgeSize = 3;
static const int kCodeAgeMask = (1 << kCodeAgeSize) - 1;
static const int kBoundFunction = 9;
enum CompilerHints {
kHasOnlySimpleThisPropertyAssignments,
@ -4904,7 +4927,11 @@ class SharedFunctionInfo: public HeapObject {
kStrictModeFunction,
kUsesArguments,
kHasDuplicateParameters,
kNative
kNative,
kBoundFunction,
kIsAnonymous,
kNameShouldPrintAsAnonymous,
kCompilerHintsCount // Pseudo entry
};
private:
@ -4918,6 +4945,9 @@ class SharedFunctionInfo: public HeapObject {
static const int kCompilerHintsSize = kIntSize;
#endif
STATIC_ASSERT(SharedFunctionInfo::kCompilerHintsCount <=
SharedFunctionInfo::kCompilerHintsSize * kBitsPerByte);
public:
// Constants for optimizing codegen for strict mode function and
// native tests.
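The reshuffle above drops the hand-written native()/bound() accessor pairs in favour of BOOL_ACCESSORS entries, moves kBoundFunction out of a hard-coded bit position into the CompilerHints enum, and adds the kCompilerHintsCount pseudo entry so the STATIC_ASSERT can verify every hint fits in the compiler_hints field. A minimal standalone sketch of that bit-flag idiom (illustrative only, not V8's macros):

#include <cstdint>

enum CompilerHint {
  kNative,
  kBoundFunction,
  kIsAnonymous,
  kNameShouldPrintAsAnonymous,
  kHintCount  // pseudo entry, mirrors kCompilerHintsCount
};

// Compile-time guard, analogous to the STATIC_ASSERT above.
static_assert(kHintCount <= 32, "hints must fit in a 32-bit hints field");

struct Hints {
  uint32_t bits = 0;

  bool get(CompilerHint hint) const { return (bits >> hint) & 1u; }
  void set(CompilerHint hint, bool value) {
    if (value) {
      bits |= 1u << hint;
    } else {
      bits &= ~(1u << hint);
    }
  }
};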
@ -6620,6 +6650,40 @@ class JSFunctionProxy: public JSProxy {
};
// The JSWeakMap describes EcmaScript Harmony weak maps
class JSWeakMap: public JSObject {
public:
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, ObjectHashTable)
// [next]: linked list of encountered weak maps during GC.
DECL_ACCESSORS(next, Object)
// Unchecked accessors to be used during GC.
inline ObjectHashTable* unchecked_table();
// Casting.
static inline JSWeakMap* cast(Object* obj);
#ifdef OBJECT_PRINT
inline void JSWeakMapPrint() {
JSWeakMapPrint(stdout);
}
void JSWeakMapPrint(FILE* out);
#endif
#ifdef DEBUG
void JSWeakMapVerify();
#endif
static const int kTableOffset = JSObject::kHeaderSize;
static const int kNextOffset = kTableOffset + kPointerSize;
static const int kSize = kNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);
};
// Foreign describes objects pointing from JavaScript to C structures.
// Since they cannot contain references to JS HeapObjects they can be
// placed in old_data_space.
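The new JSWeakMap instance type carries just two pointer-sized fields after the standard JSObject header: the backing ObjectHashTable and the next pointer used to chain weak maps encountered during GC. The offset arithmetic can be sketched in plain C++ (assumed values only; the real constants come from JSObject::kHeaderSize and kPointerSize):

#include <cstddef>

// Hypothetical layout mirroring the constants above, with assumed sizes for
// the sketch; the real values depend on the build's pointer width.
constexpr std::size_t kPointerSize = 8;
constexpr std::size_t kJSObjectHeaderSize = 3 * kPointerSize;

constexpr std::size_t kTableOffset = kJSObjectHeaderSize;
constexpr std::size_t kNextOffset = kTableOffset + kPointerSize;
constexpr std::size_t kSize = kNextOffset + kPointerSize;

static_assert(kSize == kJSObjectHeaderSize + 2 * kPointerSize,
              "JSWeakMap adds exactly two pointer fields");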

70
deps/v8/src/parser.cc

@ -659,8 +659,8 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
0,
0,
source->length(),
false,
false);
FunctionLiteral::ANONYMOUS_EXPRESSION,
false); // Does not have duplicate parameters.
} else if (stack_overflow_) {
isolate()->StackOverflow();
}
@ -729,12 +729,17 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
top_scope_->EnableStrictMode();
}
FunctionLiteralType type =
shared_info->is_expression() ? EXPRESSION : DECLARATION;
FunctionLiteral::Type type = shared_info->is_expression()
? (shared_info->is_anonymous()
? FunctionLiteral::ANONYMOUS_EXPRESSION
: FunctionLiteral::NAMED_EXPRESSION)
: FunctionLiteral::DECLARATION;
bool ok = true;
result = ParseFunctionLiteral(name,
false, // Strict mode name already checked.
RelocInfo::kNoPosition, type, &ok);
false, // Strict mode name already checked.
RelocInfo::kNoPosition,
type,
&ok);
// Make sure the results agree.
ASSERT(ok == (result != NULL));
}
@ -1471,7 +1476,7 @@ Statement* Parser::ParseFunctionDeclaration(bool* ok) {
FunctionLiteral* fun = ParseFunctionLiteral(name,
is_strict_reserved,
function_token_position,
DECLARATION,
FunctionLiteral::DECLARATION,
CHECK_OK);
// Even if we're not at the top-level of the global or a function
// scope, we treat it as such and introduce the function with its
@ -2842,8 +2847,14 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
name = ParseIdentifierOrStrictReservedWord(&is_strict_reserved_name,
CHECK_OK);
}
result = ParseFunctionLiteral(name, is_strict_reserved_name,
function_token_position, NESTED, CHECK_OK);
FunctionLiteral::Type type = name.is_null()
? FunctionLiteral::ANONYMOUS_EXPRESSION
: FunctionLiteral::NAMED_EXPRESSION;
result = ParseFunctionLiteral(name,
is_strict_reserved_name,
function_token_position,
type,
CHECK_OK);
} else {
result = ParsePrimaryExpression(CHECK_OK);
}
@ -3412,7 +3423,7 @@ ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter,
ParseFunctionLiteral(name,
false, // reserved words are allowed here
RelocInfo::kNoPosition,
DECLARATION,
FunctionLiteral::ANONYMOUS_EXPRESSION,
CHECK_OK);
// Allow any number of parameters for compatibility with JSC.
// Specification only allows zero parameters for get and one for set.
@ -3619,30 +3630,27 @@ ZoneList<Expression*>* Parser::ParseArguments(bool* ok) {
}
FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
bool name_is_strict_reserved,
int function_token_position,
FunctionLiteralType type,
FunctionLiteral::Type type,
bool* ok) {
// Function ::
// '(' FormalParameterList? ')' '{' FunctionBody '}'
bool is_named = !var_name.is_null();
// The name associated with this function. If it's a function expression,
// this is the actual function name, otherwise this is the name of the
// variable declared and initialized with the function (expression). In
// that case, we don't have a function name (it's empty).
Handle<String> name =
is_named ? var_name : isolate()->factory()->empty_symbol();
// The function name, if any.
Handle<String> function_name = isolate()->factory()->empty_symbol();
if (is_named && (type == EXPRESSION || type == NESTED)) {
function_name = name;
// Anonymous functions were passed either the empty symbol or a null
// handle as the function name. Remember if we were passed a non-empty
// handle to decide whether to invoke function name inference.
bool should_infer_name = function_name.is_null();
// We want a non-null handle as the function name.
if (should_infer_name) {
function_name = isolate()->factory()->empty_symbol();
}
int num_parameters = 0;
// Function declarations are hoisted.
Scope* scope = (type == DECLARATION)
Scope* scope = (type == FunctionLiteral::DECLARATION)
? NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false)
: NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8);
@ -3655,7 +3663,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
bool has_duplicate_parameters = false;
// Parse function body.
{ LexicalScope lexical_scope(this, scope, isolate());
top_scope_->SetScopeName(name);
top_scope_->SetScopeName(function_name);
// FormalParameterList ::
// '(' (Identifier)*[','] ')'
@ -3705,7 +3713,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
// NOTE: We create a proxy and resolve it here so that in the
// future we can change the AST to only refer to VariableProxies
// instead of Variables and Proxies as is the case now.
if (!function_name.is_null() && function_name->length() > 0) {
if (type == FunctionLiteral::NAMED_EXPRESSION) {
Variable* fvar = top_scope_->DeclareFunctionVar(function_name);
VariableProxy* fproxy =
top_scope_->NewUnresolved(function_name, inside_with());
@ -3739,7 +3747,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
end_pos = entry.end_pos();
if (end_pos <= function_block_pos) {
// End position greater than end of stream is safe, and hard to check.
ReportInvalidPreparseData(name, CHECK_OK);
ReportInvalidPreparseData(function_name, CHECK_OK);
}
isolate()->counters()->total_preparse_skipped()->Increment(
end_pos - function_block_pos);
@ -3769,7 +3777,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
// Validate strict mode.
if (top_scope_->is_strict_mode()) {
if (IsEvalOrArguments(name)) {
if (IsEvalOrArguments(function_name)) {
int position = function_token_position != RelocInfo::kNoPosition
? function_token_position
: (start_pos > 0 ? start_pos - 1 : start_pos);
@ -3813,7 +3821,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
FunctionLiteral* function_literal =
new(zone()) FunctionLiteral(isolate(),
name,
function_name,
scope,
body,
materialized_literal_count,
@ -3823,11 +3831,11 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
num_parameters,
start_pos,
end_pos,
(function_name->length() > 0),
type,
has_duplicate_parameters);
function_literal->set_function_token_position(function_token_position);
if (fni_ != NULL && !is_named) fni_->AddFunction(function_literal);
if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);
return function_literal;
}

8
deps/v8/src/parser.h

@ -552,17 +552,11 @@ class Parser {
// in the object literal boilerplate.
Handle<Object> GetBoilerplateValue(Expression* expression);
enum FunctionLiteralType {
EXPRESSION,
DECLARATION,
NESTED
};
ZoneList<Expression*>* ParseArguments(bool* ok);
FunctionLiteral* ParseFunctionLiteral(Handle<String> var_name,
bool name_is_reserved,
int function_token_position,
FunctionLiteralType type,
FunctionLiteral::Type type,
bool* ok);

114
deps/v8/src/runtime.cc

@ -615,8 +615,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateJSProxy) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSProxy) {
ASSERT(args.length() == 1);
Object* obj = args[0];
return obj->IsJSProxy()
? isolate->heap()->true_value() : isolate->heap()->false_value();
return isolate->heap()->ToBoolean(obj->IsJSProxy());
}
@ -635,6 +634,43 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
ASSERT(weakmap->map()->inobject_properties() == 0);
Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
weakmap->set_table(*table);
weakmap->set_next(Smi::FromInt(0));
return *weakmap;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapGet) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
// TODO(mstarzinger): Currently we cannot use JSProxy objects as keys
// because they cannot be cast to JSObject to get an identity hash code.
CONVERT_ARG_CHECKED(JSObject, key, 1);
return weakmap->table()->Lookup(*key);
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapSet) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
// TODO(mstarzinger): See Runtime_WeakMapGet above.
CONVERT_ARG_CHECKED(JSObject, key, 1);
Handle<Object> value(args[2]);
Handle<ObjectHashTable> table(weakmap->table());
Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
weakmap->set_table(*new_table);
return *value;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ClassOf) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
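Note the shape of Runtime_WeakMapSet above: Put returns a (possibly different) table, and the weak map's table field is re-pointed at the result — presumably because inserting can grow and therefore reallocate the hash table, so the old handle cannot be assumed to stay current. The same store-back pattern as a standalone sketch (invented types, not V8 API):

#include <memory>
#include <unordered_map>

using Table = std::unordered_map<int, int>;

// Insertion may replace the backing table entirely; always store the result back.
std::shared_ptr<Table> PutIntoTable(std::shared_ptr<Table> table,
                                    int key, int value) {
  // Model "reallocation on growth" by copying into a fresh table.
  auto new_table = std::make_shared<Table>(*table);
  (*new_table)[key] = value;
  return new_table;
}

struct WeakMapLike {
  std::shared_ptr<Table> table = std::make_shared<Table>();

  void Set(int key, int value) {
    table = PutIntoTable(table, key, value);  // re-point, as set_table() does above
  }
};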
@ -1001,8 +1037,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsExtensible) {
ASSERT(proto->IsJSGlobalObject());
obj = JSObject::cast(proto);
}
return obj->map()->is_extensible() ? isolate->heap()->true_value()
: isolate->heap()->false_value();
return isolate->heap()->ToBoolean(obj->map()->is_extensible());
}
@ -1068,8 +1103,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) {
Map::cast(new_map)->set_is_access_check_needed(false);
object->set_map(Map::cast(new_map));
}
return needs_access_checks ? isolate->heap()->true_value()
: isolate->heap()->false_value();
return isolate->heap()->ToBoolean(needs_access_checks);
}
@ -1880,6 +1914,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetName) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionNameShouldPrintAsAnonymous) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
return isolate->heap()->ToBoolean(
f->shared()->name_should_print_as_anonymous());
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
f->shared()->set_name_should_print_as_anonymous(true);
return isolate->heap()->undefined_value();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetBound) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
@ -1967,6 +2019,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetLength) {
}
// Creates a local, read-only property called length with the correct
// length (when read by the user). This effectively overwrites the
// interceptor normally used to provide the length.
RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionSetLength) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
CONVERT_CHECKED(JSFunction, fun, args[0]);
CONVERT_CHECKED(Smi, length, args[1]);
MaybeObject* maybe_name =
isolate->heap()->AllocateStringFromAscii(CStrVector("length"));
String* name;
if (!maybe_name->To(&name)) return maybe_name;
PropertyAttributes attr =
static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY);
return fun->AddProperty(name, length, attr, kNonStrictMode);
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@ -2042,8 +2112,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) {
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
return f->shared()->IsApiFunction() ? isolate->heap()->true_value()
: isolate->heap()->false_value();
return isolate->heap()->ToBoolean(f->shared()->IsApiFunction());
}
@ -2052,8 +2121,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsBuiltin) {
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
return f->IsBuiltin() ? isolate->heap()->true_value() :
isolate->heap()->false_value();
return isolate->heap()->ToBoolean(f->IsBuiltin());
}
@ -4529,9 +4597,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
// Get the property names.
jsproto = obj;
int proto_with_hidden_properties = 0;
int next_copy_index = 0;
for (int i = 0; i < length; i++) {
jsproto->GetLocalPropertyNames(*names,
i == 0 ? 0 : local_property_count[i - 1]);
jsproto->GetLocalPropertyNames(*names, next_copy_index);
next_copy_index += local_property_count[i];
if (jsproto->HasHiddenProperties()) {
proto_with_hidden_properties++;
}
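The Runtime_GetLocalPropertyNames change above replaces a copy offset derived from only the previous prototype's count with a running total; with more than two objects in the prototype chain the old offset would land too early and overwrite earlier names. The accumulation pattern, sketched standalone:

#include <cstddef>
#include <vector>

// Copy each prototype's names into one pre-sized flat array at a running
// offset that accumulates every previous list's length.
void CopyAllNames(const std::vector<std::vector<int>>& per_proto,
                  std::vector<int>* names) {
  std::size_t next_copy_index = 0;
  for (const std::vector<int>& list : per_proto) {
    for (std::size_t j = 0; j < list.size(); ++j) {
      (*names)[next_copy_index + j] = list[j];
    }
    next_copy_index += list.size();  // accumulate, not just the last list's size
  }
}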
@ -9200,13 +9269,13 @@ static void IterateExternalArrayElements(Isolate* isolate,
if (elements_are_guaranteed_smis) {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope;
Handle<Smi> e(Smi::FromInt(static_cast<int>(array->get(j))));
Handle<Smi> e(Smi::FromInt(static_cast<int>(array->get_scalar(j))));
visitor->visit(j, e);
}
} else {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope;
int64_t val = static_cast<int64_t>(array->get(j));
int64_t val = static_cast<int64_t>(array->get_scalar(j));
if (Smi::IsValid(static_cast<intptr_t>(val))) {
Handle<Smi> e(Smi::FromInt(static_cast<int>(val)));
visitor->visit(j, e);
@ -9220,7 +9289,7 @@ static void IterateExternalArrayElements(Isolate* isolate,
} else {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope(isolate);
Handle<Object> e = isolate->factory()->NewNumber(array->get(j));
Handle<Object> e = isolate->factory()->NewNumber(array->get_scalar(j));
visitor->visit(j, e);
}
}
@ -9406,7 +9475,7 @@ static bool IterateElements(Isolate* isolate,
Handle<ExternalPixelArray> pixels(ExternalPixelArray::cast(
receiver->elements()));
for (uint32_t j = 0; j < length; j++) {
Handle<Smi> e(Smi::FromInt(pixels->get(j)));
Handle<Smi> e(Smi::FromInt(pixels->get_scalar(j)));
visitor->visit(j, e);
}
break;
@ -9924,9 +9993,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
details->set(0, *value);
details->set(1, property_details);
if (hasJavaScriptAccessors) {
details->set(2,
caught_exception ? isolate->heap()->true_value()
: isolate->heap()->false_value());
details->set(2, isolate->heap()->ToBoolean(caught_exception));
details->set(3, FixedArray::cast(*result_callback_obj)->get(0));
details->set(4, FixedArray::cast(*result_callback_obj)->get(1));
}
@ -11378,7 +11445,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
context->set_extension(*local_scope);
// Copy any with contexts present and chain them in front of this context.
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context(frame_context->declaration_context());
Handle<Context> function_context;
// Get the function's context if it has one.
if (scope_info->HasHeapAllocatedLocals()) {
function_context = Handle<Context>(frame_context->declaration_context());
}
context = CopyWithContextChain(isolate, go_between, frame_context, context);
if (additional_context->IsJSObject()) {
@ -12109,8 +12180,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteLOL) {
#ifdef LIVE_OBJECT_LIST
CONVERT_SMI_ARG_CHECKED(id, 0);
bool success = LiveObjectList::Delete(id);
return success ? isolate->heap()->true_value() :
isolate->heap()->false_value();
return isolate->heap()->ToBoolean(success);
#else
return isolate->heap()->undefined_value();
#endif

8
deps/v8/src/runtime.h

@ -209,10 +209,13 @@ namespace internal {
/* Reflection */ \
F(FunctionSetInstanceClassName, 2, 1) \
F(FunctionSetLength, 2, 1) \
F(BoundFunctionSetLength, 2, 1) \
F(FunctionSetPrototype, 2, 1) \
F(FunctionSetReadOnlyPrototype, 1, 1) \
F(FunctionGetName, 1, 1) \
F(FunctionSetName, 2, 1) \
F(FunctionNameShouldPrintAsAnonymous, 1, 1) \
F(FunctionMarkNameShouldPrintAsAnonymous, 1, 1) \
F(FunctionSetBound, 1, 1) \
F(FunctionRemovePrototype, 1, 1) \
F(FunctionGetSourceCode, 1, 1) \
@ -287,6 +290,11 @@ namespace internal {
F(GetHandler, 1, 1) \
F(Fix, 1, 1) \
\
/* Harmony weakmaps */ \
F(WeakMapInitialize, 1, 1) \
F(WeakMapGet, 2, 1) \
F(WeakMapSet, 3, 1) \
\
/* Statements */ \
F(NewClosure, 3, 1) \
F(NewObject, 1, 1) \

449
deps/v8/src/scanner-base.cc

@ -89,10 +89,158 @@ void JavaScriptScanner::Initialize(UC16CharacterStream* source) {
Scan();
}
// Ensure that tokens can be stored in a byte.
STATIC_ASSERT(Token::NUM_TOKENS <= 0x100);
// Table of one-character tokens, by character (0x00..0x7f only).
static const byte one_char_tokens[] = {
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::LPAREN, // 0x28
Token::RPAREN, // 0x29
Token::ILLEGAL,
Token::ILLEGAL,
Token::COMMA, // 0x2c
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::COLON, // 0x3a
Token::SEMICOLON, // 0x3b
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::CONDITIONAL, // 0x3f
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::LBRACK, // 0x5b
Token::ILLEGAL,
Token::RBRACK, // 0x5d
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::ILLEGAL,
Token::LBRACE, // 0x7b
Token::ILLEGAL,
Token::RBRACE, // 0x7d
Token::BIT_NOT, // 0x7e
Token::ILLEGAL
};
Token::Value JavaScriptScanner::Next() {
current_ = next_;
has_line_terminator_before_next_ = false;
has_multiline_comment_before_next_ = false;
if (static_cast<unsigned>(c0_) <= 0x7f) {
Token::Value token = static_cast<Token::Value>(one_char_tokens[c0_]);
if (token != Token::ILLEGAL) {
int pos = source_pos();
next_.token = token;
next_.location.beg_pos = pos;
next_.location.end_pos = pos + 1;
Advance();
return current_.token;
}
}
Scan();
return current_.token;
}
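The new Next() takes a fast path for single-character punctuation: a 128-entry table maps an ASCII code unit straight to its token, and only characters outside that set fall through to the general Scan(). A condensed standalone sketch of the same dispatch (tiny invented token set, not the real table):

#include <array>
#include <cstddef>
#include <cstdint>

enum class Tok : std::uint8_t { ILLEGAL, LPAREN, RPAREN, COMMA, COLON, SEMICOLON };

// Build the 0x00..0x7f lookup table; anything not listed stays ILLEGAL and is
// handled by the general scanner instead.
constexpr std::array<Tok, 128> MakeOneCharTokens() {
  std::array<Tok, 128> table{};  // all ILLEGAL
  table['('] = Tok::LPAREN;
  table[')'] = Tok::RPAREN;
  table[','] = Tok::COMMA;
  table[':'] = Tok::COLON;
  table[';'] = Tok::SEMICOLON;
  return table;
}

constexpr std::array<Tok, 128> kOneCharTokens = MakeOneCharTokens();

inline Tok OneCharToken(int c0) {
  if (static_cast<unsigned>(c0) <= 0x7f) {
    return kOneCharTokens[static_cast<std::size_t>(c0)];  // ILLEGAL means "no fast path"
  }
  return Tok::ILLEGAL;  // non-ASCII (or EOF) falls back to the full scanner
}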
@ -171,7 +319,7 @@ Token::Value JavaScriptScanner::SkipMultiLineComment() {
Advance();
while (c0_ >= 0) {
char ch = c0_;
uc32 ch = c0_;
Advance();
if (unicode_cache_->IsLineTerminator(ch)) {
// Following ECMA-262, section 7.4, a comment containing
@ -662,10 +810,114 @@ uc32 JavaScriptScanner::ScanIdentifierUnicodeEscape() {
}
// ----------------------------------------------------------------------------
// Keyword Matcher
#define KEYWORDS(KEYWORD_GROUP, KEYWORD) \
KEYWORD_GROUP('b') \
KEYWORD("break", BREAK) \
KEYWORD_GROUP('c') \
KEYWORD("case", CASE) \
KEYWORD("catch", CATCH) \
KEYWORD("class", FUTURE_RESERVED_WORD) \
KEYWORD("const", CONST) \
KEYWORD("continue", CONTINUE) \
KEYWORD_GROUP('d') \
KEYWORD("debugger", DEBUGGER) \
KEYWORD("default", DEFAULT) \
KEYWORD("delete", DELETE) \
KEYWORD("do", DO) \
KEYWORD_GROUP('e') \
KEYWORD("else", ELSE) \
KEYWORD("enum", FUTURE_RESERVED_WORD) \
KEYWORD("export", FUTURE_RESERVED_WORD) \
KEYWORD("extends", FUTURE_RESERVED_WORD) \
KEYWORD_GROUP('f') \
KEYWORD("false", FALSE_LITERAL) \
KEYWORD("finally", FINALLY) \
KEYWORD("for", FOR) \
KEYWORD("function", FUNCTION) \
KEYWORD_GROUP('i') \
KEYWORD("if", IF) \
KEYWORD("implements", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("import", FUTURE_RESERVED_WORD) \
KEYWORD("in", IN) \
KEYWORD("instanceof", INSTANCEOF) \
KEYWORD("interface", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('l') \
KEYWORD("let", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('n') \
KEYWORD("new", NEW) \
KEYWORD("null", NULL_LITERAL) \
KEYWORD_GROUP('p') \
KEYWORD("package", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("private", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("protected", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("public", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('r') \
KEYWORD("return", RETURN) \
KEYWORD_GROUP('s') \
KEYWORD("static", FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("super", FUTURE_RESERVED_WORD) \
KEYWORD("switch", SWITCH) \
KEYWORD_GROUP('t') \
KEYWORD("this", THIS) \
KEYWORD("throw", THROW) \
KEYWORD("true", TRUE_LITERAL) \
KEYWORD("try", TRY) \
KEYWORD("typeof", TYPEOF) \
KEYWORD_GROUP('v') \
KEYWORD("var", VAR) \
KEYWORD("void", VOID) \
KEYWORD_GROUP('w') \
KEYWORD("while", WHILE) \
KEYWORD("with", WITH) \
KEYWORD_GROUP('y') \
KEYWORD("yield", FUTURE_STRICT_RESERVED_WORD)
static Token::Value KeywordOrIdentifierToken(const char* input,
int input_length) {
ASSERT(input_length >= 1);
const int kMinLength = 2;
const int kMaxLength = 10;
if (input_length < kMinLength || input_length > kMaxLength) {
return Token::IDENTIFIER;
}
switch (input[0]) {
default:
#define KEYWORD_GROUP_CASE(ch) \
break; \
case ch:
#define KEYWORD(keyword, token) \
{ \
/* 'keyword' is a char array, so sizeof(keyword) is */ \
/* strlen(keyword) plus 1 for the NUL char. */ \
const int keyword_length = sizeof(keyword) - 1; \
STATIC_ASSERT(keyword_length >= kMinLength); \
STATIC_ASSERT(keyword_length <= kMaxLength); \
if (input_length == keyword_length && \
input[1] == keyword[1] && \
(keyword_length <= 2 || input[2] == keyword[2]) && \
(keyword_length <= 3 || input[3] == keyword[3]) && \
(keyword_length <= 4 || input[4] == keyword[4]) && \
(keyword_length <= 5 || input[5] == keyword[5]) && \
(keyword_length <= 6 || input[6] == keyword[6]) && \
(keyword_length <= 7 || input[7] == keyword[7]) && \
(keyword_length <= 8 || input[8] == keyword[8]) && \
(keyword_length <= 9 || input[9] == keyword[9])) { \
return Token::token; \
} \
}
KEYWORDS(KEYWORD_GROUP_CASE, KEYWORD)
}
return Token::IDENTIFIER;
}
Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
ASSERT(unicode_cache_->IsIdentifierStart(c0_));
LiteralScope literal(this);
KeywordMatcher keyword_match;
// Scan identifier start character.
if (c0_ == '\\') {
uc32 c = ScanIdentifierUnicodeEscape();
@ -678,9 +930,6 @@ Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
uc32 first_char = c0_;
Advance();
AddLiteralChar(first_char);
if (!keyword_match.AddChar(first_char)) {
return ScanIdentifierSuffix(&literal);
}
// Scan the rest of the identifier characters.
while (unicode_cache_->IsIdentifierPart(c0_)) {
@ -688,14 +937,20 @@ Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
uc32 next_char = c0_;
Advance();
AddLiteralChar(next_char);
if (keyword_match.AddChar(next_char)) continue;
continue;
}
// Fallthrough if no loner able to complete keyword.
// Fallthrough if no longer able to complete keyword.
return ScanIdentifierSuffix(&literal);
}
literal.Complete();
return keyword_match.token();
if (next_.literal_chars->is_ascii()) {
Vector<const char> chars = next_.literal_chars->ascii_literal();
return KeywordOrIdentifierToken(chars.start(), chars.length());
}
return Token::IDENTIFIER;
}
@ -785,182 +1040,4 @@ bool JavaScriptScanner::ScanRegExpFlags() {
return true;
}
// ----------------------------------------------------------------------------
// Keyword Matcher
KeywordMatcher::FirstState KeywordMatcher::first_states_[] = {
{ "break", KEYWORD_PREFIX, Token::BREAK },
{ NULL, C, Token::ILLEGAL },
{ NULL, D, Token::ILLEGAL },
{ NULL, E, Token::ILLEGAL },
{ NULL, F, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, I, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ "let", KEYWORD_PREFIX, Token::FUTURE_STRICT_RESERVED_WORD },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, N, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, P, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ "return", KEYWORD_PREFIX, Token::RETURN },
{ NULL, S, Token::ILLEGAL },
{ NULL, T, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ NULL, V, Token::ILLEGAL },
{ NULL, W, Token::ILLEGAL },
{ NULL, UNMATCHABLE, Token::ILLEGAL },
{ "yield", KEYWORD_PREFIX, Token::FUTURE_STRICT_RESERVED_WORD }
};
void KeywordMatcher::Step(unibrow::uchar input) {
switch (state_) {
case INITIAL: {
// matching the first character is the only state with significant fanout.
// Match only lower-case letters in range 'b'..'y'.
unsigned int offset = input - kFirstCharRangeMin;
if (offset < kFirstCharRangeLength) {
state_ = first_states_[offset].state;
if (state_ == KEYWORD_PREFIX) {
keyword_ = first_states_[offset].keyword;
counter_ = 1;
keyword_token_ = first_states_[offset].token;
}
return;
}
break;
}
case KEYWORD_PREFIX:
if (static_cast<unibrow::uchar>(keyword_[counter_]) == input) {
counter_++;
if (keyword_[counter_] == '\0') {
state_ = KEYWORD_MATCHED;
token_ = keyword_token_;
}
return;
}
break;
case KEYWORD_MATCHED:
token_ = Token::IDENTIFIER;
break;
case C:
if (MatchState(input, 'a', CA)) return;
if (MatchKeywordStart(input, "class", 1,
Token::FUTURE_RESERVED_WORD)) return;
if (MatchState(input, 'o', CO)) return;
break;
case CA:
if (MatchKeywordStart(input, "case", 2, Token::CASE)) return;
if (MatchKeywordStart(input, "catch", 2, Token::CATCH)) return;
break;
case CO:
if (MatchState(input, 'n', CON)) return;
break;
case CON:
if (MatchKeywordStart(input, "const", 3, Token::CONST)) return;
if (MatchKeywordStart(input, "continue", 3, Token::CONTINUE)) return;
break;
case D:
if (MatchState(input, 'e', DE)) return;
if (MatchKeyword(input, 'o', KEYWORD_MATCHED, Token::DO)) return;
break;
case DE:
if (MatchKeywordStart(input, "debugger", 2, Token::DEBUGGER)) return;
if (MatchKeywordStart(input, "default", 2, Token::DEFAULT)) return;
if (MatchKeywordStart(input, "delete", 2, Token::DELETE)) return;
break;
case E:
if (MatchKeywordStart(input, "else", 1, Token::ELSE)) return;
if (MatchKeywordStart(input, "enum", 1,
Token::FUTURE_RESERVED_WORD)) return;
if (MatchState(input, 'x', EX)) return;
break;
case EX:
if (MatchKeywordStart(input, "export", 2,
Token::FUTURE_RESERVED_WORD)) return;
if (MatchKeywordStart(input, "extends", 2,
Token::FUTURE_RESERVED_WORD)) return;
break;
case F:
if (MatchKeywordStart(input, "false", 1, Token::FALSE_LITERAL)) return;
if (MatchKeywordStart(input, "finally", 1, Token::FINALLY)) return;
if (MatchKeywordStart(input, "for", 1, Token::FOR)) return;
if (MatchKeywordStart(input, "function", 1, Token::FUNCTION)) return;
break;
case I:
if (MatchKeyword(input, 'f', KEYWORD_MATCHED, Token::IF)) return;
if (MatchState(input, 'm', IM)) return;
if (MatchKeyword(input, 'n', IN, Token::IN)) return;
break;
case IM:
if (MatchState(input, 'p', IMP)) return;
break;
case IMP:
if (MatchKeywordStart(input, "implements", 3,
Token::FUTURE_STRICT_RESERVED_WORD )) return;
if (MatchKeywordStart(input, "import", 3,
Token::FUTURE_RESERVED_WORD)) return;
break;
case IN:
token_ = Token::IDENTIFIER;
if (MatchKeywordStart(input, "interface", 2,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
if (MatchKeywordStart(input, "instanceof", 2, Token::INSTANCEOF)) return;
break;
case N:
if (MatchKeywordStart(input, "new", 1, Token::NEW)) return;
if (MatchKeywordStart(input, "null", 1, Token::NULL_LITERAL)) return;
break;
case P:
if (MatchKeywordStart(input, "package", 1,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
if (MatchState(input, 'r', PR)) return;
if (MatchKeywordStart(input, "public", 1,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
break;
case PR:
if (MatchKeywordStart(input, "private", 2,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
if (MatchKeywordStart(input, "protected", 2,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
break;
case S:
if (MatchKeywordStart(input, "static", 1,
Token::FUTURE_STRICT_RESERVED_WORD)) return;
if (MatchKeywordStart(input, "super", 1,
Token::FUTURE_RESERVED_WORD)) return;
if (MatchKeywordStart(input, "switch", 1,
Token::SWITCH)) return;
break;
case T:
if (MatchState(input, 'h', TH)) return;
if (MatchState(input, 'r', TR)) return;
if (MatchKeywordStart(input, "typeof", 1, Token::TYPEOF)) return;
break;
case TH:
if (MatchKeywordStart(input, "this", 2, Token::THIS)) return;
if (MatchKeywordStart(input, "throw", 2, Token::THROW)) return;
break;
case TR:
if (MatchKeywordStart(input, "true", 2, Token::TRUE_LITERAL)) return;
if (MatchKeyword(input, 'y', KEYWORD_MATCHED, Token::TRY)) return;
break;
case V:
if (MatchKeywordStart(input, "var", 1, Token::VAR)) return;
if (MatchKeywordStart(input, "void", 1, Token::VOID)) return;
break;
case W:
if (MatchKeywordStart(input, "while", 1, Token::WHILE)) return;
if (MatchKeywordStart(input, "with", 1, Token::WITH)) return;
break;
case UNMATCHABLE:
break;
}
// On fallthrough, it's a failure.
state_ = UNMATCHABLE;
}
} } // namespace v8::internal

145
deps/v8/src/scanner-base.h

@ -542,151 +542,6 @@ class JavaScriptScanner : public Scanner {
bool has_multiline_comment_before_next_;
};
// ----------------------------------------------------------------------------
// Keyword matching state machine.
class KeywordMatcher {
// Incrementally recognize keywords.
//
// We distinguish between normal future reserved words and words that are
// considered to be future reserved words only in strict mode as required by
// ECMA-262 7.6.1.2.
//
// Recognized as keywords:
// break, case, catch, const*, continue, debugger, default, delete, do,
// else, finally, false, for, function, if, in, instanceof, new, null,
// return, switch, this, throw, true, try, typeof, var, void, while, with.
//
// Recognized as Future Reserved Keywords:
// class, enum, export, extends, import, super.
//
// Recognized as Future Reserved Keywords (strict mode only):
// implements, interface, let, package, private, protected, public,
// static, yield.
//
// *: Actually a "future reserved keyword". It's the only one we are
// recognizing outside of ES5 strict mode, the remaining are allowed
// as identifiers.
//
public:
KeywordMatcher()
: state_(INITIAL),
token_(Token::IDENTIFIER),
keyword_(NULL),
counter_(0),
keyword_token_(Token::ILLEGAL) {}
Token::Value token() { return token_; }
inline bool AddChar(unibrow::uchar input) {
if (state_ != UNMATCHABLE) {
Step(input);
}
return state_ != UNMATCHABLE;
}
void Fail() {
token_ = Token::IDENTIFIER;
state_ = UNMATCHABLE;
}
private:
enum State {
UNMATCHABLE,
INITIAL,
KEYWORD_PREFIX,
KEYWORD_MATCHED,
C,
CA,
CO,
CON,
D,
DE,
E,
EX,
F,
I,
IM,
IMP,
IN,
N,
P,
PR,
S,
T,
TH,
TR,
V,
W
};
struct FirstState {
const char* keyword;
State state;
Token::Value token;
};
// Range of possible first characters of a keyword.
static const unsigned int kFirstCharRangeMin = 'b';
static const unsigned int kFirstCharRangeMax = 'y';
static const unsigned int kFirstCharRangeLength =
kFirstCharRangeMax - kFirstCharRangeMin + 1;
// State map for first keyword character range.
static FirstState first_states_[kFirstCharRangeLength];
// If input equals keyword's character at position, continue matching keyword
// from that position.
inline bool MatchKeywordStart(unibrow::uchar input,
const char* keyword,
int position,
Token::Value token_if_match) {
if (input != static_cast<unibrow::uchar>(keyword[position])) {
return false;
}
state_ = KEYWORD_PREFIX;
this->keyword_ = keyword;
this->counter_ = position + 1;
this->keyword_token_ = token_if_match;
return true;
}
// If input equals match character, transition to new state and return true.
inline bool MatchState(unibrow::uchar input, char match, State new_state) {
if (input != static_cast<unibrow::uchar>(match)) {
return false;
}
state_ = new_state;
return true;
}
inline bool MatchKeyword(unibrow::uchar input,
char match,
State new_state,
Token::Value keyword_token) {
if (input != static_cast<unibrow::uchar>(match)) {
return false;
}
state_ = new_state;
token_ = keyword_token;
return true;
}
void Step(unibrow::uchar input);
// Current state.
State state_;
// Token for currently added characters.
Token::Value token_;
// Matching a specific keyword string (there is only one possible valid
// keyword with the current prefix).
const char* keyword_;
int counter_;
Token::Value keyword_token_;
};
} } // namespace v8::internal
#endif // V8_SCANNER_BASE_H_

16
deps/v8/src/spaces.cc

@ -148,12 +148,12 @@ PageIterator::PageIterator(PagedSpace* space, Mode mode) : space_(space) {
// CodeRange
CodeRange::CodeRange()
: code_range_(NULL),
CodeRange::CodeRange(Isolate* isolate)
: isolate_(isolate),
code_range_(NULL),
free_list_(0),
allocation_list_(0),
current_allocation_block_index_(0),
isolate_(NULL) {
current_allocation_block_index_(0) {
}
@ -279,8 +279,9 @@ void CodeRange::TearDown() {
const int kEstimatedNumberOfChunks = 270;
MemoryAllocator::MemoryAllocator()
: capacity_(0),
MemoryAllocator::MemoryAllocator(Isolate* isolate)
: isolate_(isolate),
capacity_(0),
capacity_executable_(0),
size_(0),
size_executable_(0),
@ -288,8 +289,7 @@ MemoryAllocator::MemoryAllocator()
chunks_(kEstimatedNumberOfChunks),
free_chunk_ids_(kEstimatedNumberOfChunks),
max_nof_chunks_(0),
top_(0),
isolate_(NULL) {
top_(0) {
}

22
deps/v8/src/spaces.h

@ -408,6 +408,8 @@ class Space : public Malloced {
// manages a range of virtual memory.
class CodeRange {
public:
explicit CodeRange(Isolate* isolate);
// Reserves a range of virtual memory, but does not commit any of it.
// Can only be called once, at heap initialization time.
// Returns false on failure.
@ -417,9 +419,9 @@ class CodeRange {
// manage it.
void TearDown();
bool exists() { return code_range_ != NULL; }
bool exists() { return this != NULL && code_range_ != NULL; }
bool contains(Address address) {
if (code_range_ == NULL) return false;
if (this == NULL || code_range_ == NULL) return false;
Address start = static_cast<Address>(code_range_->address());
return start <= address && address < start + code_range_->size();
}
@ -432,7 +434,7 @@ class CodeRange {
void FreeRawMemory(void* buf, size_t length);
private:
CodeRange();
Isolate* isolate_;
// The reserved range of virtual memory that all code objects are put in.
VirtualMemory* code_range_;
@ -466,10 +468,6 @@ class CodeRange {
static int CompareFreeBlockAddress(const FreeBlock* left,
const FreeBlock* right);
friend class Isolate;
Isolate* isolate_;
DISALLOW_COPY_AND_ASSIGN(CodeRange);
};
@ -500,6 +498,8 @@ class CodeRange {
class MemoryAllocator {
public:
explicit MemoryAllocator(Isolate* isolate);
// Initializes its internal bookkeeping structures.
// Max capacity of the total space and executable memory limit.
bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
@ -657,10 +657,10 @@ class MemoryAllocator {
#endif
private:
MemoryAllocator();
static const int kChunkSize = kPagesPerChunk * Page::kPageSize;
Isolate* isolate_;
// Maximum space size in bytes.
intptr_t capacity_;
// Maximum subset of capacity_ that can be executable
@ -753,10 +753,6 @@ class MemoryAllocator {
Page* prev,
Page** last_page_in_use);
friend class Isolate;
Isolate* isolate_;
DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
};
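A minimal usage sketch of the new isolate-aware constructors, assembled from the updated cctests further down in this diff (CHECK, the heap accessors, and the 16*MB code-range size are all borrowed from those tests; the wrapper function name is purely illustrative):
void ExerciseIsolateBoundAllocators() {
  Isolate* isolate = Isolate::Current();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());
  // The allocator and the code range are now constructed for one specific
  // isolate instead of being patched up with a global after the fact.
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->Setup(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  CodeRange* code_range = new CodeRange(isolate);
  CHECK(code_range->Setup(16 * MB));
  // ... allocate pages and raw code memory here ...
  code_range->TearDown();
  delete code_range;
  memory_allocator->TearDown();
  delete memory_allocator;
}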

8
deps/v8/src/token.cc

@ -33,21 +33,21 @@ namespace internal {
#define T(name, string, precedence) #name,
const char* const Token::name_[NUM_TOKENS] = {
TOKEN_LIST(T, T, IGNORE_TOKEN)
TOKEN_LIST(T, T)
};
#undef T
#define T(name, string, precedence) string,
const char* const Token::string_[NUM_TOKENS] = {
TOKEN_LIST(T, T, IGNORE_TOKEN)
TOKEN_LIST(T, T)
};
#undef T
#define T(name, string, precedence) precedence,
const int8_t Token::precedence_[NUM_TOKENS] = {
TOKEN_LIST(T, T, IGNORE_TOKEN)
TOKEN_LIST(T, T)
};
#undef T
@ -55,7 +55,7 @@ const int8_t Token::precedence_[NUM_TOKENS] = {
#define KT(a, b, c) 'T',
#define KK(a, b, c) 'K',
const char Token::token_type[] = {
TOKEN_LIST(KT, KK, IGNORE_TOKEN)
TOKEN_LIST(KT, KK)
};
#undef KT
#undef KK

5
deps/v8/src/token.h

@ -41,7 +41,6 @@ namespace internal {
//
// T: Non-keyword tokens
// K: Keyword tokens
// F: Future (reserved) keyword tokens
// IGNORE_TOKEN is a convenience macro that can be supplied as
// an argument (at any position) for a TOKEN_LIST call. It does
@ -49,7 +48,7 @@ namespace internal {
#define IGNORE_TOKEN(name, string, precedence)
#define TOKEN_LIST(T, K, F) \
#define TOKEN_LIST(T, K) \
/* End of source indicator. */ \
T(EOS, "EOS", 0) \
\
@ -182,7 +181,7 @@ class Token {
// All token values.
#define T(name, string, precedence) name,
enum Value {
TOKEN_LIST(T, T, IGNORE_TOKEN)
TOKEN_LIST(T, T)
NUM_TOKENS
};
#undef T
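For readers unfamiliar with the X-macro pattern, a self-contained sketch of what the two-parameter TOKEN_LIST change above boils down to (MINI_TOKEN_LIST, kNames, kTypes and main are hypothetical names used only for this illustration; the real list and tables are the ones in the hunks above):
#include <cstdio>
// The list takes one macro per token class: T for ordinary tokens,
// K for keywords. The third "future reserved" column has been dropped.
#define MINI_TOKEN_LIST(T, K) \
  T(LPAREN, "(", 0)           \
  K(IF, "if", 0)              \
  K(RETURN, "return", 0)
// Expand the same list twice to build parallel tables.
#define T(name, string, precedence) #name,
static const char* const kNames[] = { MINI_TOKEN_LIST(T, T) };
#undef T
#define KT(name, string, precedence) 'T',
#define KK(name, string, precedence) 'K',
static const char kTypes[] = { MINI_TOKEN_LIST(KT, KK) };
#undef KT
#undef KK
int main() {
  for (unsigned i = 0; i < sizeof(kTypes); ++i) {
    std::printf("%s is a '%c' token\n", kNames[i], kTypes[i]);
  }
  return 0;
}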

3
deps/v8/src/v8.cc

@ -28,6 +28,7 @@
#include "v8.h"
#include "isolate.h"
#include "elements.h"
#include "bootstrapper.h"
#include "debug.h"
#include "deoptimizer.h"
@ -212,6 +213,8 @@ void V8::InitializeOncePerProcess() {
// Peephole optimization might interfere with deoptimization.
FLAG_peephole_optimization = !use_crankshaft_;
ElementsAccessor::InitializeOncePerProcess();
}
} } // namespace v8::internal

10
deps/v8/src/v8natives.js

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -1428,7 +1428,9 @@ function FunctionSourceString(func) {
}
}
var name = %FunctionGetName(func);
var name = %FunctionNameShouldPrintAsAnonymous(func)
? 'anonymous'
: %FunctionGetName(func);
return 'function ' + name + source;
}
@ -1498,9 +1500,9 @@ function FunctionBind(this_arg) { // Length is 1.
// Set the correct length.
var length = (this.length - argc_bound) > 0 ? this.length - argc_bound : 0;
%FunctionSetLength(result, length);
%FunctionRemovePrototype(result);
%FunctionSetBound(result);
%BoundFunctionSetLength(result, length);
return result;
}
@ -1523,7 +1525,7 @@ function NewFunction(arg1) { // length == 1
// The call to SetNewFunctionAttributes will ensure the prototype
// property of the resulting function is enumerable (ECMA262, 15.3.5.2).
var f = %CompileString(source)();
%FunctionSetName(f, "anonymous");
%FunctionMarkNameShouldPrintAsAnonymous(f);
return %SetNewFunctionAttributes(f);
}

5
deps/v8/src/v8threads.cc

@ -94,6 +94,11 @@ bool Locker::IsLocked(v8::Isolate* isolate) {
}
bool Locker::IsActive() {
return active_;
}
Locker::~Locker() {
ASSERT(isolate_->thread_manager()->IsLockedByCurrentThread());
if (has_lock_) {

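A small embedder-side sketch of the Locker::IsActive() query added above, assuming the public v8.h API of this revision (the helper name and the single-threaded shortcut are illustrative, not part of the patch):
#include <v8.h>
static void RunWithLockIfNeeded() {
  // IsActive() reports whether any thread has ever used a Locker; purely
  // single-threaded embedders can skip taking the lock.
  if (v8::Locker::IsActive()) {
    v8::Locker locker;  // Locks the default isolate in this API revision.
    // ... compile and run scripts while holding the lock ...
  } else {
    // ... compile and run scripts directly ...
  }
}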
4
deps/v8/src/version.cc

@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 5
#define BUILD_NUMBER 3
#define PATCH_LEVEL 0
#define BUILD_NUMBER 4
#define PATCH_LEVEL 3
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0

95
deps/v8/src/weakmap.js

@ -0,0 +1,95 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file relies on the fact that the following declaration has been made
// in runtime.js:
// const $Object = global.Object;
const $WeakMap = global.WeakMap;
// -------------------------------------------------------------------
// Set the WeakMap function and constructor.
%SetCode($WeakMap, function(x) {
if (%_IsConstructCall()) {
%WeakMapInitialize(this);
} else {
return new $WeakMap();
}
});
function WeakMapGet(key) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
return %WeakMapGet(this, key);
}
function WeakMapSet(key, value) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
return %WeakMapSet(this, key, value);
}
function WeakMapHas(key) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
return !IS_UNDEFINED(%WeakMapGet(this, key));
}
function WeakMapDelete(key) {
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
if (!IS_UNDEFINED(%WeakMapGet(this, key))) {
%WeakMapSet(this, key, void 0);
return true;
} else {
return false;
}
}
// -------------------------------------------------------------------
function SetupWeakMap() {
// Setup the non-enumerable functions on the WeakMap prototype object.
InstallFunctionsOnHiddenPrototype($WeakMap.prototype, DONT_ENUM, $Array(
"get", WeakMapGet,
"set", WeakMapSet,
"has", WeakMapHas,
"delete", WeakMapDelete
));
}
SetupWeakMap();
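To see the new builtin from an embedder's point of view, a hedged C++ sketch using the public API of this revision (the script text and the main() harness are illustrative; the --harmony-weakmaps flag is the one the mjsunit test below declares):
#include <v8.h>
#include <cstring>
int main() {
  // WeakMap is behind a harmony flag at this revision.
  const char* flags = "--harmony-weakmaps";
  v8::V8::SetFlagsFromString(flags, static_cast<int>(std::strlen(flags)));
  v8::HandleScope scope;
  v8::Persistent<v8::Context> context = v8::Context::New();
  v8::Context::Scope context_scope(context);
  v8::Handle<v8::Script> script = v8::Script::Compile(v8::String::New(
      "var m = new WeakMap; var key = {}; m.set(key, 42); m.get(key)"));
  v8::Handle<v8::Value> result = script->Run();
  // result holds 42 if the natives file above was installed correctly.
  bool ok = result->Int32Value() == 42;
  context.Dispose();
  return ok ? 0 : 1;
}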

4
deps/v8/src/x64/deoptimizer-x64.cc

@ -38,7 +38,7 @@ namespace v8 {
namespace internal {
int Deoptimizer::table_entry_size_ = 10;
const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
@ -605,8 +605,6 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(continuation->entry()));
}
if (output_count_ - 1 == frame_index) iterator->Done();
}

15
deps/v8/src/x64/disasm-x64.cc

@ -58,7 +58,7 @@ struct ByteMnemonic {
};
static ByteMnemonic two_operands_instr[] = {
static const ByteMnemonic two_operands_instr[] = {
{ 0x00, BYTE_OPER_REG_OP_ORDER, "add" },
{ 0x01, OPER_REG_OP_ORDER, "add" },
{ 0x02, BYTE_REG_OPER_OP_ORDER, "add" },
@ -105,7 +105,7 @@ static ByteMnemonic two_operands_instr[] = {
};
static ByteMnemonic zero_operands_instr[] = {
static const ByteMnemonic zero_operands_instr[] = {
{ 0xC3, UNSET_OP_ORDER, "ret" },
{ 0xC9, UNSET_OP_ORDER, "leave" },
{ 0xF4, UNSET_OP_ORDER, "hlt" },
@ -125,14 +125,14 @@ static ByteMnemonic zero_operands_instr[] = {
};
static ByteMnemonic call_jump_instr[] = {
static const ByteMnemonic call_jump_instr[] = {
{ 0xE8, UNSET_OP_ORDER, "call" },
{ 0xE9, UNSET_OP_ORDER, "jmp" },
{ -1, UNSET_OP_ORDER, "" }
};
static ByteMnemonic short_immediate_instr[] = {
static const ByteMnemonic short_immediate_instr[] = {
{ 0x05, UNSET_OP_ORDER, "add" },
{ 0x0D, UNSET_OP_ORDER, "or" },
{ 0x15, UNSET_OP_ORDER, "adc" },
@ -145,7 +145,7 @@ static ByteMnemonic short_immediate_instr[] = {
};
static const char* conditional_code_suffix[] = {
static const char* const conditional_code_suffix[] = {
"o", "no", "c", "nc", "z", "nz", "na", "a",
"s", "ns", "pe", "po", "l", "ge", "le", "g"
};
@ -193,7 +193,7 @@ class InstructionTable {
InstructionDesc instructions_[256];
void Clear();
void Init();
void CopyTable(ByteMnemonic bm[], InstructionType type);
void CopyTable(const ByteMnemonic bm[], InstructionType type);
void SetTableRange(InstructionType type, byte start, byte end, bool byte_size,
const char* mnem);
void AddJumpConditionalShort();
@ -228,7 +228,8 @@ void InstructionTable::Init() {
}
void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
void InstructionTable::CopyTable(const ByteMnemonic bm[],
InstructionType type) {
for (int i = 0; bm[i].b >= 0; i++) {
InstructionDesc* id = &instructions_[bm[i].b];
id->mnem = bm[i].mnem;

3
deps/v8/test/cctest/SConscript

@ -96,7 +96,8 @@ SOURCES = {
'test-threads.cc',
'test-unbound-queue.cc',
'test-utils.cc',
'test-version.cc'
'test-version.cc',
'test-weakmaps.cc'
],
'arch:arm': [
'test-assembler-arm.cc',

1
deps/v8/test/cctest/cctest.gyp

@ -79,6 +79,7 @@
'test-log.cc',
'test-mark-compact.cc',
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
'test-regexp.cc',
'test-reloc-info.cc',

3
deps/v8/test/cctest/cctest.status

@ -38,6 +38,9 @@ test-api/ApplyInterruption: PASS || TIMEOUT
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
# We do not yet shrink weak maps after they have been emptied by the GC
test-weakmaps/Shrinking: FAIL
##############################################################################
[ $arch == arm ]

13
deps/v8/test/cctest/test-alloc.cc

@ -186,7 +186,9 @@ class Block {
TEST(CodeRange) {
const int code_range_size = 16*MB;
OS::Setup();
Isolate::Current()->code_range()->Setup(code_range_size);
Isolate::Current()->InitializeLoggingAndCounters();
CodeRange* code_range = new CodeRange(Isolate::Current());
code_range->Setup(code_range_size);
int current_allocated = 0;
int total_allocated = 0;
List<Block> blocks(1000);
@ -198,8 +200,7 @@ TEST(CodeRange) {
size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
Pseudorandom() % 5000 + 1;
size_t allocated = 0;
void* base = Isolate::Current()->code_range()->
AllocateRawMemory(requested, &allocated);
void* base = code_range->AllocateRawMemory(requested, &allocated);
CHECK(base != NULL);
blocks.Add(Block(base, static_cast<int>(allocated)));
current_allocated += static_cast<int>(allocated);
@ -207,8 +208,7 @@ TEST(CodeRange) {
} else {
// Free a block.
int index = Pseudorandom() % blocks.length();
Isolate::Current()->code_range()->FreeRawMemory(
blocks[index].base, blocks[index].size);
code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
current_allocated -= blocks[index].size;
if (index < blocks.length() - 1) {
blocks[index] = blocks.RemoveLast();
@ -218,5 +218,6 @@ TEST(CodeRange) {
}
}
Isolate::Current()->code_range()->TearDown();
code_range->TearDown();
delete code_range;
}

5
deps/v8/test/cctest/test-api.cc

@ -11661,7 +11661,7 @@ THREADED_TEST(PixelArray) {
}
HEAP->CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(i % 256, pixels->get(i));
CHECK_EQ(i % 256, pixels->get_scalar(i));
CHECK_EQ(i % 256, pixel_data[i]);
}
@ -12134,7 +12134,8 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
}
HEAP->CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array->get(i)));
CHECK_EQ(static_cast<int64_t>(i),
static_cast<int64_t>(array->get_scalar(i)));
CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
}

1
deps/v8/test/cctest/test-debug.cc

@ -5844,6 +5844,7 @@ TEST(DebuggerDebugMessageDispatch) {
TEST(DebuggerAgent) {
v8::V8::Initialize();
i::Debugger* debugger = i::Isolate::Current()->debugger();
// Make sure these ports are not used by other tests, so that tests can run
// in parallel.

8
deps/v8/test/cctest/test-heap.cc

@ -291,8 +291,8 @@ TEST(LocalHandles) {
TEST(GlobalHandles) {
GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
Handle<Object> h1;
Handle<Object> h2;
@ -339,8 +339,8 @@ static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
TEST(WeakGlobalHandlesScavenge) {
GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;
@ -377,8 +377,8 @@ TEST(WeakGlobalHandlesScavenge) {
TEST(WeakGlobalHandlesMark) {
GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;
@ -416,8 +416,8 @@ TEST(WeakGlobalHandlesMark) {
}
TEST(DeleteWeakGlobalHandle) {
GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;

128
deps/v8/test/cctest/test-parsing.cc

@ -42,7 +42,7 @@
namespace i = ::v8::internal;
TEST(KeywordMatcher) {
TEST(ScanKeywords) {
struct KeywordToken {
const char* keyword;
i::Token::Value token;
@ -50,90 +50,62 @@ TEST(KeywordMatcher) {
static const KeywordToken keywords[] = {
#define KEYWORD(t, s, d) { s, i::Token::t },
#define IGNORE(t, s, d) /* */
TOKEN_LIST(IGNORE, KEYWORD, IGNORE)
TOKEN_LIST(IGNORE_TOKEN, KEYWORD)
#undef KEYWORD
{ NULL, i::Token::IDENTIFIER }
};
static const char* future_keywords[] = {
#define FUTURE(t, s, d) s,
TOKEN_LIST(IGNORE, IGNORE, FUTURE)
#undef FUTURE
#undef IGNORE
NULL
};
KeywordToken key_token;
i::UnicodeCache unicode_cache;
i::byte buffer[32];
for (int i = 0; (key_token = keywords[i]).keyword != NULL; i++) {
i::KeywordMatcher matcher;
const char* keyword = key_token.keyword;
int length = i::StrLength(keyword);
for (int j = 0; j < length; j++) {
if (key_token.token == i::Token::INSTANCEOF && j == 2) {
// "in" is a prefix of "instanceof". It's the only keyword
// that is a prefix of another.
CHECK_EQ(i::Token::IN, matcher.token());
} else {
CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
}
matcher.AddChar(keyword[j]);
const i::byte* keyword =
reinterpret_cast<const i::byte*>(key_token.keyword);
int length = i::StrLength(key_token.keyword);
CHECK(static_cast<int>(sizeof(buffer)) >= length);
{
i::Utf8ToUC16CharacterStream stream(keyword, length);
i::JavaScriptScanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(key_token.token, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
}
CHECK_EQ(key_token.token, matcher.token());
// Adding more characters will make keyword matching fail.
matcher.AddChar('z');
CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
// Adding a keyword later will not make it match again.
matcher.AddChar('i');
matcher.AddChar('f');
CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
}
// Future keywords are not recognized.
const char* future_keyword;
for (int i = 0; (future_keyword = future_keywords[i]) != NULL; i++) {
i::KeywordMatcher matcher;
int length = i::StrLength(future_keyword);
for (int j = 0; j < length; j++) {
matcher.AddChar(future_keyword[j]);
// Removing characters will make keyword matching fail.
{
i::Utf8ToUC16CharacterStream stream(keyword, length - 1);
i::JavaScriptScanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
}
// Adding characters will make keyword matching fail.
static const char chars_to_append[] = { 'z', '0', '_' };
for (int j = 0; j < static_cast<int>(ARRAY_SIZE(chars_to_append)); ++j) {
memmove(buffer, keyword, length);
buffer[length] = chars_to_append[j];
i::Utf8ToUC16CharacterStream stream(buffer, length + 1);
i::JavaScriptScanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
}
// Replacing characters will make keyword matching fail.
{
memmove(buffer, keyword, length);
buffer[length - 1] = '_';
i::Utf8ToUC16CharacterStream stream(buffer, length);
i::JavaScriptScanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
}
CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
}
// Zero isn't ignored at first.
i::KeywordMatcher bad_start;
bad_start.AddChar(0);
CHECK_EQ(i::Token::IDENTIFIER, bad_start.token());
bad_start.AddChar('i');
bad_start.AddChar('f');
CHECK_EQ(i::Token::IDENTIFIER, bad_start.token());
// Zero isn't ignored at end.
i::KeywordMatcher bad_end;
bad_end.AddChar('i');
bad_end.AddChar('f');
CHECK_EQ(i::Token::IF, bad_end.token());
bad_end.AddChar(0);
CHECK_EQ(i::Token::IDENTIFIER, bad_end.token());
// Case isn't ignored.
i::KeywordMatcher bad_case;
bad_case.AddChar('i');
bad_case.AddChar('F');
CHECK_EQ(i::Token::IDENTIFIER, bad_case.token());
// If we mark it as failure, continuing won't help.
i::KeywordMatcher full_stop;
full_stop.AddChar('i');
CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
full_stop.Fail();
CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
full_stop.AddChar('f');
CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
}
TEST(ScanHTMLEndComments) {
v8::V8::Initialize();
// Regression test. See:
// http://code.google.com/p/chromium/issues/detail?id=53548
// Tests that --> is correctly interpreted as comment-to-end-of-line if there
@ -263,6 +235,8 @@ TEST(Preparsing) {
TEST(StandAlonePreParser) {
v8::V8::Initialize();
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@ -299,6 +273,8 @@ TEST(StandAlonePreParser) {
TEST(RegressChromium62639) {
v8::V8::Initialize();
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@ -320,6 +296,8 @@ TEST(RegressChromium62639) {
TEST(Regress928) {
v8::V8::Initialize();
// Preparsing didn't consider the catch clause of a try statement
// as with-content, which made it assume that a function inside
// the block could be lazily compiled, and an extra, unexpected,
@ -360,6 +338,8 @@ TEST(Regress928) {
TEST(PreParseOverflow) {
v8::V8::Initialize();
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@ -610,6 +590,8 @@ void TestStreamScanner(i::UC16CharacterStream* stream,
}
TEST(StreamScanner) {
v8::V8::Initialize();
const char* str1 = "{ foo get for : */ <- \n\n /*foo*/ bib";
i::Utf8ToUC16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
static_cast<unsigned>(strlen(str1)));
@ -690,6 +672,8 @@ void TestScanRegExp(const char* re_source, const char* expected) {
TEST(RegExpScanning) {
v8::V8::Initialize();
// RegExp token with added garbage at the end. The scanner should only
// scan the RegExp until the terminating slash just before "flipperwald".
TestScanRegExp("/b/flipperwald", "b");

8
deps/v8/test/cctest/test-serialize.cc

@ -99,10 +99,10 @@ static int make_code(TypeCode type, int id) {
TEST(ExternalReferenceEncoder) {
OS::Setup();
Isolate* isolate = i::Isolate::Current();
isolate->stats_table()->SetCounterFunction(counter_function);
HEAP->Setup(false);
v8::V8::Initialize();
ExternalReferenceEncoder encoder;
CHECK_EQ(make_code(BUILTIN, Builtins::kArrayCode),
Encode(encoder, Builtins::kArrayCode));
@ -139,10 +139,10 @@ TEST(ExternalReferenceEncoder) {
TEST(ExternalReferenceDecoder) {
OS::Setup();
Isolate* isolate = i::Isolate::Current();
isolate->stats_table()->SetCounterFunction(counter_function);
HEAP->Setup(false);
v8::V8::Initialize();
ExternalReferenceDecoder decoder;
CHECK_EQ(AddressOf(Builtins::kArrayCode),
decoder.Decode(make_code(BUILTIN, Builtins::kArrayCode)));

125
deps/v8/test/cctest/test-spaces.cc

@ -91,46 +91,74 @@ TEST(Page) {
}
namespace v8 {
namespace internal {
// Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
public:
TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
: isolate_(isolate),
old_allocator_(isolate->memory_allocator_) {
isolate->memory_allocator_ = allocator;
}
~TestMemoryAllocatorScope() {
isolate_->memory_allocator_ = old_allocator_;
}
private:
Isolate* isolate_;
MemoryAllocator* old_allocator_;
DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
};
} } // namespace v8::internal
TEST(MemoryAllocator) {
OS::Setup();
Isolate* isolate = Isolate::Current();
CHECK(HEAP->ConfigureHeapDefault());
CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(),
HEAP->MaxExecutableSize()));
OldSpace faked_space(HEAP,
HEAP->MaxReserved(),
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(heap->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->Setup(heap->MaxReserved(),
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
OldSpace faked_space(heap,
heap->MaxReserved(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
int total_pages = 0;
int requested = MemoryAllocator::kPagesPerChunk;
int allocated;
// If we request n pages, we should get n or n - 1.
Page* first_page =
isolate->memory_allocator()->AllocatePages(
requested, &allocated, &faked_space);
Page* first_page = memory_allocator->AllocatePages(
requested, &allocated, &faked_space);
CHECK(first_page->is_valid());
CHECK(allocated == requested || allocated == requested - 1);
total_pages += allocated;
Page* last_page = first_page;
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
last_page = p;
}
// Again, we should get n or n - 1 pages.
Page* others =
isolate->memory_allocator()->AllocatePages(
requested, &allocated, &faked_space);
Page* others = memory_allocator->AllocatePages(
requested, &allocated, &faked_space);
CHECK(others->is_valid());
CHECK(allocated == requested || allocated == requested - 1);
total_pages += allocated;
isolate->memory_allocator()->SetNextPage(last_page, others);
memory_allocator->SetNextPage(last_page, others);
int page_count = 0;
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
page_count++;
}
CHECK(total_pages == page_count);
@ -141,34 +169,39 @@ TEST(MemoryAllocator) {
// Freeing pages at the first chunk starting at or after the second page
// should free the entire second chunk. It will return the page it was passed
// (since the second page was in the first chunk).
Page* free_return = isolate->memory_allocator()->FreePages(second_page);
Page* free_return = memory_allocator->FreePages(second_page);
CHECK(free_return == second_page);
isolate->memory_allocator()->SetNextPage(first_page, free_return);
memory_allocator->SetNextPage(first_page, free_return);
// Freeing pages in the first chunk starting at the first page should free
// the first chunk and return an invalid page.
Page* invalid_page = isolate->memory_allocator()->FreePages(first_page);
Page* invalid_page = memory_allocator->FreePages(first_page);
CHECK(!invalid_page->is_valid());
isolate->memory_allocator()->TearDown();
memory_allocator->TearDown();
delete memory_allocator;
}
TEST(NewSpace) {
OS::Setup();
CHECK(HEAP->ConfigureHeapDefault());
CHECK(Isolate::Current()->memory_allocator()->Setup(
HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
Isolate* isolate = Isolate::Current();
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(heap->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->Setup(heap->MaxReserved(),
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
NewSpace new_space(HEAP);
NewSpace new_space(heap);
void* chunk =
Isolate::Current()->memory_allocator()->ReserveInitialChunk(
4 * HEAP->ReservedSemiSpaceSize());
memory_allocator->ReserveInitialChunk(4 * heap->ReservedSemiSpaceSize());
CHECK(chunk != NULL);
Address start = RoundUp(static_cast<Address>(chunk),
2 * HEAP->ReservedSemiSpaceSize());
CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize()));
2 * heap->ReservedSemiSpaceSize());
CHECK(new_space.Setup(start, 2 * heap->ReservedSemiSpaceSize()));
CHECK(new_space.HasBeenSetup());
while (new_space.Available() >= Page::kMaxHeapObjectSize) {
@ -178,28 +211,33 @@ TEST(NewSpace) {
}
new_space.TearDown();
Isolate::Current()->memory_allocator()->TearDown();
memory_allocator->TearDown();
delete memory_allocator;
}
TEST(OldSpace) {
OS::Setup();
CHECK(HEAP->ConfigureHeapDefault());
CHECK(Isolate::Current()->memory_allocator()->Setup(
HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
OldSpace* s = new OldSpace(HEAP,
HEAP->MaxOldGenerationSize(),
Isolate* isolate = Isolate::Current();
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(heap->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->Setup(heap->MaxReserved(),
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
OldSpace* s = new OldSpace(heap,
heap->MaxOldGenerationSize(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
CHECK(s != NULL);
void* chunk =
Isolate::Current()->memory_allocator()->ReserveInitialChunk(
4 * HEAP->ReservedSemiSpaceSize());
void* chunk = memory_allocator->ReserveInitialChunk(
4 * heap->ReservedSemiSpaceSize());
CHECK(chunk != NULL);
Address start = static_cast<Address>(chunk);
size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start;
size_t size = RoundUp(start, 2 * heap->ReservedSemiSpaceSize()) - start;
CHECK(s->Setup(start, size));
@ -209,13 +247,13 @@ TEST(OldSpace) {
s->TearDown();
delete s;
Isolate::Current()->memory_allocator()->TearDown();
memory_allocator->TearDown();
delete memory_allocator;
}
TEST(LargeObjectSpace) {
OS::Setup();
CHECK(HEAP->Setup(false));
v8::V8::Initialize();
LargeObjectSpace* lo = HEAP->lo_space();
CHECK(lo != NULL);
@ -247,9 +285,4 @@ TEST(LargeObjectSpace) {
CHECK(!lo->IsEmpty());
CHECK(lo->AllocateRaw(lo_size)->IsFailure());
lo->TearDown();
delete lo;
Isolate::Current()->memory_allocator()->TearDown();
}

149
deps/v8/test/cctest/test-weakmaps.cc

@ -0,0 +1,149 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "global-handles.h"
#include "snapshot.h"
#include "cctest.h"
using namespace v8::internal;
static Handle<JSWeakMap> AllocateJSWeakMap() {
Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
// Do not use handles for the hash table, it would make entries strong.
Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
ObjectHashTable* table = ObjectHashTable::cast(table_obj);
weakmap->set_table(table);
weakmap->set_next(Smi::FromInt(0));
return weakmap;
}
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
Handle<JSObject> key,
int value) {
Handle<ObjectHashTable> table = PutIntoObjectHashTable(
Handle<ObjectHashTable>(weakmap->table()),
Handle<JSObject>(JSObject::cast(*key)),
Handle<Smi>(Smi::FromInt(value)));
weakmap->set_table(*table);
}
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
ASSERT(id == reinterpret_cast<void*>(1234));
NumberOfWeakCalls++;
handle.Dispose();
}
TEST(Weakness) {
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
// Keep global reference to the key.
Handle<Object> key;
{
v8::HandleScope scope;
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
key = global_handles->Create(*object);
}
CHECK(!global_handles->IsWeak(key.location()));
// Put entry into weak map.
{
v8::HandleScope scope;
PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
}
CHECK_EQ(1, weakmap->table()->NumberOfElements());
// Force a full GC.
HEAP->CollectAllGarbage(false);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(1, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
// Make the global reference to the key weak.
{
v8::HandleScope scope;
global_handles->MakeWeak(key.location(),
reinterpret_cast<void*>(1234),
&WeakPointerCallback);
}
CHECK(global_handles->IsWeak(key.location()));
// Force a full GC.
// Perform two consecutive GCs because the first one will only clear
// weak references whereas the second one will also clear weak maps.
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(1, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, weakmap->table()->NumberOfElements());
CHECK_EQ(1, weakmap->table()->NumberOfDeletedElements());
}
TEST(Shrinking) {
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
// Check initial capacity.
CHECK_EQ(32, weakmap->table()->Capacity());
// Fill up weak map to trigger capacity change.
{
v8::HandleScope scope;
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
PutIntoWeakMap(weakmap, object, i);
}
}
// Check increased capacity.
CHECK_EQ(128, weakmap->table()->Capacity());
// Force a full GC.
CHECK_EQ(32, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
HEAP->CollectAllGarbage(false);
CHECK_EQ(0, weakmap->table()->NumberOfElements());
CHECK_EQ(32, weakmap->table()->NumberOfDeletedElements());
// Check shrunk capacity.
CHECK_EQ(32, weakmap->table()->Capacity());
}

13
deps/v8/test/cctest/testcfg.py

@ -48,7 +48,11 @@ class CcTestCase(test.TestCase):
return self.path[-1]
def BuildCommand(self, name):
serialization_file = join('obj', 'test', self.mode, 'serdes')
serialization_file = ''
if exists(join(self.context.buildspace, 'obj', 'test', self.mode)):
serialization_file = join('obj', 'test', self.mode, 'serdes')
else:
serialization_file = join('obj', 'serdes')
serialization_file += '_' + self.GetName()
serialization_file = join(self.context.buildspace, serialization_file)
serialization_file += ''.join(self.variant_flags).replace('-', '_')
@ -78,10 +82,15 @@ class CcTestConfiguration(test.TestConfiguration):
return ['cctests']
def ListTests(self, current_path, path, mode, variant_flags):
executable = join('obj', 'test', mode, 'cctest')
executable = 'cctest'
if utils.IsWindows():
executable += '.exe'
executable = join(self.context.buildspace, executable)
if not exists(executable):
executable = join('obj', 'test', mode, 'cctest')
if utils.IsWindows():
executable += '.exe'
executable = join(self.context.buildspace, executable)
output = test.Execute([executable, '--list'], self.context)
if output.exit_code != 0:
print output.stdout

173
deps/v8/test/mjsunit/d8-os.js

@ -30,6 +30,9 @@
// implemented on Windows, and even if it were, many of the things
// we are calling would not be available.
var TEST_DIR = "d8-os-test-directory-" + ((Math.random() * (1<<30)) | 0);
function arg_error(str) {
try {
eval(str);
@ -53,96 +56,98 @@ function str_error(str) {
if (this.os && os.system) {
try {
// Delete the dir if it is lying around from last time.
os.system("ls", ["d8-os-test-directory"]);
os.system("rm", ["-r", "d8-os-test-directory"]);
os.system("ls", [TEST_DIR]);
os.system("rm", ["-r", TEST_DIR]);
} catch (e) {
}
os.mkdirp("d8-os-test-directory");
os.chdir("d8-os-test-directory");
// Check the chdir worked.
os.system('ls', ['../d8-os-test-directory']);
// Simple create dir.
os.mkdirp("dir");
// Create dir in dir.
os.mkdirp("dir/foo");
// Check that they are there.
os.system('ls', ['dir/foo']);
// Check that we can detect when something is not there.
assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
// Check that mkdirp makes intermediate directories.
os.mkdirp("dir2/foo");
os.system("ls", ["dir2/foo"]);
// Check that mkdirp doesn't mind if the dir is already there.
os.mkdirp("dir2/foo");
os.mkdirp("dir2/foo/");
// Check that mkdirp can cope with trailing /
os.mkdirp("dir3/");
os.system("ls", ["dir3"]);
// Check that we get an error if the name is taken by a file.
os.system("sh", ["-c", "echo foo > file1"]);
os.system("ls", ["file1"]);
assertThrows("os.mkdirp('file1');", "mkdir over file1");
assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
assertThrows("os.mkdirp('file1/');", "mkdir over file3");
assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
// Create a dir we cannot read.
os.mkdirp("dir4", 0);
// This test fails if you are root since root can read any dir.
assertThrows("os.chdir('dir4');", "chdir dir4 I");
os.rmdir("dir4");
assertThrows("os.chdir('dir4');", "chdir dir4 II");
// Set umask.
var old_umask = os.umask(0777);
// Create a dir we cannot read.
os.mkdirp("dir5");
// This test fails if you are root since root can read any dir.
assertThrows("os.chdir('dir5');", "cd dir5 I");
os.rmdir("dir5");
assertThrows("os.chdir('dir5');", "chdir dir5 II");
os.umask(old_umask);
os.mkdirp("hest/fisk/../fisk/ged");
os.system("ls", ["hest/fisk/ged"]);
os.setenv("FOO", "bar");
var environment = os.system("printenv");
assertTrue(/FOO=bar/.test(environment));
// Check we time out.
var have_sleep = true;
var have_echo = true;
os.mkdirp(TEST_DIR);
os.chdir(TEST_DIR);
try {
os.system("ls", ["/bin/sleep"]);
} catch (e) {
have_sleep = false;
}
try {
os.system("ls", ["/bin/echo"]);
} catch (e) {
have_echo = false;
}
if (have_sleep) {
assertThrows("os.system('sleep', ['2000'], 200);", "sleep 1");
// Check we time out with total time.
assertThrows("os.system('sleep', ['2000'], -1, 200);", "sleep 2");
// Check that -1 means no timeout.
os.system('sleep', ['1'], -1, -1);
}
// Check that we don't fill up the process table with zombies.
// Disabled because it's too slow.
if (have_echo) {
//for (var i = 0; i < 65536; i++) {
// Check the chdir worked.
os.system('ls', ['../' + TEST_DIR]);
// Simple create dir.
os.mkdirp("dir");
// Create dir in dir.
os.mkdirp("dir/foo");
// Check that they are there.
os.system('ls', ['dir/foo']);
// Check that we can detect when something is not there.
assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
// Check that mkdirp makes intermediate directories.
os.mkdirp("dir2/foo");
os.system("ls", ["dir2/foo"]);
// Check that mkdirp doesn't mind if the dir is already there.
os.mkdirp("dir2/foo");
os.mkdirp("dir2/foo/");
// Check that mkdirp can cope with trailing /
os.mkdirp("dir3/");
os.system("ls", ["dir3"]);
// Check that we get an error if the name is taken by a file.
os.system("sh", ["-c", "echo foo > file1"]);
os.system("ls", ["file1"]);
assertThrows("os.mkdirp('file1');", "mkdir over file1");
assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
assertThrows("os.mkdirp('file1/');", "mkdir over file3");
assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
// Create a dir we cannot read.
os.mkdirp("dir4", 0);
// This test fails if you are root since root can read any dir.
assertThrows("os.chdir('dir4');", "chdir dir4 I");
os.rmdir("dir4");
assertThrows("os.chdir('dir4');", "chdir dir4 II");
// Set umask.
var old_umask = os.umask(0777);
// Create a dir we cannot read.
os.mkdirp("dir5");
// This test fails if you are root since root can read any dir.
assertThrows("os.chdir('dir5');", "cd dir5 I");
os.rmdir("dir5");
assertThrows("os.chdir('dir5');", "chdir dir5 II");
os.umask(old_umask);
os.mkdirp("hest/fisk/../fisk/ged");
os.system("ls", ["hest/fisk/ged"]);
os.setenv("FOO", "bar");
var environment = os.system("printenv");
assertTrue(/FOO=bar/.test(environment));
// Check we time out.
var have_sleep = true;
var have_echo = true;
try {
os.system("ls", ["/bin/sleep"]);
} catch (e) {
have_sleep = false;
}
try {
os.system("ls", ["/bin/echo"]);
} catch (e) {
have_echo = false;
}
if (have_sleep) {
assertThrows("os.system('sleep', ['2000'], 200);", "sleep 1");
// Check we time out with total time.
assertThrows("os.system('sleep', ['2000'], -1, 200);", "sleep 2");
// Check that -1 means no timeout.
os.system('sleep', ['1'], -1, -1);
}
// Check that we don't fill up the process table with zombies.
// Disabled because it's too slow.
if (have_echo) {
//for (var i = 0; i < 65536; i++) {
assertEquals("baz\n", os.system("echo", ["baz"]));
//}
//}
}
} finally {
os.chdir("..");
os.system("rm", ["-r", TEST_DIR]);
}
os.chdir("..");
os.system("rm", ["-r", "d8-os-test-directory"]);
// Too few args.
arg_error("os.umask();");
arg_error("os.system();");

145
deps/v8/test/mjsunit/harmony/weakmaps.js

@ -0,0 +1,145 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --harmony-weakmaps --expose-gc
// Test valid getter and setter calls
var m = new WeakMap;
assertDoesNotThrow(function () { m.get(new Object) });
assertDoesNotThrow(function () { m.set(new Object) });
assertDoesNotThrow(function () { m.has(new Object) });
assertDoesNotThrow(function () { m.delete(new Object) });
// Test invalid getter and setter calls
var m = new WeakMap;
assertThrows(function () { m.get(undefined) }, TypeError);
assertThrows(function () { m.set(undefined, 0) }, TypeError);
assertThrows(function () { m.get(0) }, TypeError);
assertThrows(function () { m.set(0, 0) }, TypeError);
assertThrows(function () { m.get('a-key') }, TypeError);
assertThrows(function () { m.set('a-key', 0) }, TypeError);
// Test expected mapping behavior
var m = new WeakMap;
function TestMapping(map, key, value) {
map.set(key, value);
assertSame(value, map.get(key));
}
TestMapping(m, new Object, 23);
TestMapping(m, new Object, 'the-value');
TestMapping(m, new Object, new Object);
// Test expected querying behavior
var m = new WeakMap;
var key = new Object;
TestMapping(m, key, 'to-be-present');
assertTrue(m.has(key));
assertFalse(m.has(new Object));
TestMapping(m, key, undefined);
assertFalse(m.has(key));
assertFalse(m.has(new Object));
// Test expected deletion behavior
var m = new WeakMap;
var key = new Object;
TestMapping(m, key, 'to-be-deleted');
assertTrue(m.delete(key));
assertFalse(m.delete(key));
assertFalse(m.delete(new Object));
assertSame(m.get(key), undefined);
// Test GC of map with entry
var m = new WeakMap;
var key = new Object;
m.set(key, 'not-collected');
gc();
assertSame('not-collected', m.get(key));
// Test GC of map with chained entries
var m = new WeakMap;
var head = new Object;
for (key = head, i = 0; i < 10; i++, key = m.get(key)) {
m.set(key, new Object);
}
gc();
var count = 0;
for (key = head; key != undefined; key = m.get(key)) {
count++;
}
assertEquals(11, count);
// Test property attribute [[Enumerable]]
var m = new WeakMap;
function props(x) {
var array = [];
for (var p in x) array.push(p);
return array.sort();
}
assertArrayEquals([], props(WeakMap));
assertArrayEquals([], props(WeakMap.prototype));
assertArrayEquals([], props(m));
// Test arbitrary properties on weak maps
var m = new WeakMap;
function TestProperty(map, property, value) {
map[property] = value;
assertEquals(value, map[property]);
}
for (i = 0; i < 20; i++) {
TestProperty(m, i, 'val' + i);
TestProperty(m, 'foo' + i, 'bar' + i);
}
TestMapping(m, new Object, 'foobar');
// Test direct constructor call
var m = WeakMap();
assertTrue(m instanceof WeakMap);
// Test some common JavaScript idioms
var m = new WeakMap;
assertTrue(m instanceof WeakMap);
assertTrue(WeakMap.prototype.set instanceof Function)
assertTrue(WeakMap.prototype.get instanceof Function)
assertTrue(WeakMap.prototype.has instanceof Function)
assertTrue(WeakMap.prototype.delete instanceof Function)
// Stress Test
// There is a proposed stress-test available at the es-discuss mailing list
// which cannot be reasonably automated. Check it out by hand if you like:
// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html

27
deps/v8/test/mjsunit/math-floor.js

@ -51,17 +51,6 @@ function test() {
testFloor(-Infinity, -Infinity);
testFloor(NaN, NaN);
// Ensure that a negative zero coming from Math.floor is properly handled
// by other operations.
function ifloor(x) {
return 1 / Math.floor(x);
}
assertEquals(-Infinity, ifloor(-0));
assertEquals(-Infinity, ifloor(-0));
assertEquals(-Infinity, ifloor(-0));
%OptimizeFunctionOnNextCall(ifloor);
assertEquals(-Infinity, ifloor(-0));
testFloor(0, 0.1);
testFloor(0, 0.49999999999999994);
testFloor(0, 0.5);
@ -140,19 +129,3 @@ function test() {
for (var i = 0; i < 500; i++) {
test();
}
// Regression test for a bug where a negative zero coming from Math.floor
// was not properly handled by other operations.
function floorsum(i, n) {
var ret = Math.floor(n);
while (--i > 0) {
ret += Math.floor(n);
}
return ret;
}
assertEquals(-0, floorsum(1, -0));
%OptimizeFunctionOnNextCall(floorsum);
// The optimized function will deopt. Run it with enough iterations to try
// to optimize via OSR (triggering the bug).
assertEquals(-0, floorsum(100000, -0));

17
deps/v8/test/mjsunit/math-round.js

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -44,21 +44,6 @@ testRound(Infinity, Infinity);
testRound(-Infinity, -Infinity);
testRound(NaN, NaN);
// Regression test for a bug where a negative zero coming from Math.round
// was not properly handled by other operations.
function roundsum(i, n) {
var ret = Math.round(n);
while (--i > 0) {
ret += Math.round(n);
}
return ret;
}
assertEquals(-0, roundsum(1, -0));
%OptimizeFunctionOnNextCall(roundsum);
// The optimized function will deopt. Run it with enough iterations to try
// to optimize via OSR (triggering the bug).
assertEquals(-0, roundsum(100000, -0));
testRound(1, 0.5);
testRound(1, 0.7);
testRound(1, 1);

38
deps/v8/src/shell.h → deps/v8/test/mjsunit/regress/regress-1419.js

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -24,32 +24,24 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// A simple interactive shell. Enable with --shell.
#ifndef V8_SHELL_H_
#define V8_SHELL_H_
#include "../public/debug.h"
// Test that using bind several times does not change the length of existing
// bound functions.
namespace v8 {
namespace internal {
function foo() {
}
// Debug event handler for interactive debugging.
void handle_debug_event(v8::DebugEvent event,
v8::Handle<v8::Object> exec_state,
v8::Handle<v8::Object> event_data,
v8::Handle<Value> data);
var f1 = function (x) {}.bind(foo);
var f2 = function () {};
assertEquals(1, f1.length);
class Shell {
public:
static void PrintObject(v8::Handle<v8::Value> obj);
// Run the read-eval loop, executing code in the specified
// environment.
static void Run(v8::Handle<v8::Context> context);
};
// the object we bind to can be any object
f2.bind(foo);
} } // namespace v8::internal
assertEquals(1, f1.length);
#endif // V8_SHELL_H_
var desc = Object.getOwnPropertyDescriptor(f1, 'length');
assertEquals(false, desc.writable);
assertEquals(false, desc.enumerable);
assertEquals(false, desc.configurable);

32
deps/v8/test/mjsunit/regress/regress-1546.js

@ -0,0 +1,32 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// See: http://code.google.com/p/v8/issues/detail?id=1546
// Shouldn't throw. The scanner incorrectly truncated a character to char
// before comparing it with "*", so it ended the comment early.
eval("/*\u822a/ */");

57
deps/v8/test/mjsunit/regress/regress-1583.js

@ -0,0 +1,57 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
// Regression test for a bug in recompilation of anonymous functions inside
// catch. We would incorrectly hoist them outside the catch in some cases.
function f() {
try {
throw 0;
} catch (e) {
try {
var x = { a: 'hest' };
x.m = function (e) { return x.a; };
} catch (e) {
}
}
return x;
}
var o = f();
assertEquals('hest', o.m());
assertEquals('hest', o.m());
assertEquals('hest', o.m());
%OptimizeFunctionOnNextCall(o.m);
assertEquals('hest', o.m());
// Fixing the bug above introduced (revealed?) an inconsistency in named
// getters and setters. The property name was also treated as a function
// name.
var global = 'horse';
var p = { get global() { return global; }};
assertEquals('horse', p.global);

64
deps/v8/test/mjsunit/regress/regress-1586.js

@ -0,0 +1,64 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --expose-debug-as debug
// Test debug evaluation for functions without local context, but with
// nested catch contexts.
function f() {
var i = 1; // Line 1.
{ // Line 2.
try { // Line 3.
throw 'stuff'; // Line 4.
} catch (e) { // Line 5.
x = 2; // Line 6.
}
}
};
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
// Set breakpoint on line 6.
var bp = Debug.setBreakPoint(f, 6);
function listener(event, exec_state, event_data, data) {
if (event == Debug.DebugEvent.Break) {
result = exec_state.frame().evaluate("i").value();
}
};
// Add the debug event listener.
Debug.setListener(listener);
result = -1;
f();
assertEquals(1, result);
// Clear breakpoint.
Debug.clearBreakPoint(bp);
// Get rid of the debug event listener.
Debug.setListener(null);

112
deps/v8/test/mjsunit/regress/regress-91517.js

@ -0,0 +1,112 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Getting property names of an object with a prototype chain that
// triggers dictionary elements in GetLocalPropertyNames() shouldn't
// crash the runtime
// Flags: --allow-natives-syntax
function Object1() {
  this.foo = 1;
}
function Object2() {
  this.fuz = 2;
  this.objects = new Object();
  this.fuz1 = 2;
  this.fuz2 = 2;
  this.fuz3 = 2;
  this.fuz4 = 2;
  this.fuz5 = 2;
  this.fuz6 = 2;
  this.fuz7 = 2;
  this.fuz8 = 2;
  this.fuz9 = 2;
  this.fuz10 = 2;
  this.fuz11 = 2;
  this.fuz12 = 2;
  this.fuz13 = 2;
  this.fuz14 = 2;
  this.fuz15 = 2;
  this.fuz16 = 2;
  this.fuz17 = 2;
  // Force dictionary-based properties
  for (x = 1; x < 1000; x++) {
    this["sdf" + x] = 2;
  }
}
function Object3() {
  this.boo = 3;
}
function Object4() {
  this.baz = 4;
}
obj1 = new Object1();
obj2 = new Object2();
obj3 = new Object3();
obj4 = new Object4();
%SetHiddenPrototype(obj4, obj3);
%SetHiddenPrototype(obj3, obj2);
%SetHiddenPrototype(obj2, obj1);
function contains(a, obj) {
  for (var i = 0; i < a.length; i++) {
    if (a[i] === obj) {
      return true;
    }
  }
  return false;
}
names = %GetLocalPropertyNames(obj4);
assertEquals(1021, names.length);
assertTrue(contains(names, "baz"));
assertTrue(contains(names, "boo"));
assertTrue(contains(names, "foo"));
assertTrue(contains(names, "fuz"));
assertTrue(contains(names, "fuz1"));
assertTrue(contains(names, "fuz2"));
assertTrue(contains(names, "fuz3"));
assertTrue(contains(names, "fuz4"));
assertTrue(contains(names, "fuz5"));
assertTrue(contains(names, "fuz6"));
assertTrue(contains(names, "fuz7"));
assertTrue(contains(names, "fuz8"));
assertTrue(contains(names, "fuz9"));
assertTrue(contains(names, "fuz10"));
assertTrue(contains(names, "fuz11"));
assertTrue(contains(names, "fuz12"));
assertTrue(contains(names, "fuz13"));
assertTrue(contains(names, "fuz14"));
assertTrue(contains(names, "fuz15"));
assertTrue(contains(names, "fuz16"));
assertTrue(contains(names, "fuz17"));
assertFalse(names[1020] == undefined);
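An illustrative follow-up, not in the original test: since the 1021 names include the dictionary-mode properties forced by the sdf loop and the "objects" field of Object2, spot checks like the following should also hold.
assertTrue(contains(names, "objects"));
assertTrue(contains(names, "sdf1"));
assertTrue(contains(names, "sdf999"));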

35
deps/v8/test/mjsunit/regress/regress-91787.js

@ -0,0 +1,35 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Unterminated non-ASCII string literals in JSON code were not
// detected correctly.
// Shouldn't crash (due to stack overflow).
// Should throw due to invalid syntax.
assertThrows(function() {
  JSON.parse('"\x80unterminated');
});
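For contrast, a minimal sketch (not part of the regression test), assuming the standard JSON grammar: the same non-ASCII character is accepted once the string literal is properly terminated.
assertEquals('\x80ok', JSON.parse('"\x80ok"'));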

9
deps/v8/test/preparser/testcfg.py

@ -27,7 +27,7 @@
import test
import os
from os.path import join, dirname, exists
from os.path import join, dirname, exists, isfile
import platform
import utils
import re
@ -122,10 +122,15 @@ class PreparserTestConfiguration(test.TestConfiguration):
{"Test": Test, "Template": Template}, {})
  def ListTests(self, current_path, path, mode, variant_flags):
    executable = join('obj', 'preparser', mode, 'preparser')
    executable = 'preparser'
    if utils.IsWindows():
      executable += '.exe'
    executable = join(self.context.buildspace, executable)
    if not isfile(executable):
      executable = join('obj', 'preparser', mode, 'preparser')
      if utils.IsWindows():
        executable += '.exe'
      executable = join(self.context.buildspace, executable)
    expectations = self.GetExpectations()
    result = []
    # Find all .js files in tests/preparser directory.

119
deps/v8/tools/grokdump.py

@ -345,7 +345,7 @@ class MinidumpReader(object):
for r in self.memory_list64.ranges:
if r.start <= address < r.start + r.size:
return self.memory_list64.base_rva + offset + address - r.start
offset += r.size
offset += r.size
if self.memory_list is not None:
for r in self.memory_list.ranges:
if r.start <= address < r.start + r.memory.data_size:
@ -379,64 +379,65 @@ class MinidumpReader(object):
# };
# static P p;
INSTANCE_TYPES = {
64: "SYMBOL_TYPE",
68: "ASCII_SYMBOL_TYPE",
65: "CONS_SYMBOL_TYPE",
69: "CONS_ASCII_SYMBOL_TYPE",
66: "EXTERNAL_SYMBOL_TYPE",
74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
70: "EXTERNAL_ASCII_SYMBOL_TYPE",
0: "STRING_TYPE",
4: "ASCII_STRING_TYPE",
1: "CONS_STRING_TYPE",
5: "CONS_ASCII_STRING_TYPE",
2: "EXTERNAL_STRING_TYPE",
10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
6: "EXTERNAL_ASCII_STRING_TYPE",
6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
128: "MAP_TYPE",
129: "CODE_TYPE",
130: "ODDBALL_TYPE",
131: "JS_GLOBAL_PROPERTY_CELL_TYPE",
132: "HEAP_NUMBER_TYPE",
133: "FOREIGN_TYPE",
134: "BYTE_ARRAY_TYPE",
135: "EXTERNAL_BYTE_ARRAY_TYPE",
136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
137: "EXTERNAL_SHORT_ARRAY_TYPE",
138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
139: "EXTERNAL_INT_ARRAY_TYPE",
140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
141: "EXTERNAL_FLOAT_ARRAY_TYPE",
143: "EXTERNAL_PIXEL_ARRAY_TYPE",
145: "FILLER_TYPE",
146: "ACCESSOR_INFO_TYPE",
147: "ACCESS_CHECK_INFO_TYPE",
148: "INTERCEPTOR_INFO_TYPE",
149: "CALL_HANDLER_INFO_TYPE",
150: "FUNCTION_TEMPLATE_INFO_TYPE",
151: "OBJECT_TEMPLATE_INFO_TYPE",
152: "SIGNATURE_INFO_TYPE",
153: "TYPE_SWITCH_INFO_TYPE",
154: "SCRIPT_TYPE",
155: "CODE_CACHE_TYPE",
156: "POLYMORPHIC_CODE_CACHE_TYPE",
159: "FIXED_ARRAY_TYPE",
160: "SHARED_FUNCTION_INFO_TYPE",
161: "JS_MESSAGE_OBJECT_TYPE",
162: "JS_VALUE_TYPE",
163: "JS_OBJECT_TYPE",
164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
165: "JS_GLOBAL_OBJECT_TYPE",
166: "JS_BUILTINS_OBJECT_TYPE",
167: "JS_GLOBAL_PROXY_TYPE",
168: "JS_ARRAY_TYPE",
169: "JS_PROXY_TYPE",
170: "JS_REGEXP_TYPE",
171: "JS_FUNCTION_TYPE",
172: "JS_FUNCTION_PROXY_TYPE",
157: "DEBUG_INFO_TYPE",
158: "BREAK_POINT_INFO_TYPE",
64: "SYMBOL_TYPE",
68: "ASCII_SYMBOL_TYPE",
65: "CONS_SYMBOL_TYPE",
69: "CONS_ASCII_SYMBOL_TYPE",
66: "EXTERNAL_SYMBOL_TYPE",
74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
70: "EXTERNAL_ASCII_SYMBOL_TYPE",
0: "STRING_TYPE",
4: "ASCII_STRING_TYPE",
1: "CONS_STRING_TYPE",
5: "CONS_ASCII_STRING_TYPE",
2: "EXTERNAL_STRING_TYPE",
10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
6: "EXTERNAL_ASCII_STRING_TYPE",
6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
128: "MAP_TYPE",
129: "CODE_TYPE",
130: "ODDBALL_TYPE",
131: "JS_GLOBAL_PROPERTY_CELL_TYPE",
132: "HEAP_NUMBER_TYPE",
133: "FOREIGN_TYPE",
134: "BYTE_ARRAY_TYPE",
135: "EXTERNAL_BYTE_ARRAY_TYPE",
136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
137: "EXTERNAL_SHORT_ARRAY_TYPE",
138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
139: "EXTERNAL_INT_ARRAY_TYPE",
140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
141: "EXTERNAL_FLOAT_ARRAY_TYPE",
143: "EXTERNAL_PIXEL_ARRAY_TYPE",
145: "FILLER_TYPE",
146: "ACCESSOR_INFO_TYPE",
147: "ACCESS_CHECK_INFO_TYPE",
148: "INTERCEPTOR_INFO_TYPE",
149: "CALL_HANDLER_INFO_TYPE",
150: "FUNCTION_TEMPLATE_INFO_TYPE",
151: "OBJECT_TEMPLATE_INFO_TYPE",
152: "SIGNATURE_INFO_TYPE",
153: "TYPE_SWITCH_INFO_TYPE",
154: "SCRIPT_TYPE",
155: "CODE_CACHE_TYPE",
156: "POLYMORPHIC_CODE_CACHE_TYPE",
159: "FIXED_ARRAY_TYPE",
160: "SHARED_FUNCTION_INFO_TYPE",
161: "JS_MESSAGE_OBJECT_TYPE",
162: "JS_VALUE_TYPE",
163: "JS_OBJECT_TYPE",
164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
165: "JS_GLOBAL_OBJECT_TYPE",
166: "JS_BUILTINS_OBJECT_TYPE",
167: "JS_GLOBAL_PROXY_TYPE",
168: "JS_ARRAY_TYPE",
169: "JS_PROXY_TYPE",
170: "JS_WEAK_MAP_TYPE",
171: "JS_REGEXP_TYPE",
172: "JS_FUNCTION_TYPE",
173: "JS_FUNCTION_PROXY_TYPE",
157: "DEBUG_INFO_TYPE",
158: "BREAK_POINT_INFO_TYPE",
}

222
deps/v8/tools/gyp/v8.gyp

@ -26,11 +26,6 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'includes': [
'../../build/common.gypi',
'../../build/v8-features.gypi',
],
'variables': {
'use_system_v8%': 0,
'msvs_use_common_release': 0,
@ -59,6 +54,12 @@
# well when compiling for the ARM target.
'v8_use_arm_eabi_hardfloat%': 'false',
'v8_enable_debugger_support%': 1,
# Chrome needs this definition unconditionally. For standalone V8 builds,
# it's handled in common.gypi.
'want_separate_host_toolset%': 1,
'v8_use_snapshot%': 'true',
'host_os%': '<(OS)',
'v8_use_liveobjectlist%': 'false',
@ -66,10 +67,11 @@
'conditions': [
['use_system_v8==0', {
'target_defaults': {
'defines': [
'ENABLE_DEBUGGER_SUPPORT',
],
'conditions': [
['v8_enable_debugger_support==1', {
'defines': ['ENABLE_DEBUGGER_SUPPORT',],
},
],
['OS!="mac"', {
# TODO(mark): The OS!="mac" conditional is temporary. It can be
# removed once the Mac Chromium build stops setting target_arch to
@ -102,6 +104,13 @@
'USE_EABI_HARDFLOAT=1',
'CAN_USE_VFP_INSTRUCTIONS',
],
'cflags': [
'-mfloat-abi=hard',
],
}, {
'defines': [
'USE_EABI_HARDFLOAT=0',
],
}],
],
}],
@ -154,12 +163,20 @@
},
'VCLinkerTool': {
'LinkIncremental': '2',
# For future reference, the stack size needs to be increased
# when building for Windows 64-bit, otherwise some test cases
# can cause stack overflow.
# 'StackReserveSize': '297152',
},
},
'conditions': [
['OS=="freebsd" or OS=="openbsd"', {
'cflags': [ '-I/usr/local/include' ],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
'cflags': [ '-Wall', '-Werror', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor' ],
}],
],
},
'Release': {
@ -224,6 +241,10 @@
'OptimizeReferences': '2',
'OptimizeForWindows98': '1',
'EnableCOMDATFolding': '2',
# For future reference, the stack size needs to be
# increased when building for Windows 64-bit, otherwise
# some test cases can cause stack overflow.
# 'StackReserveSize': '297152',
},
},
}],
@ -234,8 +255,12 @@
'targets': [
{
'target_name': 'v8',
'toolsets': ['host', 'target'],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['v8_use_snapshot=="true"', {
'dependencies': ['v8_snapshot'],
},
@ -277,23 +302,33 @@
],
'direct_dependent_settings': {
'include_dirs': [
'../../include',
'../../include',
],
},
},
{
'target_name': 'v8_snapshot',
'type': '<(library)',
'toolsets': ['host', 'target'],
'conditions': [
['component=="shared_library"', {
'conditions': [
# The ARM assembler assumes the host is 32 bits, so force building
# 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
'dependencies': ['mksnapshot#host', 'js2c#host'],
}, {
'toolsets': ['target'],
'dependencies': ['mksnapshot', 'js2c'],
}],
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
],
}],
['component=="shared_library"', {
'conditions': [
['OS=="win"', {
'defines': [
'BUILDING_V8_SHARED',
@ -317,8 +352,6 @@
}],
],
'dependencies': [
'mksnapshot#host',
'js2c#host',
'v8_base',
],
'include_dirs+': [
@ -379,16 +412,13 @@
'<@(mksnapshot_flags)',
'<@(_outputs)'
],
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'v8_nosnapshot',
'type': '<(library)',
'toolsets': ['host', 'target'],
'dependencies': [
'js2c#host',
'v8_base',
],
'include_dirs+': [
@ -400,11 +430,22 @@
'../../src/snapshot-empty.cc',
],
'conditions': [
# The ARM assembler assumes the host is 32 bits, so force building
# 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
],
}],
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
'dependencies': ['js2c#host'],
}, {
'toolsets': ['target'],
'dependencies': ['js2c'],
}],
['component=="shared_library"', {
'defines': [
@ -417,7 +458,6 @@
{
'target_name': 'v8_base',
'type': '<(library)',
'toolsets': ['host', 'target'],
'include_dirs+': [
'../../src',
],
@ -494,6 +534,8 @@
'../../src/diy-fp.cc',
'../../src/diy-fp.h',
'../../src/double.h',
'../../src/elements.cc',
'../../src/elements.h',
'../../src/execution.cc',
'../../src/execution.h',
'../../src/factory.cc',
@ -616,7 +658,6 @@
'../../src/scopes.h',
'../../src/serialize.cc',
'../../src/serialize.h',
'../../src/shell.h',
'../../src/small-pointer-list.h',
'../../src/smart-pointer.h',
'../../src/snapshot-common.cc',
@ -672,10 +713,12 @@
'../../src/extensions/gc-extension.h',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['v8_target_arch=="arm"', {
'include_dirs+': [
'../../src/arm',
],
'sources': [
'../../src/arm/assembler-arm-inl.h',
'../../src/arm/assembler-arm.cc',
@ -711,16 +754,17 @@
'conditions': [
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['host_arch=="x64" and _toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}]
]
['host_arch=="x64"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
],
}],
],
}],
['v8_target_arch=="ia32" or v8_target_arch=="mac" or OS=="mac"', {
'include_dirs+': [
'../../src/ia32',
],
'sources': [
'../../src/ia32/assembler-ia32-inl.h',
'../../src/ia32/assembler-ia32.cc',
@ -752,9 +796,6 @@
],
}],
['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', {
'include_dirs+': [
'../../src/x64',
],
'sources': [
'../../src/x64/assembler-x64-inl.h',
'../../src/x64/assembler-x64.cc',
@ -787,10 +828,6 @@
}],
['OS=="linux"', {
'link_settings': {
'libraries': [
# Needed for clock_gettime() used by src/platform-linux.cc.
'-lrt',
],
'conditions': [
['v8_compress_startup_data=="bz2"', {
'libraries': [
@ -876,7 +913,13 @@
{
'target_name': 'js2c',
'type': 'none',
'toolsets': ['host'],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
],
'variables': {
'library_files': [
'../../src/runtime.js',
@ -896,8 +939,9 @@
'../../src/macros.py',
],
'experimental_library_files': [
'../../src/proxy.js',
'../../src/macros.py',
'../../src/proxy.js',
'../../src/weakmap.js',
],
},
'actions': [
@ -918,7 +962,6 @@
'<(v8_compress_startup_data)',
'<@(library_files)'
],
'msvs_cygwin_shell': 0,
},
{
'action_name': 'js2c_experimental',
@ -937,14 +980,12 @@
'<(v8_compress_startup_data)',
'<@(experimental_library_files)'
],
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'mksnapshot',
'type': 'executable',
'toolsets': ['host'],
'dependencies': [
'v8_nosnapshot',
],
@ -955,11 +996,20 @@
'../../src/mksnapshot.cc',
],
'conditions': [
# The ARM assembler assumes the host is 32 bits, so force building
# 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
],
}],
['v8_compress_startup_data=="bz2"', {
'libraries': [
@ -970,7 +1020,6 @@
{
'target_name': 'v8_shell',
'type': 'executable',
'toolsets': ['host'],
'dependencies': [
'v8'
],
@ -978,29 +1027,66 @@
'../../samples/shell.cc',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
['OS=="win"', {
# This could be gotten by not setting chromium_code, if that's OK.
'defines': ['_CRT_SECURE_NO_WARNINGS'],
}],
# The ARM assembler assumes the host is 32 bits, so force building
# 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
]}],
],
},
{
'target_name': 'preparser_lib',
'type': '<(library)',
'include_dirs+': [
'../../src',
],
'sources': [
'../../src/allocation.cc',
'../../src/bignum.cc',
'../../src/cached-powers.cc',
'../../src/conversions.cc',
'../../src/hashmap.cc',
'../../src/preparse-data.cc',
'../../src/preparser.cc',
'../../src/preparser-api.cc',
'../../src/scanner-base.cc',
'../../src/strtod.cc',
'../../src/token.cc',
'../../src/unicode.cc',
'../../src/utils.cc',
],
},
],
}, { # use_system_v8 != 0
'targets': [
{
'target_name': 'v8',
'type': 'settings',
'toolsets': ['host', 'target'],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['v8_target_arch=="arm" and host_arch=="x64"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
'ldflags': ['-m32'],
}],
],
}],
],
'link_settings': {
'libraries': [
'-lv8',
@ -1010,7 +1096,13 @@
{
'target_name': 'v8_shell',
'type': 'none',
'toolsets': ['host'],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
],
'dependencies': [
'v8'
],

Some files were not shown because too many files changed in this diff
