From 940a6863ead6622e5439e07be631359c31e63b68 Mon Sep 17 00:00:00 2001
From: isaacs
Date: Sat, 9 Jun 2012 08:09:42 -0700
Subject: [PATCH] Roll V8 back to 3.9.24.31

---
 deps/v8/AUTHORS | 1 -
 deps/v8/ChangeLog | 160 -
 deps/v8/Makefile | 74 +-
 deps/v8/SConstruct | 13 +
 deps/v8/build/armu.gypi | 36 +
 deps/v8/build/common.gypi | 52 +-
 deps/v8/build/gyp_v8 | 38 +-
 deps/v8/build/mipsu.gypi | 33 +
 deps/v8/build/standalone.gypi | 8 +-
 deps/v8/include/v8-profiler.h | 81 +-
 deps/v8/include/v8.h | 213 +-
 deps/v8/samples/lineprocessor.cc | 6 +-
 deps/v8/samples/samples.gyp | 8 +-
 deps/v8/samples/shell.cc | 25 +-
 deps/v8/src/api.cc | 99 +-
 deps/v8/src/api.h | 1 -
 deps/v8/src/apiutils.h | 9 +-
 deps/v8/src/arguments.h | 13 +-
 deps/v8/src/arm/code-stubs-arm.cc | 55 +-
 deps/v8/src/arm/full-codegen-arm.cc | 235 +-
 deps/v8/src/arm/lithium-arm.cc | 141 +-
 deps/v8/src/arm/lithium-arm.h | 90 +-
 deps/v8/src/arm/lithium-codegen-arm.cc | 264 +-
 deps/v8/src/arm/lithium-codegen-arm.h | 19 +-
 deps/v8/src/arm/macro-assembler-arm.cc | 29 +-
 deps/v8/src/arm/macro-assembler-arm.h | 11 +-
 deps/v8/src/arm/regexp-macro-assembler-arm.cc | 54 +-
 deps/v8/src/arm/regexp-macro-assembler-arm.h | 8 -
 deps/v8/src/arm/stub-cache-arm.cc | 118 +-
 deps/v8/src/array.js | 166 +-
 deps/v8/src/assembler.cc | 71 +-
 deps/v8/src/assembler.h | 2 -
 deps/v8/src/ast.cc | 218 +-
 deps/v8/src/ast.h | 11 +-
 deps/v8/src/bootstrapper.cc | 4 +-
 deps/v8/src/builtins.cc | 66 +-
 deps/v8/src/builtins.h | 1 +
 deps/v8/src/bytecodes-irregexp.h | 35 +-
 deps/v8/src/code-stubs.cc | 19 +-
 deps/v8/src/compiler-intrinsics.h | 17 -
 deps/v8/src/contexts.h | 2 +-
 deps/v8/src/conversions-inl.h | 4 +-
 deps/v8/src/d8.cc | 26 +-
 deps/v8/src/d8.js | 2 +-
 deps/v8/src/debug-agent.cc | 26 +-
 deps/v8/src/debug-debugger.js | 57 +-
 deps/v8/src/debug.cc | 7 +
 deps/v8/src/debug.h | 5 +-
 deps/v8/src/double.h | 6 +
 deps/v8/src/elements.cc | 20 +-
 deps/v8/src/elements.h | 1 -
 .../externalize-string-extension.cc | 7 +-
 deps/v8/src/extensions/gc-extension.cc | 5 +-
 deps/v8/src/factory.cc | 23 +-
 deps/v8/src/factory.h | 12 +-
 deps/v8/src/flag-definitions.h | 11 +-
 deps/v8/src/frames.cc | 28 +-
 deps/v8/src/frames.h | 2 -
 deps/v8/src/full-codegen.cc | 133 +-
 deps/v8/src/full-codegen.h | 31 +-
 deps/v8/src/handles.cc | 6 +-
 deps/v8/src/hashmap.h | 10 +-
 deps/v8/src/heap-inl.h | 7 +-
 deps/v8/src/heap-profiler.cc | 42 +-
 deps/v8/src/heap-profiler.h | 11 +-
 deps/v8/src/heap.cc | 258 +-
 deps/v8/src/heap.h | 37 +-
 deps/v8/src/hydrogen-instructions.cc | 111 +-
 deps/v8/src/hydrogen-instructions.h | 233 +-
 deps/v8/src/hydrogen.cc | 1076 +---
 deps/v8/src/hydrogen.h | 70 +-
 deps/v8/src/ia32/builtins-ia32.cc | 11 +-
 deps/v8/src/ia32/code-stubs-ia32.cc | 112 +-
 deps/v8/src/ia32/codegen-ia32.cc | 18 +-
 deps/v8/src/ia32/debug-ia32.cc | 8 +-
 deps/v8/src/ia32/deoptimizer-ia32.cc | 16 +-
 deps/v8/src/ia32/full-codegen-ia32.cc | 254 +-
 deps/v8/src/ia32/ic-ia32.cc | 164 +-
 deps/v8/src/ia32/lithium-codegen-ia32.cc | 174 +-
 deps/v8/src/ia32/lithium-codegen-ia32.h | 8 +-
 deps/v8/src/ia32/lithium-ia32.cc | 70 +-
 deps/v8/src/ia32/lithium-ia32.h | 63 +-
 .../src/ia32/regexp-macro-assembler-ia32.cc | 63 +-
 .../v8/src/ia32/regexp-macro-assembler-ia32.h | 8 -
 deps/v8/src/ia32/stub-cache-ia32.cc | 333 +-
 deps/v8/src/ic.cc | 35 +-
 deps/v8/src/incremental-marking.cc | 13 -
 deps/v8/src/interface.cc | 13 +-
 deps/v8/src/interface.h | 46 +-
 deps/v8/src/interpreter-irregexp.cc | 87 +-
 deps/v8/src/isolate.cc | 9 +-
 deps/v8/src/isolate.h | 28 +-
 deps/v8/src/jsregexp.cc | 1874 +++---
 deps/v8/src/jsregexp.h | 450 +-
 deps/v8/src/lazy-instance.h | 10 +-
 deps/v8/src/list-inl.h | 23 +-
 deps/v8/src/list.h | 8 +-
 deps/v8/src/lithium-allocator.cc | 2 +-
 deps/v8/src/lithium.cc | 34 +-
 deps/v8/src/lithium.h | 32 +-
 deps/v8/src/liveedit-debugger.js | 5 -
 deps/v8/src/log.cc | 15 +-
 deps/v8/src/log.h | 2 -
 deps/v8/src/macros.py | 10 -
 deps/v8/src/mark-compact.cc | 57 +-
 deps/v8/src/mark-compact.h | 2 -
 deps/v8/src/math.js | 1 +
 deps/v8/src/mips/code-stubs-mips.cc | 59 +-
 deps/v8/src/mips/constants-mips.h | 5 +
 deps/v8/src/mips/full-codegen-mips.cc | 245 +-
 deps/v8/src/mips/lithium-codegen-mips.cc | 147 +-
 deps/v8/src/mips/lithium-codegen-mips.h | 8 +-
 deps/v8/src/mips/lithium-mips.cc | 78 +-
 deps/v8/src/mips/lithium-mips.h | 69 +-
 .../src/mips/regexp-macro-assembler-mips.cc | 97 +-
 .../v8/src/mips/regexp-macro-assembler-mips.h | 8 -
 deps/v8/src/mips/stub-cache-mips.cc | 137 +-
 deps/v8/src/mirror-debugger.js | 88 +-
 deps/v8/src/objects-debug.cc | 12 -
 deps/v8/src/objects-inl.h | 81 +-
 deps/v8/src/objects-printer.cc | 19 +-
 deps/v8/src/objects-visiting-inl.h | 4 +-
 deps/v8/src/objects-visiting.cc | 1 -
 deps/v8/src/objects-visiting.h | 17 -
 deps/v8/src/objects.cc | 74 +-
 deps/v8/src/objects.h | 57 +-
 deps/v8/src/parser.cc | 49 +-
 deps/v8/src/platform-cygwin.cc | 46 +-
 deps/v8/src/platform-freebsd.cc | 46 +-
 deps/v8/src/platform-linux.cc | 86 +-
 deps/v8/src/platform-macos.cc | 38 +-
 deps/v8/src/platform-nullos.cc | 5 -
 deps/v8/src/platform-openbsd.cc | 38 +-
 deps/v8/src/platform-posix.cc | 30 +-
 deps/v8/src/platform-posix.h | 5 +-
 deps/v8/src/platform-solaris.cc | 46 +-
 deps/v8/src/platform-win32.cc | 110 +-
 deps/v8/src/platform.h | 3 -
 deps/v8/src/preparser.cc | 10 +-
 deps/v8/src/preparser.h | 15 +-
 deps/v8/src/profile-generator-inl.h | 6 -
 deps/v8/src/profile-generator.cc | 1180 ++--
 deps/v8/src/profile-generator.h | 119 +-
 deps/v8/src/property.h | 7 +
 .../src/regexp-macro-assembler-irregexp-inl.h | 10 -
 .../v8/src/regexp-macro-assembler-irregexp.cc | 36 -
 deps/v8/src/regexp-macro-assembler-irregexp.h | 8 -
 deps/v8/src/regexp-macro-assembler-tracer.cc | 109 +-
 deps/v8/src/regexp-macro-assembler-tracer.h | 7 -
 deps/v8/src/regexp-macro-assembler.h | 18 +-
 deps/v8/src/regexp.js | 22 +-
 deps/v8/src/rewriter.cc | 2 +-
 deps/v8/src/runtime-profiler.cc | 27 +-
 deps/v8/src/runtime-profiler.h | 10 +-
 deps/v8/src/runtime.cc | 480 +-
 deps/v8/src/runtime.h | 5 -
 deps/v8/src/scanner.cc | 18 +-
 deps/v8/src/scanner.h | 9 +-
 deps/v8/src/scopeinfo.cc | 10 +-
 deps/v8/src/scopes.cc | 44 +-
 deps/v8/src/scopes.h | 19 +-
 deps/v8/src/serialize.cc | 2 +-
 deps/v8/src/small-pointer-list.h | 10 -
 deps/v8/src/spaces-inl.h | 20 +-
 deps/v8/src/spaces.cc | 58 +-
 deps/v8/src/spaces.h | 21 +-
 deps/v8/src/string.js | 151 +-
 deps/v8/src/stub-cache.cc | 10 +-
 deps/v8/src/utils.cc | 15 -
 deps/v8/src/utils.h | 26 -
 deps/v8/src/v8-counters.h | 2 -
 deps/v8/src/v8.cc | 16 +-
 deps/v8/src/v8globals.h | 4 -
 deps/v8/src/version.cc | 6 +-
 deps/v8/src/x64/code-stubs-x64.cc | 64 +-
 deps/v8/src/x64/deoptimizer-x64.cc | 42 +-
 deps/v8/src/x64/disasm-x64.cc | 6 +-
 deps/v8/src/x64/full-codegen-x64.cc | 384 +-
 deps/v8/src/x64/lithium-codegen-x64.cc | 157 +-
 deps/v8/src/x64/lithium-codegen-x64.h | 8 +-
 deps/v8/src/x64/lithium-x64.cc | 78 +-
 deps/v8/src/x64/lithium-x64.h | 72 +-
 deps/v8/src/x64/macro-assembler-x64.cc | 18 +-
 deps/v8/src/x64/macro-assembler-x64.h | 2 -
 deps/v8/src/x64/regexp-macro-assembler-x64.cc | 56 +-
 deps/v8/src/x64/regexp-macro-assembler-x64.h | 8 -
 deps/v8/src/x64/stub-cache-x64.cc | 111 +-
 deps/v8/test/cctest/test-accessors.cc | 9 +-
 deps/v8/test/cctest/test-alloc.cc | 36 +-
 deps/v8/test/cctest/test-api.cc | 286 +-
 deps/v8/test/cctest/test-debug.cc | 5 +-
 deps/v8/test/cctest/test-decls.cc | 16 +-
 deps/v8/test/cctest/test-disasm-x64.cc | 1 -
 deps/v8/test/cctest/test-double.cc | 15 +
 deps/v8/test/cctest/test-heap-profiler.cc | 448 +-
 deps/v8/test/cctest/test-heap.cc | 61 +-
 deps/v8/test/cctest/test-mark-compact.cc | 8 +-
 deps/v8/test/cctest/test-regexp.cc | 80 +-
 deps/v8/test/cctest/test-spaces.cc | 8 +-
 deps/v8/test/cctest/test-strings.cc | 85 -
 .../v8/test/cctest/test-thread-termination.cc | 4 -
 deps/v8/test/cctest/test-weakmaps.cc | 80 +-
 deps/v8/test/cctest/testcfg.py | 2 +
 .../mjsunit/array-bounds-check-removal.js | 145 -
 deps/v8/test/mjsunit/big-array-literal.js | 3 +
 .../mjsunit/compiler/alloc-object-huge.js | 2 +-
 .../test/mjsunit/compiler/inline-arguments.js | 67 -
 deps/v8/test/mjsunit/compiler/literals.js | 24 +-
 .../test/mjsunit/compiler/optimize-bitnot.js | 42 -
 .../debug-evaluate-locals-optimized-double.js | 17 +-
 .../debug-evaluate-locals-optimized.js | 17 +-
 deps/v8/test/mjsunit/debug-function-scopes.js | 162 -
 deps/v8/test/mjsunit/debug-scripts-request.js | 6 +-
 .../mjsunit/debug-stepin-builtin-callback.js | 157 -
 deps/v8/test/mjsunit/declare-locally.js | 6 +-
 .../mjsunit/harmony/debug-function-scopes.js | 115 -
 .../v8/test/mjsunit/harmony/module-linking.js | 121 -
 .../v8/test/mjsunit/harmony/module-parsing.js | 10 +-
 .../test/mjsunit/harmony/module-resolution.js | 2 +-
 deps/v8/test/mjsunit/math-floor-of-div.js | 216 -
 deps/v8/test/mjsunit/mjsunit.js | 2 +-
 deps/v8/test/mjsunit/regexp-capture-3.js | 191 +-
 deps/v8/test/mjsunit/regexp-capture.js | 2 -
 deps/v8/test/mjsunit/regress/regress-1119.js | 12 +-
 .../v8/test/mjsunit/regress/regress-115452.js | 19 +-
 deps/v8/test/mjsunit/regress/regress-1170.js | 64 +-
 .../v8/test/mjsunit/regress/regress-119609.js | 71 -
 .../v8/test/mjsunit/regress/regress-120099.js | 40 -
 deps/v8/test/mjsunit/regress/regress-1217.js | 2 +-
 .../v8/test/mjsunit/regress/regress-123512.js | 78 -
 .../v8/test/mjsunit/regress/regress-123919.js | 47 -
 .../v8/test/mjsunit/regress/regress-126412.js | 33 -
 deps/v8/test/mjsunit/regress/regress-2030.js | 53 -
 deps/v8/test/mjsunit/regress/regress-2032.js | 64 -
 deps/v8/test/mjsunit/regress/regress-2034.js | 46 -
 deps/v8/test/mjsunit/regress/regress-2054.js | 34 -
 deps/v8/test/mjsunit/regress/regress-2055.js | 48 -
 deps/v8/test/mjsunit/regress/regress-2058.js | 37 -
 deps/v8/test/mjsunit/regress/regress-2110.js | 53 -
 .../regress-fast-literal-transition.js | 62 -
 .../mjsunit/unicodelctest-no-optimization.js | 4914 -----------------
 deps/v8/test/mjsunit/unicodelctest.js | 4912 ----------------
 deps/v8/test/mozilla/mozilla.status | 14 -
 deps/v8/test/sputnik/sputnik.status | 40 +-
 deps/v8/test/test262/README | 4 +-
 deps/v8/test/test262/test262.status | 24 +-
 deps/v8/test/test262/testcfg.py | 47 +-
 deps/v8/tools/check-static-initializers.sh | 14 +-
 deps/v8/tools/gyp/v8.gyp | 7 +-
 deps/v8/tools/merge-to-branch.sh | 0
 deps/v8/tools/presubmit.py | 8 +-
 deps/v8/tools/push-to-trunk.sh | 9 -
 deps/v8/tools/test-wrapper-gypbuild.py | 21 +-
 253 files changed, 5143 insertions(+), 22142 deletions(-)
 create mode 100644 deps/v8/build/armu.gypi
 create mode 100644 deps/v8/build/mipsu.gypi
 delete mode 100644 deps/v8/test/mjsunit/array-bounds-check-removal.js
 delete mode 100644 deps/v8/test/mjsunit/compiler/optimize-bitnot.js
 delete mode 100644 deps/v8/test/mjsunit/debug-function-scopes.js
 delete mode 100644 deps/v8/test/mjsunit/debug-stepin-builtin-callback.js
 delete
mode 100644 deps/v8/test/mjsunit/harmony/debug-function-scopes.js delete mode 100644 deps/v8/test/mjsunit/harmony/module-linking.js delete mode 100644 deps/v8/test/mjsunit/math-floor-of-div.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-119609.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-120099.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-123512.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-123919.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-126412.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2030.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2032.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2034.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2054.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2055.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2058.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-2110.js delete mode 100644 deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js delete mode 100644 deps/v8/test/mjsunit/unicodelctest-no-optimization.js delete mode 100644 deps/v8/test/mjsunit/unicodelctest.js mode change 100644 => 100755 deps/v8/tools/merge-to-branch.sh diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index 6e46b3d621..dfefad129f 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -23,7 +23,6 @@ Daniel James Dineel D Sule Erich Ocean Fedor Indutny -Filipe David Manana Ioseb Dzmanashvili Jan de Mooij Jay Freeman diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index 09c02378f5..2240ec0e68 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,163 +1,3 @@ -2012-05-03: Version 3.10.8 - - Enabled MIPS cross-compilation. - - Ensured reload of elements pointer in StoreFastDoubleElement stub. - (Chromium issue 125515) - - Fixed corner cases in truncation behavior when storing to - TypedArrays. (issue 2110) - - Fixed failure to properly recognize and report out-of-memory - conditions when allocating code space pages. (Chromium issue - 118625) - - Fixed idle notifications to perform a round of incremental GCs - after context disposal. (issue 2107) - - Fixed preparser for try statement. (issue 2109) - - Performance and stability improvements on all platforms. - - -2012-04-30: Version 3.10.7 - - Performance and stability improvements on all platforms. - - -2012-04-26: Version 3.10.6 - - Fixed some bugs in accessing details of the last regexp match. - - Fixed source property of empty RegExp objects. (issue 1982) - - Enabled inlining some V8 API functions. - - Performance and stability improvements on all platforms. - - -2012-04-23: Version 3.10.5 - - Put new global var semantics behind a flag until WebKit tests are - cleaned up. - - Enabled stepping into callback passed to builtins. - (Chromium issue 109564) - - Performance and stability improvements on all platforms. - - -2012-04-19: Version 3.10.4 - - Fixed issues when stressing compaction with WeakMaps. - - Fixed missing GVN flag for new-space promotion. (Chromium issue 123919) - - Simplify invocation sequence at monomorphic function invocation sites. - (issue 2079) - - Performance and stability improvements on all platforms. - - -2012-04-17: Version 3.10.3 - - Fixed several bugs in heap profiles (including issue 2078). - - Throw syntax errors on illegal escape sequences. - - Implemented rudimentary module linking (behind --harmony flag) - - Implemented ES5 erratum: Global declarations should shadow - inherited properties. 
- - Made handling of const more consistent when combined with 'eval' - and 'with'. - - Fixed V8 on MinGW-x64 (issue 2026). - - Performance and stability improvements on all platforms. - - -2012-04-13: Version 3.10.2 - - Fixed native ARM build (issues 1744, 539) - - Return LOOKUP variable instead of CONTEXT for non-context allocated - outer scope parameters (Chromium issue 119609). - - Fixed regular and ElementsKind transitions interfering with each other - (Chromium issue 122271). - - Improved performance of keyed loads/stores which have a HeapNumber - index (issues 1388, 1295). - - Fixed WeakMap processing for evacuation candidates (issue 2060). - - Bailout on possible direct eval calls (Chromium issue 122681). - - Do not assume that names of function expressions are context-allocated - (issue 2051). - - Performance and stability improvements on all platforms. - - -2012-04-10: Version 3.10.1 - - Fixed bug with arguments object in inlined functions (issue 2045). - - Fixed performance bug with lazy initialization (Chromium issue - 118686). - - Added suppport for Mac OS X 64bit builds with GYP. - (Patch contributed by Filipe David Manana ) - - Fixed bug with hidden properties (issue 2034). - - Fixed a performance bug when reloading pages (Chromium issue 117767, - V8 issue 1902). - - Fixed bug when optimizing throw in top-level code (issue 2054). - - Fixed two bugs with array literals (issue 2055, Chromium issue 121407). - - Fixed bug with Math.min/Math.max with NaN inputs (issue 2056). - - Fixed a bug with the new runtime profiler (Chromium issue 121147). - - Fixed compilation of V8 using uClibc. - - Optimized boot-up memory use. - - Optimized regular expressions. - - -2012-03-30: Version 3.10.0 - - Fixed store IC writability check in strict mode - (Chromium issue 120099). - - Resynchronize timers if the Windows system time was changed. - (Chromium issue 119815) - - Removed "-mfloat-abi=hard" from host compiler cflags when building for - hardfp ARM - (https://code.google.com/p/chrome-os-partner/issues/detail?id=8539) - - Fixed edge case for case independent regexp character classes - (issue 2032). - - Reset function info counters after context disposal. - (Chromium issue 117767, V8 issue 1902) - - Fixed missing write barrier in CopyObjectToObjectElements. - (Chromium issue 119926) - - Fixed missing bounds check in HasElementImpl. - (Chromium issue 119925) - - Performance and stability improvements on all platforms. - - 2012-03-23: Version 3.9.24 Activated count-based profiler for ARM. diff --git a/deps/v8/Makefile b/deps/v8/Makefile index 277c1f786d..2f86c512e4 100644 --- a/deps/v8/Makefile +++ b/deps/v8/Makefile @@ -137,6 +137,12 @@ ENVFILE = $(OUTDIR)/environment # Target definitions. "all" is the default. all: $(MODES) +# Special target for the buildbots to use. Depends on $(OUTDIR)/Makefile +# having been created before. +buildbot: + $(MAKE) -C "$(OUTDIR)" BUILDTYPE=$(BUILDTYPE) \ + builddir="$(abspath $(OUTDIR))/$(BUILDTYPE)" + # Compile targets. MODES and ARCHES are convenience targets. .SECONDEXPANSION: $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES)) @@ -144,21 +150,21 @@ $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES)) $(ARCHES): $(addprefix $$@.,$(MODES)) # Defines how to build a particular target (e.g. ia32.release). 
-$(BUILDS): $(OUTDIR)/Makefile.$$(basename $$@) - @$(MAKE) -C "$(OUTDIR)" -f Makefile.$(basename $@) \ +$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@) + @$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \ CXX="$(CXX)" LINK="$(LINK)" \ BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \ python -c "print raw_input().capitalize()") \ builddir="$(shell pwd)/$(OUTDIR)/$@" -native: $(OUTDIR)/Makefile.native - @$(MAKE) -C "$(OUTDIR)" -f Makefile.native \ +native: $(OUTDIR)/Makefile-native + @$(MAKE) -C "$(OUTDIR)" -f Makefile-native \ CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \ builddir="$(shell pwd)/$(OUTDIR)/$@" # TODO(jkummerow): add "android.debug" when we need it. -android android.release: $(OUTDIR)/Makefile.android - @$(MAKE) -C "$(OUTDIR)" -f Makefile.android \ +android android.release: $(OUTDIR)/Makefile-android + @$(MAKE) -C "$(OUTDIR)" -f Makefile-android \ CXX="$(ANDROID_TOOL_PREFIX)-g++" \ AR="$(ANDROID_TOOL_PREFIX)-ar" \ RANLIB="$(ANDROID_TOOL_PREFIX)-ranlib" \ @@ -191,40 +197,55 @@ native.check: native --arch-and-mode=. $(TESTFLAGS) # Clean targets. You can clean each architecture individually, or everything. -$(addsuffix .clean,$(ARCHES)) android.clean: - rm -f $(OUTDIR)/Makefile.$(basename $@) +$(addsuffix .clean,$(ARCHES)): + rm -f $(OUTDIR)/Makefile-$(basename $@) rm -rf $(OUTDIR)/$(basename $@).release rm -rf $(OUTDIR)/$(basename $@).debug - find $(OUTDIR) -regex '.*\(host\|target\).$(basename $@)\.mk' -delete + find $(OUTDIR) -regex '.*\(host\|target\)-$(basename $@)\.mk' -delete native.clean: - rm -f $(OUTDIR)/Makefile.native + rm -f $(OUTDIR)/Makefile-native rm -rf $(OUTDIR)/native - find $(OUTDIR) -regex '.*\(host\|target\).native\.mk' -delete + find $(OUTDIR) -regex '.*\(host\|target\)-native\.mk' -delete + +android.clean: + rm -f $(OUTDIR)/Makefile-android + rm -rf $(OUTDIR)/android.release + find $(OUTDIR) -regex '.*\(host\|target\)-android\.mk' -delete -clean: $(addsuffix .clean,$(ARCHES)) native.clean android.clean +clean: $(addsuffix .clean,$(ARCHES)) native.clean # GYP file generation targets. -MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ARCHES)) -$(MAKEFILES): $(GYPFILES) $(ENVFILE) - GYP_GENERATORS=make \ +$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE) + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \ + -S-ia32 $(GYPFLAGS) + +$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE) + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \ + -S-x64 $(GYPFLAGS) + +$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) build/armu.gypi + build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ + -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \ + -S-arm $(GYPFLAGS) + +$(OUTDIR)/Makefile-mips: $(GYPFILES) $(ENVFILE) build/mipsu.gypi build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. \ - -Dv8_target_arch=$(subst .,,$(suffix $@)) \ - -S.$(subst .,,$(suffix $@)) $(GYPFLAGS) + -Ibuild/standalone.gypi --depth=. -Ibuild/mipsu.gypi \ + -S-mips $(GYPFLAGS) -$(OUTDIR)/Makefile.native: $(GYPFILES) $(ENVFILE) - GYP_GENERATORS=make \ +$(OUTDIR)/Makefile-native: $(GYPFILES) $(ENVFILE) build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ - -Ibuild/standalone.gypi --depth=. -S.native $(GYPFLAGS) + -Ibuild/standalone.gypi --depth=. 
-S-native $(GYPFLAGS) -$(OUTDIR)/Makefile.android: $(GYPFILES) $(ENVFILE) build/android.gypi \ +$(OUTDIR)/Makefile-android: $(GYPFILES) $(ENVFILE) build/android.gypi \ must-set-ANDROID_NDK_ROOT - GYP_GENERATORS=make \ CC="${ANDROID_TOOL_PREFIX}-gcc" \ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \ -Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \ - -S.android $(GYPFLAGS) + -S-android $(GYPFLAGS) must-set-ANDROID_NDK_ROOT: ifndef ANDROID_NDK_ROOT @@ -240,10 +261,9 @@ $(ENVFILE): $(ENVFILE).new # Stores current GYPFLAGS in a file. $(ENVFILE).new: - @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; \ - echo "CXX=$(CXX)" >> $(ENVFILE).new + @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; # Dependencies. dependencies: svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \ - --revision 1282 + --revision 1026 diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct index 34d0efc5ff..b0d1344700 100644 --- a/deps/v8/SConstruct +++ b/deps/v8/SConstruct @@ -1601,4 +1601,17 @@ except: pass +def WarnAboutDeprecation(): + print """ +####################################################### +# WARNING: Building V8 with SCons is deprecated and # +# will not work much longer. Please switch to using # +# the GYP-based build now. Instructions are at # +# http://code.google.com/p/v8/wiki/BuildingWithGYP. # +####################################################### + """ + +WarnAboutDeprecation() +import atexit +atexit.register(WarnAboutDeprecation) Build() diff --git a/deps/v8/build/armu.gypi b/deps/v8/build/armu.gypi new file mode 100644 index 0000000000..d15b8ab705 --- /dev/null +++ b/deps/v8/build/armu.gypi @@ -0,0 +1,36 @@ +# Copyright 2011 the V8 project authors. All rights reserved. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +{ + 'variables': { + 'target_arch': 'ia32', + 'v8_target_arch': 'arm', + 'armv7': 1, + 'arm_neon': 0, + 'arm_fpu': 'vfpv3', + }, +} diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi index f999437783..74a964d4db 100644 --- a/deps/v8/build/common.gypi +++ b/deps/v8/build/common.gypi @@ -142,10 +142,8 @@ 'USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS', ], - 'target_conditions': [ - ['_toolset=="target"', { - 'cflags': ['-mfloat-abi=hard',], - }], + 'cflags': [ + '-mfloat-abi=hard', ], }, { 'defines': [ @@ -173,11 +171,8 @@ 'defines': [ 'V8_TARGET_ARCH_MIPS', ], - 'variables': { - 'mipscompiler': '&1 | grep -q "^Target: mips-" && echo "yes" || echo "no")', - }, 'conditions': [ - ['mipscompiler=="yes"', { + [ 'target_arch=="mips"', { 'target_conditions': [ ['_toolset=="target"', { 'cflags': ['-EL'], @@ -241,19 +236,6 @@ ], }], ], - }, { # Section for OS=="mac". - 'conditions': [ - ['target_arch=="ia32"', { - 'xcode_settings': { - 'ARCHS': ['i386'], - } - }], - ['target_arch=="x64"', { - 'xcode_settings': { - 'ARCHS': ['x86_64'], - } - }], - ], }], ['v8_use_liveobjectlist=="true"', { 'defines': [ @@ -280,16 +262,19 @@ }, }, }], + ['OS=="win" and v8_target_arch=="x64"', { + 'msvs_settings': { + 'VCLinkerTool': { + 'StackReserveSize': '2097152', + }, + }, + }], ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \ or OS=="netbsd"', { 'conditions': [ - [ 'v8_target_arch!="x64"', { - # Pass -m32 to the compiler iff it understands the flag. - 'variables': { - 'm32flag': ' /dev/null 2>&1) && echo "-m32" || true)', - }, - 'cflags': [ '<(m32flag)' ], - 'ldflags': [ '<(m32flag)' ], + [ 'target_arch=="ia32"', { + 'cflags': [ '-m32' ], + 'ldflags': [ '-m32' ], }], [ 'v8_no_strict_aliasing==1', { 'cflags': [ '-fno-strict-aliasing' ], @@ -322,10 +307,6 @@ }, 'VCLinkerTool': { 'LinkIncremental': '2', - # For future reference, the stack size needs to be increased - # when building for Windows 64-bit, otherwise some test cases - # can cause stack overflow. - # 'StackReserveSize': '297152', }, }, 'conditions': [ @@ -336,7 +317,7 @@ 'cflags': [ '-I/usr/pkg/include' ], }], ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', { - 'cflags': [ '-Wno-unused-parameter', + 'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter', '-Wnon-virtual-dtor', '-Woverloaded-virtual' ], }], ], @@ -407,12 +388,7 @@ 'VCLinkerTool': { 'LinkIncremental': '1', 'OptimizeReferences': '2', - 'OptimizeForWindows98': '1', 'EnableCOMDATFolding': '2', - # For future reference, the stack size needs to be - # increased when building for Windows 64-bit, otherwise - # some test cases can cause stack overflow. - # 'StackReserveSize': '297152', }, }, }], # OS=="win" diff --git a/deps/v8/build/gyp_v8 b/deps/v8/build/gyp_v8 index a926fe8ca3..6d5c126844 100755 --- a/deps/v8/build/gyp_v8 +++ b/deps/v8/build/gyp_v8 @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright 2012 the V8 project authors. All rights reserved. +# Copyright 2010 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -38,6 +38,11 @@ import sys script_dir = os.path.dirname(__file__) v8_root = os.path.normpath(os.path.join(script_dir, os.pardir)) +if __name__ == '__main__': + os.chdir(v8_root) + script_dir = os.path.dirname(__file__) + v8_root = '.' 
+ sys.path.insert(0, os.path.join(v8_root, 'tools')) import utils @@ -93,7 +98,7 @@ def additional_include_files(args=[]): result.append(path) # Always include standalone.gypi - AddInclude(os.path.join(script_dir, 'standalone.gypi')) + AddInclude(os.path.join(v8_root, 'build', 'standalone.gypi')) # Optionally add supplemental .gypi files if present. supplements = glob.glob(os.path.join(v8_root, '*', 'supplement.gypi')) @@ -135,7 +140,10 @@ if __name__ == '__main__': # path separators even on Windows due to the use of shlex.split(). args.extend(shlex.split(gyp_file)) else: - args.append(os.path.join(script_dir, 'all.gyp')) + # Note that this must not start with "./" or things break. + # So we rely on having done os.chdir(v8_root) above and use the + # relative path. + args.append(os.path.join('build', 'all.gyp')) args.extend(['-I' + i for i in additional_include_files(args)]) @@ -156,28 +164,6 @@ if __name__ == '__main__': # Generate for the architectures supported on the given platform. gyp_args = list(args) - target_arch = None - for p in gyp_args: - if p.find('-Dtarget_arch=') == 0: - target_arch = p - if target_arch is None: - gyp_args.append('-Dtarget_arch=ia32') if utils.GuessOS() == 'linux': - gyp_args.append('-S.ia32') + gyp_args.append('--generator-output=out') run_gyp(gyp_args) - - if utils.GuessOS() == 'linux': - gyp_args = list(args) - gyp_args.append('-Dtarget_arch=x64') - gyp_args.append('-S.x64') - run_gyp(gyp_args) - - gyp_args = list(args) - gyp_args.append('-Dv8_target_arch=arm') - gyp_args.append('-S.arm') - run_gyp(gyp_args) - - gyp_args = list(args) - gyp_args.append('-Dv8_target_arch=mips') - gyp_args.append('-S.mips') - run_gyp(gyp_args) diff --git a/deps/v8/build/mipsu.gypi b/deps/v8/build/mipsu.gypi new file mode 100644 index 0000000000..637ff841e4 --- /dev/null +++ b/deps/v8/build/mipsu.gypi @@ -0,0 +1,33 @@ +# Copyright 2012 the V8 project authors. All rights reserved. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +{ + 'variables': { + 'target_arch': 'ia32', + 'v8_target_arch': 'mips', + }, +} diff --git a/deps/v8/build/standalone.gypi b/deps/v8/build/standalone.gypi index dad05ae962..b5707800f8 100644 --- a/deps/v8/build/standalone.gypi +++ b/deps/v8/build/standalone.gypi @@ -71,10 +71,6 @@ 'want_separate_host_toolset': 0, }], ], - # Default ARM variable settings. - 'armv7%': 1, - 'arm_neon%': 0, - 'arm_fpu%': 'vfpv3', }, 'target_defaults': { 'default_configuration': 'Debug', @@ -169,6 +165,9 @@ }, }], # OS=="win" ['OS=="mac"', { + 'xcode_settings': { + 'SYMROOT': '<(DEPTH)/xcodebuild', + }, 'target_defaults': { 'xcode_settings': { 'ALWAYS_SEARCH_USER_PATHS': 'NO', @@ -188,6 +187,7 @@ 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof 'MACOSX_DEPLOYMENT_TARGET': '10.4', # -mmacosx-version-min=10.4 'PREBINDING': 'NO', # No -Wl,-prebind + 'SYMROOT': '<(DEPTH)/xcodebuild', 'USE_HEADERMAP': 'NO', 'OTHER_CFLAGS': [ '-fno-strict-aliasing', diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index 8f380f2094..2499bbf050 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -64,7 +64,6 @@ */ namespace v8 { -typedef uint32_t SnapshotObjectId; /** * CpuProfileNode represents a node in a call graph. @@ -275,7 +274,7 @@ class V8EXPORT HeapGraphNode { * Returns node id. For the same heap object, the id remains the same * across all snapshots. */ - SnapshotObjectId GetId() const; + uint64_t GetId() const; /** Returns node's own size, in bytes. */ int GetSelfSize() const; @@ -339,7 +338,7 @@ class V8EXPORT HeapSnapshot { const HeapGraphNode* GetRoot() const; /** Returns a node by its id. */ - const HeapGraphNode* GetNodeById(SnapshotObjectId id) const; + const HeapGraphNode* GetNodeById(uint64_t id) const; /** Returns total nodes count in the snapshot. */ int GetNodesCount() const; @@ -347,9 +346,6 @@ class V8EXPORT HeapSnapshot { /** Returns a node by index. */ const HeapGraphNode* GetNode(int index) const; - /** Returns a max seen JS object Id. */ - SnapshotObjectId GetMaxSnapshotJSObjectId() const; - /** * Deletes the snapshot and removes it from HeapProfiler's list. * All pointers to nodes, edges and paths previously returned become @@ -368,20 +364,16 @@ class V8EXPORT HeapSnapshot { * with the following structure: * * { - * snapshot: { - * title: "...", - * uid: nnn, - * meta: { meta-info }, - * node_count: nnn, - * edge_count: nnn - * }, - * nodes: [nodes array], - * edges: [edges array], - * strings: [strings array] + * snapshot: {title: "...", uid: nnn}, + * nodes: [ + * meta-info (JSON string), + * nodes themselves + * ], + * strings: [strings] * } * - * Nodes reference strings, other nodes, and edges by their indexes - * in corresponding arrays. + * Outgoing node links are stored after each node. Nodes reference strings + * and other nodes by their indexes in corresponding arrays. */ void Serialize(OutputStream* stream, SerializationFormat format) const; }; @@ -412,19 +404,6 @@ class V8EXPORT HeapProfiler { /** Returns a profile by uid. */ static const HeapSnapshot* FindSnapshot(unsigned uid); - /** - * Returns SnapshotObjectId for a heap object referenced by |value| if - * it has been seen by the heap profiler, kUnknownObjectId otherwise. - */ - static SnapshotObjectId GetSnapshotObjectId(Handle value); - - /** - * A constant for invalid SnapshotObjectId. GetSnapshotObjectId will return - * it in case heap profiler cannot find id for the object passed as - * parameter. HeapSnapshot::GetNodeById will always return NULL for such id. 
- */ - static const SnapshotObjectId kUnknownObjectId = 0; - /** * Takes a heap snapshot and returns it. Title may be an empty string. * See HeapSnapshot::Type for types description. @@ -434,33 +413,6 @@ class V8EXPORT HeapProfiler { HeapSnapshot::Type type = HeapSnapshot::kFull, ActivityControl* control = NULL); - /** - * Starts tracking of heap objects population statistics. After calling - * this method, all heap objects relocations done by the garbage collector - * are being registered. - */ - static void StartHeapObjectsTracking(); - - /** - * Adds a new time interval entry to the aggregated statistics array. The - * time interval entry contains information on the current heap objects - * population size. The method also updates aggregated statistics and - * reports updates for all previous time intervals via the OutputStream - * object. Updates on each time interval are provided as a stream of the - * HeapStatsUpdate structure instances. - * - * StartHeapObjectsTracking must be called before the first call to this - * method. - */ - static void PushHeapObjectsStats(OutputStream* stream); - - /** - * Stops tracking of heap objects population statistics, cleans up all - * collected data. StartHeapObjectsTracking must be called again prior to - * calling PushHeapObjectsStats next time. - */ - static void StopHeapObjectsTracking(); - /** * Deletes all snapshots taken. All previously returned pointers to * snapshots and their contents become invalid after this call. @@ -558,19 +510,6 @@ class V8EXPORT RetainedObjectInfo { // NOLINT }; -/** - * A struct for exporting HeapStats data from V8, using "push" model. - * See HeapProfiler::PushHeapObjectsStats. - */ -struct HeapStatsUpdate { - HeapStatsUpdate(uint32_t index, uint32_t count, uint32_t size) - : index(index), count(count), size(size) { } - uint32_t index; // Index of the time interval that was changed. - uint32_t count; // New value of count field for the interval with this index. - uint32_t size; // New value of size field for the interval with this index. -}; - - } // namespace v8 diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index 9024531992..33179f5bf0 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -107,7 +107,6 @@ class Data; class AccessorInfo; class StackTrace; class StackFrame; -class Isolate; namespace internal { @@ -863,13 +862,13 @@ class Value : public Data { * Returns true if this value is the undefined value. See ECMA-262 * 4.3.10. */ - inline bool IsUndefined() const; + V8EXPORT bool IsUndefined() const; /** * Returns true if this value is the null value. See ECMA-262 * 4.3.11. */ - inline bool IsNull() const; + V8EXPORT bool IsNull() const; /** * Returns true if this value is true. @@ -983,11 +982,7 @@ class Value : public Data { V8EXPORT bool StrictEquals(Handle that) const; private: - inline bool QuickIsUndefined() const; - inline bool QuickIsNull() const; inline bool QuickIsString() const; - V8EXPORT bool FullIsUndefined() const; - V8EXPORT bool FullIsNull() const; V8EXPORT bool FullIsString() const; }; @@ -1084,7 +1079,6 @@ class String : public Primitive { * A zero length string. 
*/ V8EXPORT static v8::Local Empty(); - inline static v8::Local Empty(Isolate* isolate); /** * Returns true if the string is external @@ -1242,7 +1236,8 @@ class String : public Primitive { * this function should not otherwise delete or modify the resource. Neither * should the underlying buffer be deallocated or modified except through the * destructor of the external string resource. - */ V8EXPORT static Local NewExternal( + */ + V8EXPORT static Local NewExternal( ExternalAsciiStringResource* resource); /** @@ -1973,13 +1968,10 @@ class Arguments { inline Local Holder() const; inline bool IsConstructCall() const; inline Local Data() const; - inline Isolate* GetIsolate() const; - private: - static const int kIsolateIndex = 0; - static const int kDataIndex = -1; - static const int kCalleeIndex = -2; - static const int kHolderIndex = -3; + static const int kDataIndex = 0; + static const int kCalleeIndex = -1; + static const int kHolderIndex = -2; friend class ImplementationUtilities; inline Arguments(internal::Object** implicit_args, @@ -2001,11 +1993,9 @@ class V8EXPORT AccessorInfo { public: inline AccessorInfo(internal::Object** args) : args_(args) { } - inline Isolate* GetIsolate() const; inline Local Data() const; inline Local This() const; inline Local Holder() const; - private: internal::Object** args_; }; @@ -2562,11 +2552,6 @@ Handle V8EXPORT Null(); Handle V8EXPORT True(); Handle V8EXPORT False(); -inline Handle Undefined(Isolate* isolate); -inline Handle Null(Isolate* isolate); -inline Handle True(Isolate* isolate); -inline Handle False(Isolate* isolate); - /** * A set of constraints that specifies the limits of the runtime's memory use. @@ -2817,13 +2802,13 @@ class V8EXPORT Isolate { /** * Associate embedder-specific data with the isolate */ - inline void SetData(void* data); + void SetData(void* data); /** - * Retrieve embedder-specific data from the isolate. + * Retrive embedder-specific data from the isolate. * Returns NULL if SetData has never been called. */ - inline void* GetData(); + void* GetData(); private: Isolate(); @@ -3168,8 +3153,7 @@ class V8EXPORT V8 { * that is kept alive by JavaScript objects. * \returns the adjusted value. */ - static intptr_t AdjustAmountOfExternalAllocatedMemory( - intptr_t change_in_bytes); + static int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); /** * Suspends recording of tick samples in the profiler. @@ -3751,12 +3735,6 @@ class V8EXPORT Locker { }; -/** - * A struct for exporting HeapStats data from V8, using "push" model. - */ -struct HeapStatsUpdate; - - /** * An interface for exporting data from V8, using "push" model. */ @@ -3782,14 +3760,6 @@ class V8EXPORT OutputStream { // NOLINT * will not be called in case writing was aborted. */ virtual WriteResult WriteAsciiChunk(char* data, int size) = 0; - /** - * Writes the next chunk of heap stats data into the stream. Writing - * can be stopped by returning kAbort as function result. EndOfStream - * will not be called in case writing was aborted. - */ - virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate* data, int count) { - return kAbort; - }; }; @@ -3878,6 +3848,18 @@ const uintptr_t kEncodablePointerMask = PlatformSmiTagging::kEncodablePointerMask; const int kPointerToSmiShift = PlatformSmiTagging::kPointerToSmiShift; +template struct InternalConstants; + +// Internal constants for 32-bit systems. +template <> struct InternalConstants<4> { + static const int kStringResourceOffset = 3 * kApiPointerSize; +}; + +// Internal constants for 64-bit systems. 
+template <> struct InternalConstants<8> { + static const int kStringResourceOffset = 3 * kApiPointerSize; +}; + /** * This class exports constants and functionality from within v8 that * is necessary to implement inline functions in the v8 api. Don't @@ -3889,31 +3871,18 @@ class Internals { // the implementation of v8. static const int kHeapObjectMapOffset = 0; static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize; - static const int kStringResourceOffset = 3 * kApiPointerSize; + static const int kStringResourceOffset = + InternalConstants::kStringResourceOffset; - static const int kOddballKindOffset = 3 * kApiPointerSize; static const int kForeignAddressOffset = kApiPointerSize; static const int kJSObjectHeaderSize = 3 * kApiPointerSize; static const int kFullStringRepresentationMask = 0x07; static const int kExternalTwoByteRepresentationTag = 0x02; - static const int kIsolateStateOffset = 0; - static const int kIsolateEmbedderDataOffset = 1 * kApiPointerSize; - static const int kIsolateRootsOffset = 3 * kApiPointerSize; - static const int kUndefinedValueRootIndex = 5; - static const int kNullValueRootIndex = 7; - static const int kTrueValueRootIndex = 8; - static const int kFalseValueRootIndex = 9; - static const int kEmptySymbolRootIndex = 128; - static const int kJSObjectType = 0xaa; static const int kFirstNonstringType = 0x80; - static const int kOddballType = 0x82; static const int kForeignType = 0x85; - static const int kUndefinedOddballKind = 5; - static const int kNullOddballKind = 3; - static inline bool HasHeapObjectTag(internal::Object* value) { return ((reinterpret_cast(value) & kHeapObjectTagMask) == kHeapObjectTag); @@ -3933,11 +3902,6 @@ class Internals { return ReadField(map, kMapInstanceTypeOffset); } - static inline int GetOddballKind(internal::Object* obj) { - typedef internal::Object O; - return SmiValue(ReadField(obj, kOddballKindOffset)); - } - static inline void* GetExternalPointerFromSmi(internal::Object* value) { const uintptr_t address = reinterpret_cast(value); return reinterpret_cast(address >> kPointerToSmiShift); @@ -3958,28 +3922,6 @@ class Internals { return representation == kExternalTwoByteRepresentationTag; } - static inline bool IsInitialized(v8::Isolate* isolate) { - uint8_t* addr = reinterpret_cast(isolate) + kIsolateStateOffset; - return *reinterpret_cast(addr) == 1; - } - - static inline void SetEmbedderData(v8::Isolate* isolate, void* data) { - uint8_t* addr = reinterpret_cast(isolate) + - kIsolateEmbedderDataOffset; - *reinterpret_cast(addr) = data; - } - - static inline void* GetEmbedderData(v8::Isolate* isolate) { - uint8_t* addr = reinterpret_cast(isolate) + - kIsolateEmbedderDataOffset; - return *reinterpret_cast(addr); - } - - static inline internal::Object** GetRoot(v8::Isolate* isolate, int index) { - uint8_t* addr = reinterpret_cast(isolate) + kIsolateRootsOffset; - return reinterpret_cast(addr + index * kApiPointerSize); - } - template static inline T ReadField(Object* ptr, int offset) { uint8_t* addr = reinterpret_cast(ptr) + offset - kHeapObjectTag; @@ -4106,11 +4048,6 @@ Local Arguments::Data() const { } -Isolate* Arguments::GetIsolate() const { - return *reinterpret_cast(&implicit_args_[kIsolateIndex]); -} - - bool Arguments::IsConstructCall() const { return is_construct_call_; } @@ -4223,15 +4160,6 @@ String* String::Cast(v8::Value* value) { } -Local String::Empty(Isolate* isolate) { - typedef internal::Object* S; - typedef internal::Internals I; - if (!I::IsInitialized(isolate)) return Empty(); - S* slot = 
I::GetRoot(isolate, I::kEmptySymbolRootIndex); - return Local(reinterpret_cast(slot)); -} - - String::ExternalStringResource* String::GetExternalStringResource() const { typedef internal::Object O; typedef internal::Internals I; @@ -4250,42 +4178,6 @@ String::ExternalStringResource* String::GetExternalStringResource() const { } -bool Value::IsUndefined() const { -#ifdef V8_ENABLE_CHECKS - return FullIsUndefined(); -#else - return QuickIsUndefined(); -#endif -} - -bool Value::QuickIsUndefined() const { - typedef internal::Object O; - typedef internal::Internals I; - O* obj = *reinterpret_cast(const_cast(this)); - if (!I::HasHeapObjectTag(obj)) return false; - if (I::GetInstanceType(obj) != I::kOddballType) return false; - return (I::GetOddballKind(obj) == I::kUndefinedOddballKind); -} - - -bool Value::IsNull() const { -#ifdef V8_ENABLE_CHECKS - return FullIsNull(); -#else - return QuickIsNull(); -#endif -} - -bool Value::QuickIsNull() const { - typedef internal::Object O; - typedef internal::Internals I; - O* obj = *reinterpret_cast(const_cast(this)); - if (!I::HasHeapObjectTag(obj)) return false; - if (I::GetInstanceType(obj) != I::kOddballType) return false; - return (I::GetOddballKind(obj) == I::kNullOddballKind); -} - - bool Value::IsString() const { #ifdef V8_ENABLE_CHECKS return FullIsString(); @@ -4391,11 +4283,6 @@ External* External::Cast(v8::Value* value) { } -Isolate* AccessorInfo::GetIsolate() const { - return *reinterpret_cast(&args_[-3]); -} - - Local AccessorInfo::Data() const { return Local(reinterpret_cast(&args_[-2])); } @@ -4411,54 +4298,6 @@ Local AccessorInfo::Holder() const { } -Handle Undefined(Isolate* isolate) { - typedef internal::Object* S; - typedef internal::Internals I; - if (!I::IsInitialized(isolate)) return Undefined(); - S* slot = I::GetRoot(isolate, I::kUndefinedValueRootIndex); - return Handle(reinterpret_cast(slot)); -} - - -Handle Null(Isolate* isolate) { - typedef internal::Object* S; - typedef internal::Internals I; - if (!I::IsInitialized(isolate)) return Null(); - S* slot = I::GetRoot(isolate, I::kNullValueRootIndex); - return Handle(reinterpret_cast(slot)); -} - - -Handle True(Isolate* isolate) { - typedef internal::Object* S; - typedef internal::Internals I; - if (!I::IsInitialized(isolate)) return True(); - S* slot = I::GetRoot(isolate, I::kTrueValueRootIndex); - return Handle(reinterpret_cast(slot)); -} - - -Handle False(Isolate* isolate) { - typedef internal::Object* S; - typedef internal::Internals I; - if (!I::IsInitialized(isolate)) return False(); - S* slot = I::GetRoot(isolate, I::kFalseValueRootIndex); - return Handle(reinterpret_cast(slot)); -} - - -void Isolate::SetData(void* data) { - typedef internal::Internals I; - I::SetEmbedderData(this, data); -} - - -void* Isolate::GetData() { - typedef internal::Internals I; - return I::GetEmbedderData(this); -} - - /** * \example shell.cc * A simple shell that takes a list of expressions on the diff --git a/deps/v8/samples/lineprocessor.cc b/deps/v8/samples/lineprocessor.cc index 7a84a2a0ff..1606a8f99c 100644 --- a/deps/v8/samples/lineprocessor.cc +++ b/deps/v8/samples/lineprocessor.cc @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2009 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -434,9 +434,9 @@ v8::Handle ReadLine() { } if (res == NULL) { v8::Handle t = v8::Undefined(); - return v8::Handle(v8::String::Cast(*t)); + return reinterpret_cast&>(t); } - // Remove newline char + // remove newline char for (char* pos = buffer; *pos != '\0'; pos++) { if (*pos == '\n') { *pos = '\0'; diff --git a/deps/v8/samples/samples.gyp b/deps/v8/samples/samples.gyp index 3c720a748a..55b2a98acd 100644 --- a/deps/v8/samples/samples.gyp +++ b/deps/v8/samples/samples.gyp @@ -1,4 +1,4 @@ -# Copyright 2012 the V8 project authors. All rights reserved. +# Copyright 2011 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -48,12 +48,6 @@ 'sources': [ 'process.cc', ], - }, - { - 'target_name': 'lineprocessor', - 'sources': [ - 'lineprocessor.cc', - ], } ], } diff --git a/deps/v8/samples/shell.cc b/deps/v8/samples/shell.cc index db0cc1a930..b40eca2f7c 100644 --- a/deps/v8/samples/shell.cc +++ b/deps/v8/samples/shell.cc @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,20 +67,17 @@ static bool run_shell; int main(int argc, char* argv[]) { v8::V8::SetFlagsFromCommandLine(&argc, argv, true); run_shell = (argc == 1); - int result; - { - v8::HandleScope handle_scope; - v8::Persistent context = CreateShellContext(); - if (context.IsEmpty()) { - printf("Error creating context\n"); - return 1; - } - context->Enter(); - result = RunMain(argc, argv); - if (run_shell) RunShell(context); - context->Exit(); - context.Dispose(); + v8::HandleScope handle_scope; + v8::Persistent context = CreateShellContext(); + if (context.IsEmpty()) { + printf("Error creating context\n"); + return 1; } + context->Enter(); + int result = RunMain(argc, argv); + if (run_shell) RunShell(context); + context->Exit(); + context.Dispose(); v8::V8::Dispose(); return result; } diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 0bc93c2ff2..4e731fbec8 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -512,16 +512,6 @@ void RegisteredExtension::Register(RegisteredExtension* that) { } -void RegisteredExtension::UnregisterAll() { - RegisteredExtension* re = first_extension_; - while (re != NULL) { - RegisteredExtension* next = re->next(); - delete re; - re = next; - } -} - - void RegisterExtension(Extension* that) { RegisteredExtension* extension = new RegisteredExtension(that); RegisteredExtension::Register(extension); @@ -2101,21 +2091,17 @@ bool StackFrame::IsConstructor() const { // --- D a t a --- -bool Value::FullIsUndefined() const { +bool Value::IsUndefined() const { if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsUndefined()")) { return false; } - bool result = Utils::OpenHandle(this)->IsUndefined(); - ASSERT_EQ(result, QuickIsUndefined()); - return result; + return Utils::OpenHandle(this)->IsUndefined(); } -bool Value::FullIsNull() const { +bool Value::IsNull() const { if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsNull()")) return false; - bool result = Utils::OpenHandle(this)->IsNull(); - ASSERT_EQ(result, QuickIsNull()); - return result; + return 
Utils::OpenHandle(this)->IsNull(); } @@ -2813,13 +2799,9 @@ bool v8::Object::ForceDelete(v8::Handle key) { i::Handle self = Utils::OpenHandle(this); i::Handle key_obj = Utils::OpenHandle(*key); - // When deleting a property on the global object using ForceDelete - // deoptimize all functions as optimized code does not check for the hole - // value with DontDelete properties. We have to deoptimize all contexts - // because of possible cross-context inlined functions. - if (self->IsJSGlobalProxy() || self->IsGlobalObject()) { - i::Deoptimizer::DeoptimizeAll(); - } + // When turning on access checks for a global object deoptimize all functions + // as optimized code does not always handle access checks. + i::Deoptimizer::DeoptimizeGlobalObject(*self); EXCEPTION_PREAMBLE(isolate); i::Handle obj = i::ForceDeleteProperty(self, key_obj); @@ -4630,9 +4612,7 @@ void* External::Value() const { Local v8::String::Empty() { i::Isolate* isolate = i::Isolate::Current(); - if (!EnsureInitializedForIsolate(isolate, "v8::String::Empty()")) { - return v8::Local(); - } + EnsureInitializedForIsolate(isolate, "v8::String::Empty()"); LOG_API(isolate, "String::Empty()"); return Utils::ToLocal(isolate->factory()->empty_symbol()); } @@ -5218,7 +5198,7 @@ void V8::AddImplicitReferences(Persistent parent, } -intptr_t V8::AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes) { +int V8::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) { i::Isolate* isolate = i::Isolate::Current(); if (IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) { return 0; @@ -5398,6 +5378,17 @@ void Isolate::Exit() { } +void Isolate::SetData(void* data) { + i::Isolate* isolate = reinterpret_cast(this); + isolate->SetData(data); +} + +void* Isolate::GetData() { + i::Isolate* isolate = reinterpret_cast(this); + return isolate->GetData(); +} + + String::Utf8Value::Utf8Value(v8::Handle obj) : str_(NULL), length_(0) { i::Isolate* isolate = i::Isolate::Current(); @@ -5997,7 +5988,7 @@ Handle HeapGraphEdge::GetName() const { const HeapGraphNode* HeapGraphEdge::GetFromNode() const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode"); - const i::HeapEntry* from = ToInternal(this)->from(); + const i::HeapEntry* from = ToInternal(this)->From(); return reinterpret_cast(from); } @@ -6031,7 +6022,7 @@ Handle HeapGraphNode::GetName() const { } -SnapshotObjectId HeapGraphNode::GetId() const { +uint64_t HeapGraphNode::GetId() const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapGraphNode::GetId"); return ToInternal(this)->id(); @@ -6146,11 +6137,11 @@ const HeapGraphNode* HeapSnapshot::GetRoot() const { } -const HeapGraphNode* HeapSnapshot::GetNodeById(SnapshotObjectId id) const { +const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById"); return reinterpret_cast( - ToInternal(this)->GetEntryById(id)); + ToInternal(this)->GetEntryById(static_cast(id))); } @@ -6169,13 +6160,6 @@ const HeapGraphNode* HeapSnapshot::GetNode(int index) const { } -SnapshotObjectId HeapSnapshot::GetMaxSnapshotJSObjectId() const { - i::Isolate* isolate = i::Isolate::Current(); - IsDeadCheck(isolate, "v8::HeapSnapshot::GetMaxSnapshotJSObjectId"); - return ToInternal(this)->max_snapshot_js_object_id(); -} - - void HeapSnapshot::Serialize(OutputStream* stream, HeapSnapshot::SerializationFormat format) const { i::Isolate* isolate = i::Isolate::Current(); @@ 
-6217,14 +6201,6 @@ const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) { } -SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle value) { - i::Isolate* isolate = i::Isolate::Current(); - IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotObjectId"); - i::Handle obj = Utils::OpenHandle(*value); - return i::HeapProfiler::GetSnapshotObjectId(obj); -} - - const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle title, HeapSnapshot::Type type, ActivityControl* control) { @@ -6244,27 +6220,6 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle title, } -void HeapProfiler::StartHeapObjectsTracking() { - i::Isolate* isolate = i::Isolate::Current(); - IsDeadCheck(isolate, "v8::HeapProfiler::StartHeapObjectsTracking"); - i::HeapProfiler::StartHeapObjectsTracking(); -} - - -void HeapProfiler::StopHeapObjectsTracking() { - i::Isolate* isolate = i::Isolate::Current(); - IsDeadCheck(isolate, "v8::HeapProfiler::StopHeapObjectsTracking"); - i::HeapProfiler::StopHeapObjectsTracking(); -} - - -void HeapProfiler::PushHeapObjectsStats(OutputStream* stream) { - i::Isolate* isolate = i::Isolate::Current(); - IsDeadCheck(isolate, "v8::HeapProfiler::PushHeapObjectsStats"); - return i::HeapProfiler::PushHeapObjectsStats(stream); -} - - void HeapProfiler::DeleteAllSnapshots() { i::Isolate* isolate = i::Isolate::Current(); IsDeadCheck(isolate, "v8::HeapProfiler::DeleteAllSnapshots"); @@ -6312,11 +6267,7 @@ static void SetFlagsFromString(const char* flags) { void Testing::PrepareStressRun(int run) { static const char* kLazyOptimizations = - "--prepare-always-opt " - "--max-inlined-source-size=999999 " - "--max-inlined-nodes=999999 " - "--max-inlined-nodes-cumulative=999999 " - "--noalways-opt"; + "--prepare-always-opt --nolimit-inlining --noalways-opt"; static const char* kForcedOptimizations = "--always-opt"; // If deoptimization stressed turn on frequent deoptimization. If no value diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h index 3ad57f4657..89cf0c864c 100644 --- a/deps/v8/src/api.h +++ b/deps/v8/src/api.h @@ -146,7 +146,6 @@ class RegisteredExtension { public: explicit RegisteredExtension(Extension* extension); static void Register(RegisteredExtension* that); - static void UnregisterAll(); Extension* extension() { return extension_; } RegisteredExtension* next() { return next_; } RegisteredExtension* next_auto() { return next_auto_; } diff --git a/deps/v8/src/apiutils.h b/deps/v8/src/apiutils.h index 71c0e1c2c4..68579af1b3 100644 --- a/deps/v8/src/apiutils.h +++ b/deps/v8/src/apiutils.h @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2009 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -40,17 +40,14 @@ class ImplementationUtilities { } // Packs additional parameters for the NewArguments function. |implicit_args| - // is a pointer to the last element of 4-elements array controlled by GC. + // is a pointer to the last element of 3-elements array controlled by GC. 
static void PrepareArgumentsData(internal::Object** implicit_args, - internal::Isolate* isolate, internal::Object* data, internal::JSFunction* callee, internal::Object* holder) { implicit_args[v8::Arguments::kDataIndex] = data; implicit_args[v8::Arguments::kCalleeIndex] = callee; implicit_args[v8::Arguments::kHolderIndex] = holder; - implicit_args[v8::Arguments::kIsolateIndex] = - reinterpret_cast(isolate); } static v8::Arguments NewArguments(internal::Object** implicit_args, @@ -58,8 +55,6 @@ class ImplementationUtilities { bool is_construct_call) { ASSERT(implicit_args[v8::Arguments::kCalleeIndex]->IsJSFunction()); ASSERT(implicit_args[v8::Arguments::kHolderIndex]->IsHeapObject()); - // The implicit isolate argument is not tagged and looks like a SMI. - ASSERT(implicit_args[v8::Arguments::kIsolateIndex]->IsSmi()); return v8::Arguments(implicit_args, argv, argc, is_construct_call); } diff --git a/deps/v8/src/arguments.h b/deps/v8/src/arguments.h index f8fb00c575..e9a32702cf 100644 --- a/deps/v8/src/arguments.h +++ b/deps/v8/src/arguments.h @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2006-2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -91,11 +91,9 @@ class CustomArguments : public Relocatable { Object* data, Object* self, JSObject* holder) : Relocatable(isolate) { - ASSERT(reinterpret_cast(isolate)->IsSmi()); - values_[3] = self; - values_[2] = holder; - values_[1] = data; - values_[0] = reinterpret_cast(isolate); + values_[2] = self; + values_[1] = holder; + values_[0] = data; } inline explicit CustomArguments(Isolate* isolate) : Relocatable(isolate) { @@ -108,9 +106,8 @@ class CustomArguments : public Relocatable { void IterateInstance(ObjectVisitor* v); Object** end() { return values_ + ARRAY_SIZE(values_) - 1; } - private: - Object* values_[4]; + Object* values_[3]; }; diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc index ad2ab7e09d..f772db9be2 100644 --- a/deps/v8/src/arm/code-stubs-arm.cc +++ b/deps/v8/src/arm/code-stubs-arm.cc @@ -5169,9 +5169,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); __ b(ne, &call); // Patch the receiver on the stack with the global receiver object. - __ ldr(r3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); - __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset)); - __ str(r3, MemOperand(sp, argc_ * kPointerSize)); + __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); + __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); + __ str(r2, MemOperand(sp, argc_ * kPointerSize)); __ bind(&call); } @@ -5179,13 +5179,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // r1: pushed function (to be verified) __ JumpIfSmi(r1, &non_function); // Get the map of the function object. - __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); + __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); __ b(ne, &slow); - if (RecordCallTarget()) { - GenerateRecordCallTarget(masm); - } - // Fast-case: Invoke the function now. // r1: pushed function ParameterCount actual(argc_); @@ -5209,17 +5205,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { // Slow-case: Non-function called. 
__ bind(&slow); - if (RecordCallTarget()) { - // If there is a call target cache, mark it megamorphic in the - // non-function case. MegamorphicSentinel is an immortal immovable - // object (undefined) so no write barrier is needed. - ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()), - masm->isolate()->heap()->undefined_value()); - __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); - __ str(ip, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); - } // Check for function proxy. - __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE)); + __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); __ b(ne, &non_function); __ push(r1); // put proxy as additional argument __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE)); @@ -5886,12 +5873,36 @@ void SubStringStub::Generate(MacroAssembler* masm) { // r2: result string length __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset)); __ cmp(r2, Operand(r4, ASR, 1)); - // Return original string. __ b(eq, &return_r0); - // Longer than original string's length or negative: unsafe arguments. - __ b(hi, &runtime); - // Shorter than original string's length: an actual substring. + Label result_longer_than_two; + // Check for special case of two character ASCII string, in which case + // we do a lookup in the symbol table first. + __ cmp(r2, Operand(2)); + __ b(gt, &result_longer_than_two); + __ b(lt, &runtime); + + __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime); + + // Get the two characters forming the sub string. + __ add(r0, r0, Operand(r3)); + __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); + __ ldrb(r4, FieldMemOperand(r0, SeqAsciiString::kHeaderSize + 1)); + + // Try to lookup two character string in symbol table. + Label make_two_character_string; + StringHelper::GenerateTwoCharacterSymbolTableProbe( + masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string); + __ jmp(&return_r0); + + // r2: result string length. + // r3: two characters combined into halfword in little endian byte order. + __ bind(&make_two_character_string); + __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime); + __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); + __ jmp(&return_r0); + + __ bind(&result_longer_than_two); // Deal with different string types: update the index if necessary // and put the underlying string into r5. // r0: original string diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc index 3c8df292c4..69b12ce5ee 100644 --- a/deps/v8/src/arm/full-codegen-arm.cc +++ b/deps/v8/src/arm/full-codegen-arm.cc @@ -112,6 +112,13 @@ class JumpPatchSite BASE_EMBEDDED { }; +// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove. +int FullCodeGenerator::self_optimization_header_size() { + UNREACHABLE(); + return 24; +} + + // Generate code for a JS function. On entry to the function the receiver // and arguments have been pushed on the stack left to right. The actual // argument count matches the formal parameter count expected by the @@ -268,11 +275,11 @@ void FullCodeGenerator::Generate() { // For named function expressions, declare the function name as a // constant. 
if (scope()->is_function_scope() && scope()->function() != NULL) { - VariableDeclaration* function = scope()->function(); - ASSERT(function->proxy()->var()->mode() == CONST || - function->proxy()->var()->mode() == CONST_HARMONY); - ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); - VisitVariableDeclaration(function); + VariableProxy* proxy = scope()->function(); + ASSERT(proxy->var()->mode() == CONST || + proxy->var()->mode() == CONST_HARMONY); + ASSERT(proxy->var()->location() != Variable::UNALLOCATED); + EmitDeclaration(proxy, proxy->var()->mode(), NULL); } VisitDeclarations(scope()->declarations()); } @@ -782,51 +789,62 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, } -void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { - // The variable in the declaration always resides in the current function - // context. - ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); - if (FLAG_debug_code) { - // Check that we're not inside a with or catch context. - __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); - __ CompareRoot(r1, Heap::kWithContextMapRootIndex); - __ Check(ne, "Declaration in with context."); - __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); - __ Check(ne, "Declaration in catch context."); - } -} - - -void FullCodeGenerator::VisitVariableDeclaration( - VariableDeclaration* declaration) { +void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, + VariableMode mode, + FunctionLiteral* function) { // If it was not possible to allocate the variable at compile time, we // need to "declare" it at runtime to make sure it actually exists in the // local context. - VariableProxy* proxy = declaration->proxy(); - VariableMode mode = declaration->mode(); Variable* variable = proxy->var(); - bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; + bool binding_needs_init = (function == NULL) && + (mode == CONST || mode == CONST_HARMONY || mode == LET); switch (variable->location()) { case Variable::UNALLOCATED: - globals_->Add(variable->name()); - globals_->Add(variable->binding_needs_init() - ? isolate()->factory()->the_hole_value() - : isolate()->factory()->undefined_value()); + ++global_count_; break; case Variable::PARAMETER: case Variable::LOCAL: - if (hole_init) { - Comment cmnt(masm_, "[ VariableDeclaration"); + if (function != NULL) { + Comment cmnt(masm_, "[ Declaration"); + VisitForAccumulatorValue(function); + __ str(result_register(), StackOperand(variable)); + } else if (binding_needs_init) { + Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ str(ip, StackOperand(variable)); } break; case Variable::CONTEXT: - if (hole_init) { - Comment cmnt(masm_, "[ VariableDeclaration"); - EmitDebugCheckDeclarationContext(variable); + // The variable in the decl always resides in the current function + // context. + ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); + if (FLAG_debug_code) { + // Check that we're not inside a with or catch context. 
+ __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); + __ CompareRoot(r1, Heap::kWithContextMapRootIndex); + __ Check(ne, "Declaration in with context."); + __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); + __ Check(ne, "Declaration in catch context."); + } + if (function != NULL) { + Comment cmnt(masm_, "[ Declaration"); + VisitForAccumulatorValue(function); + __ str(result_register(), ContextOperand(cp, variable->index())); + int offset = Context::SlotOffset(variable->index()); + // We know that we have written a function, which is not a smi. + __ RecordWriteContextSlot(cp, + offset, + result_register(), + r2, + kLRHasBeenSaved, + kDontSaveFPRegs, + EMIT_REMEMBERED_SET, + OMIT_SMI_CHECK); + PrepareForBailoutForId(proxy->id(), NO_REGISTERS); + } else if (binding_needs_init) { + Comment cmnt(masm_, "[ Declaration"); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ str(ip, ContextOperand(cp, variable->index())); // No write barrier since the_hole_value is in old space. @@ -835,11 +853,13 @@ void FullCodeGenerator::VisitVariableDeclaration( break; case Variable::LOOKUP: { - Comment cmnt(masm_, "[ VariableDeclaration"); + Comment cmnt(masm_, "[ Declaration"); __ mov(r2, Operand(variable->name())); // Declaration nodes are always introduced in one of four modes. - ASSERT(mode == VAR || mode == LET || - mode == CONST || mode == CONST_HARMONY); + ASSERT(mode == VAR || + mode == CONST || + mode == CONST_HARMONY || + mode == LET); PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE; __ mov(r1, Operand(Smi::FromInt(attr))); @@ -847,7 +867,11 @@ void FullCodeGenerator::VisitVariableDeclaration( // Note: For variables we must not push an initial value (such as // 'undefined') because we may have a (legal) redeclaration and we // must not destroy the current value. - if (hole_init) { + if (function != NULL) { + __ Push(cp, r2, r1); + // Push initial value for function declaration. + VisitForStackValue(function); + } else if (binding_needs_init) { __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); __ Push(cp, r2, r1, r0); } else { @@ -861,122 +885,6 @@ void FullCodeGenerator::VisitVariableDeclaration( } -void FullCodeGenerator::VisitFunctionDeclaration( - FunctionDeclaration* declaration) { - VariableProxy* proxy = declaration->proxy(); - Variable* variable = proxy->var(); - switch (variable->location()) { - case Variable::UNALLOCATED: { - globals_->Add(variable->name()); - Handle function = - Compiler::BuildFunctionInfo(declaration->fun(), script()); - // Check for stack-overflow exception. - if (function.is_null()) return SetStackOverflow(); - globals_->Add(function); - break; - } - - case Variable::PARAMETER: - case Variable::LOCAL: { - Comment cmnt(masm_, "[ FunctionDeclaration"); - VisitForAccumulatorValue(declaration->fun()); - __ str(result_register(), StackOperand(variable)); - break; - } - - case Variable::CONTEXT: { - Comment cmnt(masm_, "[ FunctionDeclaration"); - EmitDebugCheckDeclarationContext(variable); - VisitForAccumulatorValue(declaration->fun()); - __ str(result_register(), ContextOperand(cp, variable->index())); - int offset = Context::SlotOffset(variable->index()); - // We know that we have written a function, which is not a smi. 
- __ RecordWriteContextSlot(cp, - offset, - result_register(), - r2, - kLRHasBeenSaved, - kDontSaveFPRegs, - EMIT_REMEMBERED_SET, - OMIT_SMI_CHECK); - PrepareForBailoutForId(proxy->id(), NO_REGISTERS); - break; - } - - case Variable::LOOKUP: { - Comment cmnt(masm_, "[ FunctionDeclaration"); - __ mov(r2, Operand(variable->name())); - __ mov(r1, Operand(Smi::FromInt(NONE))); - __ Push(cp, r2, r1); - // Push initial value for function declaration. - VisitForStackValue(declaration->fun()); - __ CallRuntime(Runtime::kDeclareContextSlot, 4); - break; - } - } -} - - -void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { - VariableProxy* proxy = declaration->proxy(); - Variable* variable = proxy->var(); - Handle instance = declaration->module()->interface()->Instance(); - ASSERT(!instance.is_null()); - - switch (variable->location()) { - case Variable::UNALLOCATED: { - Comment cmnt(masm_, "[ ModuleDeclaration"); - globals_->Add(variable->name()); - globals_->Add(instance); - Visit(declaration->module()); - break; - } - - case Variable::CONTEXT: { - Comment cmnt(masm_, "[ ModuleDeclaration"); - EmitDebugCheckDeclarationContext(variable); - __ mov(r1, Operand(instance)); - __ str(r1, ContextOperand(cp, variable->index())); - Visit(declaration->module()); - break; - } - - case Variable::PARAMETER: - case Variable::LOCAL: - case Variable::LOOKUP: - UNREACHABLE(); - } -} - - -void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { - VariableProxy* proxy = declaration->proxy(); - Variable* variable = proxy->var(); - switch (variable->location()) { - case Variable::UNALLOCATED: - // TODO(rossberg) - break; - - case Variable::CONTEXT: { - Comment cmnt(masm_, "[ ImportDeclaration"); - EmitDebugCheckDeclarationContext(variable); - // TODO(rossberg) - break; - } - - case Variable::PARAMETER: - case Variable::LOCAL: - case Variable::LOOKUP: - UNREACHABLE(); - } -} - - -void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { - // TODO(rossberg) -} - - void FullCodeGenerator::DeclareGlobals(Handle pairs) { // Call the runtime to declare the globals. // The context is the first argument. @@ -2363,18 +2271,6 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { } // Record source position for debugger. SetSourcePosition(expr->position()); - - // Record call targets in unoptimized code, but not in the snapshot. 
- if (!Serializer::enabled()) { - flags = static_cast(flags | RECORD_CALL_TARGET); - Handle uninitialized = - TypeFeedbackCells::UninitializedSentinel(isolate()); - Handle cell = - isolate()->factory()->NewJSGlobalPropertyCell(uninitialized); - RecordTypeFeedbackCell(expr->id(), cell); - __ mov(r2, Operand(cell)); - } - CallFunctionStub stub(arg_count, flags); __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); __ CallStub(&stub); @@ -3668,7 +3564,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); - __ add(string_length, string_length, Operand(scratch1), SetCC); + __ add(string_length, string_length, Operand(scratch1)); __ b(vs, &bailout); __ cmp(element, elements_end); __ b(lt, &loop); @@ -3705,7 +3601,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { __ b(ne, &bailout); __ tst(scratch2, Operand(0x80000000)); __ b(ne, &bailout); - __ add(string_length, string_length, Operand(scratch2), SetCC); + __ add(string_length, string_length, Operand(scratch2)); __ b(vs, &bailout); __ SmiUntag(string_length); @@ -4461,8 +4357,7 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) { void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { Scope* declaration_scope = scope()->DeclarationScope(); - if (declaration_scope->is_global_scope() || - declaration_scope->is_module_scope()) { + if (declaration_scope->is_global_scope()) { // Contexts nested in the global context have a canonical empty function // as their closure, not the anonymous closure containing the global // code. 
Pass a smi sentinel and let the runtime look up the empty diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc index 5c60f5321c..c3dd1cbaa2 100644 --- a/deps/v8/src/arm/lithium-arm.cc +++ b/deps/v8/src/arm/lithium-arm.cc @@ -108,17 +108,22 @@ void LInstruction::PrintTo(StringStream* stream) { } -void LInstruction::PrintDataTo(StringStream* stream) { +template +void LTemplateInstruction::PrintDataTo(StringStream* stream) { stream->Add("= "); - for (int i = 0; i < InputCount(); i++) { + for (int i = 0; i < inputs_.length(); i++) { if (i > 0) stream->Add(" "); - InputAt(i)->PrintTo(stream); + inputs_[i]->PrintTo(stream); } } -void LInstruction::PrintOutputOperandTo(StringStream* stream) { - if (HasResult()) result()->PrintTo(stream); +template +void LTemplateInstruction::PrintOutputOperandTo(StringStream* stream) { + for (int i = 0; i < results_.length(); i++) { + if (i > 0) stream->Add(" "); + results_[i]->PrintTo(stream); + } } @@ -727,6 +732,22 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) { } +LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment( + LInstruction* instr, int ast_id) { + ASSERT(instruction_pending_deoptimization_environment_ == NULL); + ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber); + instruction_pending_deoptimization_environment_ = instr; + pending_deoptimization_ast_id_ = ast_id; + return instr; +} + + +void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() { + instruction_pending_deoptimization_environment_ = NULL; + pending_deoptimization_ast_id_ = AstNode::kNoNumber; +} + + LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, HInstruction* hinstr, CanDeoptimize can_deoptimize) { @@ -739,10 +760,8 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, if (hinstr->HasObservableSideEffects()) { ASSERT(hinstr->next()->IsSimulate()); HSimulate* sim = HSimulate::cast(hinstr->next()); - ASSERT(instruction_pending_deoptimization_environment_ == NULL); - ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber); - instruction_pending_deoptimization_environment_ = instr; - pending_deoptimization_ast_id_ = sim->ast_id(); + instr = SetInstructionPendingDeoptimizationEnvironment( + instr, sim->ast_id()); } // If instruction does not have side-effects lazy deoptimization @@ -760,6 +779,12 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr, } +LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) { + instr->MarkAsSaveDoubles(); + return instr; +} + + LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) { ASSERT(!instr->HasPointerMap()); instr->set_pointer_map(new(zone()) LPointerMap(position_)); @@ -1270,7 +1295,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) { LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) { ASSERT(instr->value()->representation().IsInteger32()); ASSERT(instr->representation().IsInteger32()); - if (instr->HasNoUses()) return NULL; LOperand* value = UseRegisterAtStart(instr->value()); return DefineAsRegister(new(zone()) LBitNotI(value)); } @@ -1295,75 +1319,6 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) { } -bool LChunkBuilder::HasMagicNumberForDivisor(int32_t divisor) { - uint32_t divisor_abs = abs(divisor); - // Dividing by 0, 1, and powers of 2 is easy. - // Note that IsPowerOf2(0) returns true; - ASSERT(IsPowerOf2(0) == true); - if (IsPowerOf2(divisor_abs)) return true; - - // We have magic numbers for a few specific divisors. 
- // Details and proofs can be found in: - // - Hacker's Delight, Henry S. Warren, Jr. - // - The PowerPC Compiler Writer’s Guide - // and probably many others. - // - // We handle - // * - // but not - // * - int32_t power_of_2_factor = - CompilerIntrinsics::CountTrailingZeros(divisor_abs); - DivMagicNumbers magic_numbers = - DivMagicNumberFor(divisor_abs >> power_of_2_factor); - if (magic_numbers.M != InvalidDivMagicNumber.M) return true; - - return false; -} - - -HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) { - // A value with an integer representation does not need to be transformed. - if (dividend->representation().IsInteger32()) { - return dividend; - // A change from an integer32 can be replaced by the integer32 value. - } else if (dividend->IsChange() && - HChange::cast(dividend)->from().IsInteger32()) { - return HChange::cast(dividend)->value(); - } - return NULL; -} - - -HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) { - // Only optimize when we have magic numbers for the divisor. - // The standard integer division routine is usually slower than transitionning - // to VFP. - if (divisor->IsConstant() && - HConstant::cast(divisor)->HasInteger32Value()) { - HConstant* constant_val = HConstant::cast(divisor); - int32_t int32_val = constant_val->Integer32Value(); - if (LChunkBuilder::HasMagicNumberForDivisor(int32_val)) { - return constant_val->CopyToRepresentation(Representation::Integer32()); - } - } - return NULL; -} - - -LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) { - HValue* right = instr->right(); - LOperand* dividend = UseRegister(instr->left()); - LOperand* divisor = UseRegisterOrConstant(right); - LOperand* remainder = TempRegister(); - ASSERT(right->IsConstant() && - HConstant::cast(right)->HasInteger32Value() && - HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value())); - return AssignEnvironment(DefineAsRegister( - new LMathFloorOfDiv(dividend, divisor, remainder))); -} - - LInstruction* LChunkBuilder::DoMod(HMod* instr) { if (instr->representation().IsInteger32()) { ASSERT(instr->left()->representation().IsInteger32()); @@ -1798,9 +1753,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) { } -LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) { +LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) { LOperand* value = UseRegisterAtStart(instr->value()); - LInstruction* result = new(zone()) LCheckMaps(value); + LInstruction* result = new(zone()) LCheckMap(value); return AssignEnvironment(result); } @@ -2287,12 +2242,9 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) { if (pending_deoptimization_ast_id_ == instr->ast_id()) { LInstruction* result = new(zone()) LLazyBailout; result = AssignEnvironment(result); - // Store the lazy deopt environment with the instruction if needed. Right - // now it is only used for LInstanceOfKnownGlobal. 
instruction_pending_deoptimization_environment_-> - SetDeferredLazyDeoptimizationEnvironment(result->environment()); - instruction_pending_deoptimization_environment_ = NULL; - pending_deoptimization_ast_id_ = AstNode::kNoNumber; + set_deoptimization_environment(result->environment()); + ClearInstructionPendingDeoptimizationEnvironment(); return result; } @@ -2319,8 +2271,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { undefined, instr->call_kind(), instr->is_construct()); - if (instr->arguments_var() != NULL) { - inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject()); + if (instr->arguments() != NULL) { + inner->Bind(instr->arguments(), graph()->GetArgumentsObject()); } current_block_->UpdateEnvironment(inner); chunk_->AddInlinedClosure(instr->closure()); @@ -2329,21 +2281,10 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) { LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) { - LInstruction* pop = NULL; - - HEnvironment* env = current_block_->last_environment(); - - if (instr->arguments_pushed()) { - int argument_count = env->arguments_environment()->parameter_count(); - pop = new(zone()) LDrop(argument_count); - argument_count_ -= argument_count; - } - HEnvironment* outer = current_block_->last_environment()-> DiscardInlined(false); current_block_->UpdateEnvironment(outer); - - return pop; + return NULL; } diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h index ec8aac8036..62cde6e249 100644 --- a/deps/v8/src/arm/lithium-arm.h +++ b/deps/v8/src/arm/lithium-arm.h @@ -72,7 +72,7 @@ class LCodeGen; V(CheckFunction) \ V(CheckInstanceType) \ V(CheckNonSmi) \ - V(CheckMaps) \ + V(CheckMap) \ V(CheckPrototypeMaps) \ V(CheckSmi) \ V(ClampDToUint8) \ @@ -132,7 +132,6 @@ class LCodeGen; V(LoadNamedField) \ V(LoadNamedFieldPolymorphic) \ V(LoadNamedGeneric) \ - V(MathFloorOfDiv) \ V(ModI) \ V(MulI) \ V(NumberTagD) \ @@ -180,8 +179,7 @@ class LCodeGen; V(CheckMapValue) \ V(LoadFieldByIndex) \ V(DateField) \ - V(WrapReceiver) \ - V(Drop) + V(WrapReceiver) #define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \ @@ -205,14 +203,15 @@ class LInstruction: public ZoneObject { LInstruction() : environment_(NULL), hydrogen_value_(NULL), - is_call_(false) { } + is_call_(false), + is_save_doubles_(false) { } virtual ~LInstruction() { } virtual void CompileToNative(LCodeGen* generator) = 0; virtual const char* Mnemonic() const = 0; virtual void PrintTo(StringStream* stream); - virtual void PrintDataTo(StringStream* stream); - virtual void PrintOutputOperandTo(StringStream* stream); + virtual void PrintDataTo(StringStream* stream) = 0; + virtual void PrintOutputOperandTo(StringStream* stream) = 0; enum Opcode { // Declare a unique enum value for each instruction. @@ -247,12 +246,22 @@ class LInstruction: public ZoneObject { void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; } HValue* hydrogen_value() const { return hydrogen_value_; } - virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { } + void set_deoptimization_environment(LEnvironment* env) { + deoptimization_environment_.set(env); + } + LEnvironment* deoptimization_environment() const { + return deoptimization_environment_.get(); + } + bool HasDeoptimizationEnvironment() const { + return deoptimization_environment_.is_set(); + } void MarkAsCall() { is_call_ = true; } + void MarkAsSaveDoubles() { is_save_doubles_ = true; } // Interface to the register allocator and iterators. 
bool IsMarkedAsCall() const { return is_call_; } + bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; } virtual bool HasResult() const = 0; virtual LOperand* result() = 0; @@ -273,7 +282,9 @@ class LInstruction: public ZoneObject { LEnvironment* environment_; SetOncePointer pointer_map_; HValue* hydrogen_value_; + SetOncePointer deoptimization_environment_; bool is_call_; + bool is_save_doubles_; }; @@ -295,6 +306,9 @@ class LTemplateInstruction: public LInstruction { int TempCount() { return T; } LOperand* TempAt(int i) { return temps_[i]; } + virtual void PrintDataTo(StringStream* stream); + virtual void PrintOutputOperandTo(StringStream* stream); + protected: EmbeddedContainer results_; EmbeddedContainer inputs_; @@ -520,8 +534,9 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> { class LArgumentsElements: public LTemplateInstruction<1, 0, 0> { public: + LArgumentsElements() { } + DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements") - DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements) }; @@ -567,21 +582,6 @@ class LDivI: public LTemplateInstruction<1, 2, 0> { }; -class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> { - public: - LMathFloorOfDiv(LOperand* left, - LOperand* right, - LOperand* temp = NULL) { - inputs_[0] = left; - inputs_[1] = right; - temps_[0] = temp; - } - - DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div") - DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv) -}; - - class LMulI: public LTemplateInstruction<1, 2, 1> { public: LMulI(LOperand* left, LOperand* right, LOperand* temp) { @@ -834,15 +834,6 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> { DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal) Handle function() const { return hydrogen()->function(); } - LEnvironment* GetDeferredLazyDeoptimizationEnvironment() { - return lazy_deopt_env_; - } - virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { - lazy_deopt_env_ = env; - } - - private: - LEnvironment* lazy_deopt_env_; }; @@ -1387,19 +1378,6 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> { }; -class LDrop: public LTemplateInstruction<0, 0, 0> { - public: - explicit LDrop(int count) : count_(count) { } - - int count() const { return count_; } - - DECLARE_CONCRETE_INSTRUCTION(Drop, "drop") - - private: - int count_; -}; - - class LThisFunction: public LTemplateInstruction<1, 0, 0> { public: DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function") @@ -1482,7 +1460,6 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> { virtual void PrintDataTo(StringStream* stream); int arity() const { return hydrogen()->argument_count() - 1; } - Handle known_function() { return hydrogen()->known_function(); } }; @@ -1762,8 +1739,6 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> { LOperand* elements() { return inputs_[0]; } LOperand* key() { return inputs_[1]; } LOperand* value() { return inputs_[2]; } - - bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); } }; @@ -1914,14 +1889,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> { }; -class LCheckMaps: public LTemplateInstruction<0, 1, 0> { +class LCheckMap: public LTemplateInstruction<0, 1, 0> { public: - explicit LCheckMaps(LOperand* value) { + explicit LCheckMap(LOperand* value) { inputs_[0] = value; } - DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps") - DECLARE_HYDROGEN_ACCESSOR(CheckMaps) + DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map") + DECLARE_HYDROGEN_ACCESSOR(CheckMap) }; @@ 
-2299,10 +2274,6 @@ class LChunkBuilder BASE_EMBEDDED { HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) #undef DECLARE_DO - static bool HasMagicNumberForDivisor(int32_t divisor); - static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* val); - static HValue* SimplifiedDivisorForMathFloorOfDiv(HValue* val); - private: enum Status { UNUSED, @@ -2398,6 +2369,11 @@ class LChunkBuilder BASE_EMBEDDED { LInstruction* instr, HInstruction* hinstr, CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY); + LInstruction* MarkAsSaveDoubles(LInstruction* instr); + + LInstruction* SetInstructionPendingDeoptimizationEnvironment( + LInstruction* instr, int ast_id); + void ClearInstructionPendingDeoptimizationEnvironment(); LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env, int* argument_index_accumulator); diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc index 79b56fc077..82b80a2b80 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.cc +++ b/deps/v8/src/arm/lithium-codegen-arm.cc @@ -1034,100 +1034,6 @@ void LCodeGen::DoModI(LModI* instr) { } -void LCodeGen::EmitSignedIntegerDivisionByConstant( - Register result, - Register dividend, - int32_t divisor, - Register remainder, - Register scratch, - LEnvironment* environment) { - ASSERT(!AreAliased(dividend, scratch, ip)); - ASSERT(LChunkBuilder::HasMagicNumberForDivisor(divisor)); - - uint32_t divisor_abs = abs(divisor); - - int32_t power_of_2_factor = - CompilerIntrinsics::CountTrailingZeros(divisor_abs); - - switch (divisor_abs) { - case 0: - DeoptimizeIf(al, environment); - return; - - case 1: - if (divisor > 0) { - __ Move(result, dividend); - } else { - __ rsb(result, dividend, Operand(0), SetCC); - DeoptimizeIf(vs, environment); - } - // Compute the remainder. - __ mov(remainder, Operand(0)); - return; - - default: - if (IsPowerOf2(divisor_abs)) { - // Branch and condition free code for integer division by a power - // of two. - int32_t power = WhichPowerOf2(divisor_abs); - if (power > 1) { - __ mov(scratch, Operand(dividend, ASR, power - 1)); - } - __ add(scratch, dividend, Operand(scratch, LSR, 32 - power)); - __ mov(result, Operand(scratch, ASR, power)); - // Negate if necessary. - // We don't need to check for overflow because the case '-1' is - // handled separately. - if (divisor < 0) { - ASSERT(divisor != -1); - __ rsb(result, result, Operand(0)); - } - // Compute the remainder. - if (divisor > 0) { - __ sub(remainder, dividend, Operand(result, LSL, power)); - } else { - __ add(remainder, dividend, Operand(result, LSL, power)); - } - return; - } else { - // Use magic numbers for a few specific divisors. - // Details and proofs can be found in: - // - Hacker's Delight, Henry S. Warren, Jr. - // - The PowerPC Compiler Writer’s Guide - // and probably many others. - // - // We handle - // * - // but not - // * - DivMagicNumbers magic_numbers = - DivMagicNumberFor(divisor_abs >> power_of_2_factor); - // Branch and condition free code for integer division by a power - // of two. - const int32_t M = magic_numbers.M; - const int32_t s = magic_numbers.s + power_of_2_factor; - - __ mov(ip, Operand(M)); - __ smull(ip, scratch, dividend, ip); - if (M < 0) { - __ add(scratch, scratch, Operand(dividend)); - } - if (s > 0) { - __ mov(scratch, Operand(scratch, ASR, s)); - } - __ add(result, scratch, Operand(dividend, LSR, 31)); - if (divisor < 0) __ rsb(result, result, Operand(0)); - // Compute the remainder. 
- __ mov(ip, Operand(divisor)); - // This sequence could be replaced with 'mls' when - // it gets implemented. - __ mul(scratch, result, ip); - __ sub(remainder, dividend, scratch); - } - } -} - - void LCodeGen::DoDivI(LDivI* instr) { class DeferredDivI: public LDeferredCode { public: @@ -1209,34 +1115,6 @@ void LCodeGen::DoDivI(LDivI* instr) { } -void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) { - const Register result = ToRegister(instr->result()); - const Register left = ToRegister(instr->InputAt(0)); - const Register remainder = ToRegister(instr->TempAt(0)); - const Register scratch = scratch0(); - - // We only optimize this for division by constants, because the standard - // integer division routine is usually slower than transitionning to VFP. - // This could be optimized on processors with SDIV available. - ASSERT(instr->InputAt(1)->IsConstantOperand()); - int32_t divisor = ToInteger32(LConstantOperand::cast(instr->InputAt(1))); - if (divisor < 0) { - __ cmp(left, Operand(0)); - DeoptimizeIf(eq, instr->environment()); - } - EmitSignedIntegerDivisionByConstant(result, - left, - divisor, - remainder, - scratch, - instr->environment()); - // We operated a truncating division. Correct the result if necessary. - __ cmp(remainder, Operand(0)); - __ teq(remainder, Operand(divisor), ne); - __ sub(result, result, Operand(1), LeaveCC, mi); -} - - template void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr, Token::Value op) { @@ -2389,7 +2267,8 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); - LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); + ASSERT(instr->HasDeoptimizationEnvironment()); + LEnvironment* env = instr->deoptimization_environment(); safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); // Put the result value into the result register slot and // restore all registers. @@ -2885,20 +2764,16 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { Register scratch = scratch0(); Register result = ToRegister(instr->result()); - if (instr->hydrogen()->from_inlined()) { - __ sub(result, sp, Operand(2 * kPointerSize)); - } else { - // Check if the calling frame is an arguments adaptor frame. - Label done, adapted; - __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset)); - __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + // Check if the calling frame is an arguments adaptor frame. + Label done, adapted; + __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); + __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset)); + __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); - // Result is the frame pointer for the frame if not adapted and for the real - // frame below the adaptor frame if adapted. - __ mov(result, fp, LeaveCC, ne); - __ mov(result, scratch, LeaveCC, eq); - } + // Result is the frame pointer for the frame if not adapted and for the real + // frame below the adaptor frame if adapted. 
+ __ mov(result, fp, LeaveCC, ne); + __ mov(result, scratch, LeaveCC, eq); } @@ -3007,7 +2882,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) { __ b(ne, &loop); __ bind(&invoke); - ASSERT(instr->HasPointerMap()); + ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator( @@ -3032,11 +2907,6 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) { } -void LCodeGen::DoDrop(LDrop* instr) { - __ Drop(instr->count()); -} - - void LCodeGen::DoThisFunction(LThisFunction* instr) { Register result = ToRegister(instr->result()); __ LoadHeapObject(result, instr->hydrogen()->closure()); @@ -3083,8 +2953,7 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { void LCodeGen::CallKnownFunction(Handle function, int arity, LInstruction* instr, - CallKind call_kind, - R1State r1_state) { + CallKind call_kind) { bool can_invoke_directly = !function->NeedsArgumentsAdaption() || function->shared()->formal_parameter_count() == arity; @@ -3092,10 +2961,7 @@ void LCodeGen::CallKnownFunction(Handle function, RecordPosition(pointers->position()); if (can_invoke_directly) { - if (r1_state == R1_UNINITIALIZED) { - __ LoadHeapObject(r1, function); - } - + __ LoadHeapObject(r1, function); // Change context if needed. bool change_context = (info()->closure()->context() != function->context()) || @@ -3134,8 +3000,7 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { CallKnownFunction(instr->function(), instr->arity(), instr, - CALL_AS_METHOD, - R1_UNINITIALIZED); + CALL_AS_METHOD); } @@ -3559,21 +3424,13 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { ASSERT(ToRegister(instr->function()).is(r1)); ASSERT(instr->HasPointerMap()); - - if (instr->known_function().is_null()) { - LPointerMap* pointers = instr->pointer_map(); - RecordPosition(pointers->position()); - SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); - ParameterCount count(instr->arity()); - __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - } else { - CallKnownFunction(instr->known_function(), - instr->arity(), - instr, - CALL_AS_METHOD, - R1_CONTAINS_TARGET); - } + ASSERT(instr->HasDeoptimizationEnvironment()); + LPointerMap* pointers = instr->pointer_map(); + RecordPosition(pointers->position()); + SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); + ParameterCount count(instr->arity()); + __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); + __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); } @@ -3628,11 +3485,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) { void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { ASSERT(ToRegister(instr->result()).is(r0)); - CallKnownFunction(instr->target(), - instr->arity(), - instr, - CALL_AS_FUNCTION, - R1_UNINITIALIZED); + CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION); } @@ -3762,6 +3615,7 @@ void LCodeGen::DoStoreKeyedFastDoubleElement( Register scratch = scratch0(); bool key_is_constant = instr->key()->IsConstantOperand(); int constant_key = 0; + Label not_nan; // Calculate the effective address of the slot in the array to store the // double value. 
@@ -3784,15 +3638,13 @@ void LCodeGen::DoStoreKeyedFastDoubleElement( Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); } - if (instr->NeedsCanonicalization()) { - // Check for NaN. All NaNs must be canonicalized. - __ VFPCompareAndSetFlags(value, value); - // Only load canonical NaN if the comparison above set the overflow. - __ Vmov(value, - FixedDoubleArray::canonical_not_the_hole_nan_as_double(), - vs); - } + // Check for NaN. All NaNs must be canonicalized. + __ VFPCompareAndSetFlags(value, value); + + // Only load canonical NaN if the comparison above set the overflow. + __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs); + __ bind(¬_nan); __ vstr(value, scratch, 0); } @@ -4486,22 +4338,14 @@ void LCodeGen::DoCheckMapCommon(Register reg, } -void LCodeGen::DoCheckMaps(LCheckMaps* instr) { +void LCodeGen::DoCheckMap(LCheckMap* instr) { Register scratch = scratch0(); LOperand* input = instr->InputAt(0); ASSERT(input->IsRegister()); Register reg = ToRegister(input); - - Label success; - SmallMapList* map_set = instr->hydrogen()->map_set(); - for (int i = 0; i < map_set->length() - 1; i++) { - Handle map = map_set->at(i); - __ CompareMap(reg, scratch, map, &success, REQUIRE_EXACT_MAP); - __ b(eq, &success); - } - Handle map = map_set->last(); - DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment()); - __ bind(&success); + Handle map = instr->hydrogen()->map(); + DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(), + instr->environment()); } @@ -4620,14 +4464,6 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) { deferred->entry(), TAG_OBJECT); - __ bind(deferred->exit()); - if (FLAG_debug_code) { - Label is_in_new_space; - __ JumpIfInNewSpace(result, scratch, &is_in_new_space); - __ Abort("Allocated object is not in new-space"); - __ bind(&is_in_new_space); - } - // Load the initial map. Register map = scratch; __ LoadHeapObject(map, constructor); @@ -4646,14 +4482,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) { __ str(scratch, FieldMemOperand(result, property_offset)); } } + + __ bind(deferred->exit()); } void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) { Register result = ToRegister(instr->result()); Handle constructor = instr->hydrogen()->constructor(); - Handle initial_map(constructor->initial_map()); - int instance_size = initial_map->instance_size(); // TODO(3095996): Get rid of this. 
For now, we need to make the // result register contain a valid pointer because it is already @@ -4661,9 +4497,9 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) { __ mov(result, Operand(0)); PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); - __ mov(r0, Operand(Smi::FromInt(instance_size))); + __ LoadHeapObject(r0, constructor); __ push(r0); - CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); + CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr); __ StoreToSafepointRegisterSlot(r0, result); } @@ -4797,10 +4633,9 @@ void LCodeGen::EmitDeepCopy(Handle object, __ str(r2, FieldMemOperand(result, total_offset + 4)); } } else if (elements->IsFixedArray()) { - Handle fast_elements = Handle::cast(elements); for (int i = 0; i < elements_length; i++) { int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); - Handle value(fast_elements->get(i)); + Handle value = JSObject::GetElement(object, i); if (value->IsJSObject()) { Handle value_object = Handle::cast(value); __ add(r2, result, Operand(*offset)); @@ -4824,23 +4659,6 @@ void LCodeGen::EmitDeepCopy(Handle object, void LCodeGen::DoFastLiteral(LFastLiteral* instr) { int size = instr->hydrogen()->total_size(); - ElementsKind boilerplate_elements_kind = - instr->hydrogen()->boilerplate()->GetElementsKind(); - - // Deopt if the literal boilerplate ElementsKind is of a type different than - // the expected one. The check isn't necessary if the boilerplate has already - // been converted to FAST_ELEMENTS. - if (boilerplate_elements_kind != FAST_ELEMENTS) { - __ LoadHeapObject(r1, instr->hydrogen()->boilerplate()); - // Load map into r2. - __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); - // Load the map's "bit field 2". - __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset)); - // Retrieve elements_kind from bit field 2. - __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount); - __ cmp(r2, Operand(boilerplate_elements_kind)); - DeoptimizeIf(ne, instr->environment()); - } // Allocate all objects that are part of the literal in one big // allocation. This avoids multiple limit checks. @@ -5136,7 +4954,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { Register strict = scratch0(); __ mov(strict, Operand(Smi::FromInt(strict_mode_flag()))); __ Push(object, key, strict); - ASSERT(instr->HasPointerMap()); + ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator( @@ -5149,7 +4967,7 @@ void LCodeGen::DoIn(LIn* instr) { Register obj = ToRegister(instr->object()); Register key = ToRegister(instr->key()); __ Push(key, obj); - ASSERT(instr->HasPointerMap()); + ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); LPointerMap* pointers = instr->pointer_map(); RecordPosition(pointers->position()); SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h index c6a3af7e02..adb6e1bb73 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.h +++ b/deps/v8/src/arm/lithium-codegen-arm.h @@ -215,18 +215,12 @@ class LCodeGen BASE_EMBEDDED { int argc, LInstruction* instr); - enum R1State { - R1_UNINITIALIZED, - R1_CONTAINS_TARGET - }; - // Generate a direct call to a known function. Expects the function // to be in r1. 
void CallKnownFunction(Handle function, int arity, LInstruction* instr, - CallKind call_kind, - R1State r1_state); + CallKind call_kind); void LoadHeapObject(Register result, Handle object); @@ -323,17 +317,6 @@ class LCodeGen BASE_EMBEDDED { Register source, int* offset); - // Emit optimized code for integer division. - // Inputs are signed. - // All registers are clobbered. - // If 'remainder' is no_reg, it is not computed. - void EmitSignedIntegerDivisionByConstant(Register result, - Register dividend, - int32_t divisor, - Register remainder, - Register scratch, - LEnvironment* environment); - struct JumpTableEntry { explicit inline JumpTableEntry(Address entry) : label(), diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc index 42c9961b3e..857c2bf770 100644 --- a/deps/v8/src/arm/macro-assembler-arm.cc +++ b/deps/v8/src/arm/macro-assembler-arm.cc @@ -3710,28 +3710,15 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) { } -#ifdef DEBUG -bool AreAliased(Register reg1, - Register reg2, - Register reg3, - Register reg4, - Register reg5, - Register reg6) { - int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + - reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid(); - - RegList regs = 0; - if (reg1.is_valid()) regs |= reg1.bit(); - if (reg2.is_valid()) regs |= reg2.bit(); - if (reg3.is_valid()) regs |= reg3.bit(); - if (reg4.is_valid()) regs |= reg4.bit(); - if (reg5.is_valid()) regs |= reg5.bit(); - if (reg6.is_valid()) regs |= reg6.bit(); - int n_of_non_aliasing_regs = NumRegs(regs); - - return n_of_valid_regs != n_of_non_aliasing_regs; +bool AreAliased(Register r1, Register r2, Register r3, Register r4) { + if (r1.is(r2)) return true; + if (r1.is(r3)) return true; + if (r1.is(r4)) return true; + if (r2.is(r3)) return true; + if (r2.is(r4)) return true; + if (r3.is(r4)) return true; + return false; } -#endif CodePatcher::CodePatcher(byte* address, int instructions) diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h index 360f4c128c..47afa93a6e 100644 --- a/deps/v8/src/arm/macro-assembler-arm.h +++ b/deps/v8/src/arm/macro-assembler-arm.h @@ -85,14 +85,7 @@ enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK }; enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved }; -#ifdef DEBUG -bool AreAliased(Register reg1, - Register reg2, - Register reg3 = no_reg, - Register reg4 = no_reg, - Register reg5 = no_reg, - Register reg6 = no_reg); -#endif +bool AreAliased(Register r1, Register r2, Register r3, Register r4); // MacroAssembler implements a collection of frequently used macros. @@ -1328,6 +1321,7 @@ class MacroAssembler: public Assembler { }; +#ifdef ENABLE_DEBUGGER_SUPPORT // The code patcher is used to patch (typically) small parts of code e.g. for // debugging and other types of instrumentation. When using the code patcher // the exact number of bytes specified must be emitted. It is not legal to emit @@ -1357,6 +1351,7 @@ class CodePatcher { int size_; // Number of bytes of the expected patch size. MacroAssembler masm_; // Macro assembler used to generate the code. 
}; +#endif // ENABLE_DEBUGGER_SUPPORT // ----------------------------------------------------------------------------- diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc index a833624ceb..10ff2dd96c 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc @@ -452,12 +452,8 @@ void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c, void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c, uint32_t mask, Label* on_equal) { - if (c == 0) { - __ tst(current_character(), Operand(mask)); - } else { - __ and_(r0, current_character(), Operand(mask)); - __ cmp(r0, Operand(c)); - } + __ and_(r0, current_character(), Operand(mask)); + __ cmp(r0, Operand(c)); BranchOrBacktrack(eq, on_equal); } @@ -465,12 +461,8 @@ void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c, void RegExpMacroAssemblerARM::CheckNotCharacterAfterAnd(unsigned c, unsigned mask, Label* on_not_equal) { - if (c == 0) { - __ tst(current_character(), Operand(mask)); - } else { - __ and_(r0, current_character(), Operand(mask)); - __ cmp(r0, Operand(c)); - } + __ and_(r0, current_character(), Operand(mask)); + __ cmp(r0, Operand(c)); BranchOrBacktrack(ne, on_not_equal); } @@ -488,44 +480,6 @@ void RegExpMacroAssemblerARM::CheckNotCharacterAfterMinusAnd( } -void RegExpMacroAssemblerARM::CheckCharacterInRange( - uc16 from, - uc16 to, - Label* on_in_range) { - __ sub(r0, current_character(), Operand(from)); - __ cmp(r0, Operand(to - from)); - BranchOrBacktrack(ls, on_in_range); // Unsigned lower-or-same condition. -} - - -void RegExpMacroAssemblerARM::CheckCharacterNotInRange( - uc16 from, - uc16 to, - Label* on_not_in_range) { - __ sub(r0, current_character(), Operand(from)); - __ cmp(r0, Operand(to - from)); - BranchOrBacktrack(hi, on_not_in_range); // Unsigned higher condition. -} - - -void RegExpMacroAssemblerARM::CheckBitInTable( - Handle table, - Label* on_bit_set) { - __ mov(r0, Operand(table)); - if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) { - __ and_(r1, current_character(), Operand(kTableSize - 1)); - __ add(r1, r1, Operand(ByteArray::kHeaderSize - kHeapObjectTag)); - } else { - __ add(r1, - current_character(), - Operand(ByteArray::kHeaderSize - kHeapObjectTag)); - } - __ ldrb(r0, MemOperand(r0, r1)); - __ cmp(r0, Operand(0)); - BranchOrBacktrack(ne, on_bit_set); -} - - bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h index 14f984f567..5c8ed0693f 100644 --- a/deps/v8/src/arm/regexp-macro-assembler-arm.h +++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h @@ -79,14 +79,6 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler { uc16 minus, uc16 mask, Label* on_not_equal); - virtual void CheckCharacterInRange(uc16 from, - uc16 to, - Label* on_in_range); - virtual void CheckCharacterNotInRange(uc16 from, - uc16 to, - Label* on_not_in_range); - virtual void CheckBitInTable(Handle table, Label* on_bit_set); - // Checks whether the given offset from the current position is before // the end of the string. 
virtual void CheckPosition(int cp_offset, Label* on_outside_input); diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc index 49c0982301..d514b607ae 100644 --- a/deps/v8/src/arm/stub-cache-arm.cc +++ b/deps/v8/src/arm/stub-cache-arm.cc @@ -582,8 +582,6 @@ static void PushInterceptorArguments(MacroAssembler* masm, __ push(holder); __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset)); __ push(scratch); - __ mov(scratch, Operand(ExternalReference::isolate_address())); - __ push(scratch); } @@ -598,7 +596,7 @@ static void CompileCallLoadPropertyWithInterceptor( ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), masm->isolate()); - __ mov(r0, Operand(6)); + __ mov(r0, Operand(5)); __ mov(r1, Operand(ref)); CEntryStub stub(1); @@ -606,9 +604,9 @@ static void CompileCallLoadPropertyWithInterceptor( } -static const int kFastApiCallArguments = 4; +static const int kFastApiCallArguments = 3; -// Reserves space for the extra arguments to API function in the +// Reserves space for the extra arguments to FastHandleApiCall in the // caller's frame. // // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall. @@ -634,8 +632,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, // -- sp[0] : holder (set by CheckPrototypes) // -- sp[4] : callee JS function // -- sp[8] : call data - // -- sp[12] : isolate - // -- sp[16] : last JS argument + // -- sp[12] : last JS argument // -- ... // -- sp[(argc + 3) * 4] : first JS argument // -- sp[(argc + 4) * 4] : receiver @@ -645,7 +642,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, __ LoadHeapObject(r5, function); __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset)); - // Pass the additional arguments. + // Pass the additional arguments FastHandleApiCall expects. Handle api_call_info = optimization.api_call_info(); Handle call_data(api_call_info->data()); if (masm->isolate()->heap()->InNewSpace(*call_data)) { @@ -654,15 +651,13 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, } else { __ Move(r6, call_data); } - __ mov(r7, Operand(ExternalReference::isolate_address())); - // Store JS function, call data and isolate. - __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit()); + // Store JS function and call data. + __ stm(ib, sp, r5.bit() | r6.bit()); - // Prepare arguments. - __ add(r2, sp, Operand(3 * kPointerSize)); + // r2 points to call data as expected by Arguments + // (refer to layout above). + __ add(r2, sp, Operand(2 * kPointerSize)); - // Allocate the v8::Arguments structure in the arguments' space since - // it's not controlled by GC. const int kApiStackSpace = 4; FrameScope frame_scope(masm, StackFrame::MANUAL); @@ -671,9 +666,9 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm, // r0 = v8::Arguments& // Arguments is after the return address. __ add(r0, sp, Operand(1 * kPointerSize)); - // v8::Arguments::implicit_args_ + // v8::Arguments::implicit_args = data __ str(r2, MemOperand(r0, 0 * kPointerSize)); - // v8::Arguments::values_ + // v8::Arguments::values = last argument __ add(ip, r2, Operand(argc * kPointerSize)); __ str(ip, MemOperand(r0, 1 * kPointerSize)); // v8::Arguments::length_ = argc @@ -850,7 +845,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { __ CallExternalReference( ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), masm->isolate()), - 6); + 5); // Restore the name_ register. __ pop(name_); // Leave the internal frame. 
@@ -1209,9 +1204,7 @@ void StubCompiler::GenerateLoadCallback(Handle object, } else { __ Move(scratch3, Handle(callback->data())); } - __ Push(reg, scratch3); - __ mov(scratch3, Operand(ExternalReference::isolate_address())); - __ Push(scratch3, name_reg); + __ Push(reg, scratch3, name_reg); __ mov(r0, sp); // r0 = Handle const int kApiStackSpace = 1; @@ -1223,7 +1216,7 @@ void StubCompiler::GenerateLoadCallback(Handle object, __ str(scratch2, MemOperand(sp, 1 * kPointerSize)); __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo& - const int kStackUnwindSpace = 5; + const int kStackUnwindSpace = 4; Address getter_address = v8::ToCData
(callback->getter()); ApiFunction fun(getter_address); ExternalReference ref = @@ -1351,19 +1344,20 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, if (!receiver.is(holder_reg)) { ASSERT(scratch1.is(holder_reg)); __ Push(receiver, holder_reg); + __ ldr(scratch3, + FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); + __ Push(scratch3, scratch2, name_reg); } else { __ push(receiver); - __ push(holder_reg); + __ ldr(scratch3, + FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); + __ Push(holder_reg, scratch3, scratch2, name_reg); } - __ ldr(scratch3, - FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); - __ mov(scratch1, Operand(ExternalReference::isolate_address())); - __ Push(scratch3, scratch1, scratch2, name_reg); ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadCallbackProperty), masm()->isolate()); - __ TailCallExternalReference(ref, 6, 1); + __ TailCallExternalReference(ref, 5, 1); } } else { // !compile_followup_inline // Call the runtime system to load the interceptor. @@ -1377,7 +1371,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle object, ExternalReference ref = ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate()); - __ TailCallExternalReference(ref, 6, 1); + __ TailCallExternalReference(ref, 5, 1); } } @@ -1745,7 +1739,7 @@ Handle CallStubCompiler::CompileArrayPopCall( // We can't address the last element in one operation. Compute the more // expensive shift first, and use an offset later on. __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); - __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); + __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); __ cmp(r0, r6); __ b(eq, &call_builtin); @@ -1753,7 +1747,7 @@ Handle CallStubCompiler::CompileArrayPopCall( __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset)); // Fill with the hole. - __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize)); + __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag)); __ Drop(argc + 1); __ Ret(); @@ -3383,44 +3377,6 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) { } -static void GenerateSmiKeyCheck(MacroAssembler* masm, - Register key, - Register scratch0, - Register scratch1, - DwVfpRegister double_scratch0, - Label* fail) { - if (CpuFeatures::IsSupported(VFP3)) { - CpuFeatures::Scope scope(VFP3); - Label key_ok; - // Check for smi or a smi inside a heap number. We convert the heap - // number and check if the conversion is exact and fits into the smi - // range. - __ JumpIfSmi(key, &key_ok); - __ CheckMap(key, - scratch0, - Heap::kHeapNumberMapRootIndex, - fail, - DONT_DO_SMI_CHECK); - __ sub(ip, key, Operand(kHeapObjectTag)); - __ vldr(double_scratch0, ip, HeapNumber::kValueOffset); - __ EmitVFPTruncate(kRoundToZero, - double_scratch0.low(), - double_scratch0, - scratch0, - scratch1, - kCheckForInexactConversion); - __ b(ne, fail); - __ vmov(scratch0, double_scratch0.low()); - __ TrySmiTag(scratch0, fail, scratch1); - __ mov(key, scratch0); - __ bind(&key_ok); - } else { - // Check that the key is a smi. - __ JumpIfNotSmi(key, fail); - } -} - - void KeyedLoadStubCompiler::GenerateLoadExternalArray( MacroAssembler* masm, ElementsKind elements_kind) { @@ -3437,8 +3393,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. 
- // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); + // Check that the key is a smi. + __ JumpIfNotSmi(key, &miss_force_generic); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); // r3: elements array @@ -3768,8 +3724,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); + // Check that the key is a smi. + __ JumpIfNotSmi(key, &miss_force_generic); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); @@ -4094,8 +4050,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) { // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic); + // Check that the key is a smi. + __ JumpIfNotSmi(r0, &miss_force_generic); // Get the elements array. __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset)); @@ -4146,8 +4102,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); + // Check that the key is a smi. + __ JumpIfNotSmi(key_reg, &miss_force_generic); // Get the elements array. __ ldr(elements_reg, @@ -4222,8 +4178,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); + // Check that the key is a smi. + __ JumpIfNotSmi(key_reg, &miss_force_generic); if (elements_kind == FAST_SMI_ONLY_ELEMENTS) { __ JumpIfNotSmi(value_reg, &transition_elements_kind); @@ -4389,9 +4345,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( // This stub is meant to be tail-jumped to, the receiver must already // have been verified by the caller to not be a smi. - - // Check that the key is a smi or a heap number convertible to a smi. - GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic); + __ JumpIfNotSmi(key_reg, &miss_force_generic); __ ldr(elements_reg, FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js index a1cc5b6a7d..00a4fee5cd 100644 --- a/deps/v8/src/array.js +++ b/deps/v8/src/array.js @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2010 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -465,19 +465,15 @@ function ArrayPush() { } -// Returns an array containing the array elements of the object followed -// by the array elements of each argument in order. See ECMA-262, -// section 15.4.4.7. 
function ArrayConcat(arg1) { // length == 1 if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) { throw MakeTypeError("called_on_null_or_undefined", ["Array.prototype.concat"]); } - var array = ToObject(this); var arg_count = %_ArgumentsLength(); var arrays = new InternalArray(1 + arg_count); - arrays[0] = array; + arrays[0] = this; for (var i = 0; i < arg_count; i++) { arrays[i + 1] = %_Arguments(i); } @@ -1031,28 +1027,13 @@ function ArrayFilter(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(); var accumulator_length = 0; - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (%_CallFunction(receiver, element, i, array, f)) { - accumulator[accumulator_length++] = element; - } - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) { - accumulator[accumulator_length++] = element; - } + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (%_CallFunction(receiver, element, i, array, f)) { + accumulator[accumulator_length++] = element; } } - // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1078,24 +1059,12 @@ function ArrayForEach(f, receiver) { } else if (!IS_SPEC_OBJECT(receiver)) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - %_CallFunction(receiver, element, i, array, f); - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - %_CallFunction(receiver, element, i, array, f); - } + + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + %_CallFunction(receiver, element, i, array, f); } - // End of duplicate. } } @@ -1122,24 +1091,11 @@ function ArraySome(f, receiver) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (%_CallFunction(receiver, element, i, array, f)) return true; - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (%_CallFunction(receiver, element, i, array, f)) return true; - } + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (%_CallFunction(receiver, element, i, array, f)) return true; } - // End of duplicate. } return false; } @@ -1165,24 +1121,11 @@ function ArrayEvery(f, receiver) { receiver = ToObject(receiver); } - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - if (!%_CallFunction(receiver, element, i, array, f)) return false; - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. 
- for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - if (!%_CallFunction(receiver, element, i, array, f)) return false; - } + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + if (!%_CallFunction(receiver, element, i, array, f)) return false; } - // End of duplicate. } return true; } @@ -1209,24 +1152,11 @@ function ArrayMap(f, receiver) { var result = new $Array(); var accumulator = new InternalArray(length); - if (%DebugCallbackSupportsStepping(f)) { - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(f); - accumulator[i] = %_CallFunction(receiver, element, i, array, f); - } + for (var i = 0; i < length; i++) { + if (i in array) { + var element = array[i]; + accumulator[i] = %_CallFunction(receiver, element, i, array, f); } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (var i = 0; i < length; i++) { - if (i in array) { - var element = array[i]; - accumulator[i] = %_CallFunction(receiver, element, i, array, f); - } - } - // End of duplicate. } %MoveArrayContents(accumulator, result); return result; @@ -1381,27 +1311,11 @@ function ArrayReduce(callback, current) { } var receiver = %GetDefaultReceiver(callback); - - if (%DebugCallbackSupportsStepping(callback)) { - for (; i < length; i++) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(callback); - current = - %_CallFunction(receiver, current, element, i, array, callback); - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (; i < length; i++) { - if (i in array) { - var element = array[i]; - current = - %_CallFunction(receiver, current, element, i, array, callback); - } + for (; i < length; i++) { + if (i in array) { + var element = array[i]; + current = %_CallFunction(receiver, current, element, i, array, callback); } - // End of duplicate. } return current; } @@ -1434,27 +1348,11 @@ function ArrayReduceRight(callback, current) { } var receiver = %GetDefaultReceiver(callback); - - if (%DebugCallbackSupportsStepping(callback)) { - for (; i >= 0; i--) { - if (i in array) { - var element = array[i]; - // Prepare break slots for debugger step in. - %DebugPrepareStepInIfStepping(callback); - current = - %_CallFunction(receiver, current, element, i, array, callback); - } - } - } else { - // This is a duplicate of the previous loop sans debug stepping. - for (; i >= 0; i--) { - if (i in array) { - var element = array[i]; - current = - %_CallFunction(receiver, current, element, i, array, callback); - } + for (; i >= 0; i--) { + if (i in array) { + var element = array[i]; + current = %_CallFunction(receiver, current, element, i, array, callback); } - // End of duplicate. 
} return current; } diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index be2564960d..4944202f07 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -99,7 +99,21 @@ struct DoubleConstant BASE_EMBEDDED { double the_hole_nan; }; -static DoubleConstant double_constants; +struct InitializeDoubleConstants { + static void Construct(DoubleConstant* double_constants) { + double_constants->min_int = kMinInt; + double_constants->one_half = 0.5; + double_constants->minus_zero = -0.0; + double_constants->uint8_max_value = 255; + double_constants->zero = 0.0; + double_constants->canonical_non_hole_nan = OS::nan_value(); + double_constants->the_hole_nan = BitCast<double>(kHoleNanInt64); + double_constants->negative_infinity = -V8_INFINITY; + } +}; + +static LazyInstance<DoubleConstant, InitializeDoubleConstants>::type + double_constants = LAZY_INSTANCE_INITIALIZER; const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; @@ -712,18 +726,6 @@ void RelocInfo::Verify() { // ----------------------------------------------------------------------------- // Implementation of ExternalReference -void ExternalReference::SetUp() { - double_constants.min_int = kMinInt; - double_constants.one_half = 0.5; - double_constants.minus_zero = -0.0; - double_constants.uint8_max_value = 255; - double_constants.zero = 0.0; - double_constants.canonical_non_hole_nan = OS::nan_value(); - double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64); - double_constants.negative_infinity = -V8_INFINITY; -} - - ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate) : address_(Redirect(isolate, Builtins::c_function_address(id))) {} @@ -956,47 +958,50 @@ ExternalReference ExternalReference::scheduled_exception_address( ExternalReference ExternalReference::address_of_min_int() { - return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->min_int)); } ExternalReference ExternalReference::address_of_one_half() { - return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->one_half)); } ExternalReference ExternalReference::address_of_minus_zero() { - return ExternalReference( - reinterpret_cast<void*>(&double_constants.minus_zero)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->minus_zero)); } ExternalReference ExternalReference::address_of_zero() { - return ExternalReference(reinterpret_cast<void*>(&double_constants.zero)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->zero)); } ExternalReference ExternalReference::address_of_uint8_max_value() { - return ExternalReference( - reinterpret_cast<void*>(&double_constants.uint8_max_value)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->uint8_max_value)); } ExternalReference ExternalReference::address_of_negative_infinity() { - return ExternalReference( - reinterpret_cast<void*>(&double_constants.negative_infinity)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->negative_infinity)); } ExternalReference ExternalReference::address_of_canonical_non_hole_nan() { - return ExternalReference( - reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->canonical_non_hole_nan)); } ExternalReference ExternalReference::address_of_the_hole_nan() { - return ExternalReference( - reinterpret_cast<void*>(&double_constants.the_hole_nan)); + return ExternalReference(reinterpret_cast<void*>( + &double_constants.Pointer()->the_hole_nan)); } @@ -1153,20 +1158,6 @@ double power_double_int(double x, int y) { double power_double_double(double x, double y) { -#ifdef __MINGW64_VERSION_MAJOR - // MinGW64 has a custom implementation for pow. This handles certain - // special cases that are different. - if ((x == 0.0 || isinf(x)) && isfinite(y)) { - double f; - if (modf(y, &f) != 0.0) return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0; - } - - if (x == 2.0) { - int y_int = static_cast<int>(y); - if (y == y_int) return ldexp(1.0, y_int); - } -#endif - // The checks for special cases can be dropped in ia32 because it has already // been done in generated code before bailing out here. if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) return OS::nan_value();
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 05fe320ad0..f960b58691 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -539,8 +539,6 @@ class ExternalReference BASE_EMBEDDED { DIRECT_GETTER_CALL }; - static void SetUp(); - typedef void* ExternalReferenceRedirector(void* original, Type type); ExternalReference(Builtins::CFunctionId id, Isolate* isolate);
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc index 6f9fd7afb2..4b6ae680a4 100644 --- a/deps/v8/src/ast.cc +++ b/deps/v8/src/ast.cc @@ -962,14 +962,6 @@ RegExpDisjunction::RegExpDisjunction(ZoneList<RegExpTree*>* alternatives) } -static int IncreaseBy(int previous, int increase) { - if (RegExpTree::kInfinity - previous < increase) { - return RegExpTree::kInfinity; - } else { - return previous + increase; - } -} - RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes) : nodes_(nodes) { ASSERT(nodes->length() > 1); @@ -977,10 +969,13 @@ RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes) max_match_ = 0; for (int i = 0; i < nodes->length(); i++) { RegExpTree* node = nodes->at(i); - int node_min_match = node->min_match(); - min_match_ = IncreaseBy(min_match_, node_min_match); + min_match_ += node->min_match(); int node_max_match = node->max_match(); - max_match_ = IncreaseBy(max_match_, node_max_match); + if (kInfinity - max_match_ < node_max_match) { + max_match_ = kInfinity; + } else { + max_match_ += node->max_match(); + } } } @@ -998,78 +993,138 @@ CaseClause::CaseClause(Isolate* isolate, } -#define REGULAR_NODE(NodeType) \ - void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ - increase_node_count(); \ - } -#define DONT_OPTIMIZE_NODE(NodeType) \ +#define INCREASE_NODE_COUNT(NodeType) \ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ increase_node_count(); \ - add_flag(kDontOptimize); \ - add_flag(kDontInline); \ - add_flag(kDontSelfOptimize); \ - } -#define DONT_INLINE_NODE(NodeType) \ - void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ - increase_node_count(); \ - add_flag(kDontInline); \ - } -#define DONT_SELFOPTIMIZE_NODE(NodeType) \ - void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \ - increase_node_count(); \ - add_flag(kDontSelfOptimize); \ } -REGULAR_NODE(VariableDeclaration) -REGULAR_NODE(FunctionDeclaration) -REGULAR_NODE(Block) -REGULAR_NODE(ExpressionStatement) -REGULAR_NODE(EmptyStatement) -REGULAR_NODE(IfStatement) -REGULAR_NODE(ContinueStatement) -REGULAR_NODE(BreakStatement) -REGULAR_NODE(ReturnStatement) -REGULAR_NODE(SwitchStatement) -REGULAR_NODE(Conditional) -REGULAR_NODE(Literal) -REGULAR_NODE(ObjectLiteral) -REGULAR_NODE(Assignment) -REGULAR_NODE(Throw) -REGULAR_NODE(Property) -REGULAR_NODE(UnaryOperation) -REGULAR_NODE(CountOperation)
-REGULAR_NODE(BinaryOperation) -REGULAR_NODE(CompareOperation) -REGULAR_NODE(ThisFunction) -REGULAR_NODE(Call) -REGULAR_NODE(CallNew) -// In theory, for VariableProxy we'd have to add: -// if (node->var()->IsLookupSlot()) add_flag(kDontInline); -// But node->var() is usually not bound yet at VariableProxy creation time, and -// LOOKUP variables only result from constructs that cannot be inlined anyway. -REGULAR_NODE(VariableProxy) - -DONT_OPTIMIZE_NODE(ModuleDeclaration) -DONT_OPTIMIZE_NODE(ImportDeclaration) -DONT_OPTIMIZE_NODE(ExportDeclaration) -DONT_OPTIMIZE_NODE(ModuleLiteral) -DONT_OPTIMIZE_NODE(ModuleVariable) -DONT_OPTIMIZE_NODE(ModulePath) -DONT_OPTIMIZE_NODE(ModuleUrl) -DONT_OPTIMIZE_NODE(WithStatement) -DONT_OPTIMIZE_NODE(TryCatchStatement) -DONT_OPTIMIZE_NODE(TryFinallyStatement) -DONT_OPTIMIZE_NODE(DebuggerStatement) -DONT_OPTIMIZE_NODE(SharedFunctionInfoLiteral) - -DONT_INLINE_NODE(FunctionLiteral) -DONT_INLINE_NODE(RegExpLiteral) // TODO(1322): Allow materialized literals. -DONT_INLINE_NODE(ArrayLiteral) // TODO(1322): Allow materialized literals. - -DONT_SELFOPTIMIZE_NODE(DoWhileStatement) -DONT_SELFOPTIMIZE_NODE(WhileStatement) -DONT_SELFOPTIMIZE_NODE(ForStatement) -DONT_SELFOPTIMIZE_NODE(ForInStatement) +INCREASE_NODE_COUNT(VariableDeclaration) +INCREASE_NODE_COUNT(FunctionDeclaration) +INCREASE_NODE_COUNT(ModuleDeclaration) +INCREASE_NODE_COUNT(ImportDeclaration) +INCREASE_NODE_COUNT(ExportDeclaration) +INCREASE_NODE_COUNT(ModuleLiteral) +INCREASE_NODE_COUNT(ModuleVariable) +INCREASE_NODE_COUNT(ModulePath) +INCREASE_NODE_COUNT(ModuleUrl) +INCREASE_NODE_COUNT(Block) +INCREASE_NODE_COUNT(ExpressionStatement) +INCREASE_NODE_COUNT(EmptyStatement) +INCREASE_NODE_COUNT(IfStatement) +INCREASE_NODE_COUNT(ContinueStatement) +INCREASE_NODE_COUNT(BreakStatement) +INCREASE_NODE_COUNT(ReturnStatement) +INCREASE_NODE_COUNT(Conditional) +INCREASE_NODE_COUNT(Literal) +INCREASE_NODE_COUNT(ObjectLiteral) +INCREASE_NODE_COUNT(Assignment) +INCREASE_NODE_COUNT(Throw) +INCREASE_NODE_COUNT(Property) +INCREASE_NODE_COUNT(UnaryOperation) +INCREASE_NODE_COUNT(CountOperation) +INCREASE_NODE_COUNT(BinaryOperation) +INCREASE_NODE_COUNT(CompareOperation) +INCREASE_NODE_COUNT(ThisFunction) +INCREASE_NODE_COUNT(Call) +INCREASE_NODE_COUNT(CallNew) + +#undef INCREASE_NODE_COUNT + + +void AstConstructionVisitor::VisitWithStatement(WithStatement* node) { + increase_node_count(); + add_flag(kDontOptimize); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) { + increase_node_count(); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) { + increase_node_count(); + add_flag(kDontSelfOptimize); +} + + +void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) { + increase_node_count(); + add_flag(kDontSelfOptimize); +} + + +void AstConstructionVisitor::VisitForStatement(ForStatement* node) { + increase_node_count(); + add_flag(kDontSelfOptimize); +} + + +void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) { + increase_node_count(); + add_flag(kDontSelfOptimize); +} + + +void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) { + increase_node_count(); + add_flag(kDontOptimize); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitTryFinallyStatement( + TryFinallyStatement* node) { + increase_node_count(); + add_flag(kDontOptimize); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* 
node) { + increase_node_count(); + add_flag(kDontOptimize); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) { + increase_node_count(); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitSharedFunctionInfoLiteral( + SharedFunctionInfoLiteral* node) { + increase_node_count(); + add_flag(kDontOptimize); + add_flag(kDontInline); +} + + +void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) { + increase_node_count(); + // In theory, we'd have to add: + // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); } + // However, node->var() is usually not bound yet at VariableProxy creation + // time, and LOOKUP variables only result from constructs that cannot + // be inlined anyway. +} + + +void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) { + increase_node_count(); + add_flag(kDontInline); // TODO(1322): Allow materialized literals. +} + + +void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) { + increase_node_count(); + add_flag(kDontInline); // TODO(1322): Allow materialized literals. +} + void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) { increase_node_count(); @@ -1087,11 +1142,6 @@ void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) { } } -#undef REGULAR_NODE -#undef DONT_OPTIMIZE_NODE -#undef DONT_INLINE_NODE -#undef DONT_SELFOPTIMIZE_NODE - Handle Literal::ToString() { if (handle_->IsString()) return Handle::cast(handle_); diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h index dad80576bd..b827302ebd 100644 --- a/deps/v8/src/ast.h +++ b/deps/v8/src/ast.h @@ -270,7 +270,6 @@ class SmallMapList { void Reserve(int capacity) { list_.Reserve(capacity); } void Clear() { list_.Clear(); } - void Sort() { list_.Sort(); } bool is_empty() const { return list_.is_empty(); } int length() const { return list_.length(); } @@ -421,8 +420,8 @@ class Block: public BreakableStatement { ZoneList* statements() { return &statements_; } bool is_initializer_block() const { return is_initializer_block_; } - Scope* scope() const { return scope_; } - void set_scope(Scope* scope) { scope_ = scope; } + Scope* block_scope() const { return block_scope_; } + void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; } protected: template friend class AstNodeFactory; @@ -434,13 +433,13 @@ class Block: public BreakableStatement { : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY), statements_(capacity), is_initializer_block_(is_initializer_block), - scope_(NULL) { + block_scope_(NULL) { } private: ZoneList statements_; bool is_initializer_block_; - Scope* scope_; + Scope* block_scope_; }; @@ -608,7 +607,6 @@ class ModuleLiteral: public Module { DECLARE_NODE_TYPE(ModuleLiteral) Block* body() const { return body_; } - Handle context() const { return context_; } protected: template friend class AstNodeFactory; @@ -620,7 +618,6 @@ class ModuleLiteral: public Module { private: Block* body_; - Handle context_; }; diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc index c65c68c2d7..0e95b4b839 100644 --- a/deps/v8/src/bootstrapper.cc +++ b/deps/v8/src/bootstrapper.cc @@ -1011,7 +1011,7 @@ bool Genesis::InitializeGlobal(Handle inner_global, proto_map->set_prototype(global_context()->initial_object_prototype()); Handle proto = factory->NewJSObjectFromMap(proto_map); proto->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex, - heap->query_colon_symbol()); + heap->empty_string()); proto->InObjectPropertyAtPut(JSRegExp::kGlobalFieldIndex, 
heap->false_value()); proto->InObjectPropertyAtPut(JSRegExp::kIgnoreCaseFieldIndex, @@ -2159,7 +2159,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, Handle<DescriptorArray> descs = Handle<DescriptorArray>(from->map()->instance_descriptors()); for (int i = 0; i < descs->number_of_descriptors(); i++) { - PropertyDetails details = descs->GetDetails(i); + PropertyDetails details = PropertyDetails(descs->GetDetails(i)); switch (details.type()) { case FIELD: { HandleScope inner;
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc index 84a0c3d19c..01e88f5593 100644 --- a/deps/v8/src/builtins.cc +++ b/deps/v8/src/builtins.cc @@ -1103,7 +1103,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper( CustomArguments custom(isolate); v8::ImplementationUtilities::PrepareArgumentsData(custom.end(), - isolate, data_obj, *function, raw_holder); + data_obj, *function, raw_holder); v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( custom.end(), @@ -1143,6 +1143,68 @@ BUILTIN(HandleApiCallConstruct) { } +#ifdef DEBUG + +static void VerifyTypeCheck(Handle<JSObject> object, + Handle<JSFunction> function) { + ASSERT(function->shared()->IsApiFunction()); + FunctionTemplateInfo* info = function->shared()->get_api_func_data(); + if (info->signature()->IsUndefined()) return; + SignatureInfo* signature = SignatureInfo::cast(info->signature()); + Object* receiver_type = signature->receiver(); + if (receiver_type->IsUndefined()) return; + FunctionTemplateInfo* type = FunctionTemplateInfo::cast(receiver_type); + ASSERT(object->IsInstanceOf(type)); +} + +#endif + + +BUILTIN(FastHandleApiCall) { + ASSERT(!CalledAsConstructor(isolate)); + Heap* heap = isolate->heap(); + const bool is_construct = false; + + // We expect four more arguments: callback, function, call data, and holder. + const int args_length = args.length() - 4; + ASSERT(args_length >= 0); + + Object* callback_obj = args[args_length]; + + v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( + &args[args_length + 1], + &args[0] - 1, + args_length - 1, + is_construct); + +#ifdef DEBUG + VerifyTypeCheck(Utils::OpenHandle(*new_args.Holder()), + Utils::OpenHandle(*new_args.Callee())); +#endif + HandleScope scope(isolate); + Object* result; + v8::Handle<v8::Value> value; + { + // Leaving JavaScript. + VMState state(isolate, EXTERNAL); + ExternalCallbackScope call_scope(isolate, + v8::ToCData<Address>
(callback_obj)); + v8::InvocationCallback callback = + v8::ToCData(callback_obj); + + value = callback(new_args); + } + if (value.IsEmpty()) { + result = heap->undefined_value(); + } else { + result = *reinterpret_cast(*value); + } + + RETURN_IF_SCHEDULED_EXCEPTION(isolate); + return result; +} + + // Helper function to handle calls to non-function objects created through the // API. The object can be called as either a constructor (using new) or just as // a function (without new). @@ -1181,7 +1243,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor( CustomArguments custom(isolate); v8::ImplementationUtilities::PrepareArgumentsData(custom.end(), - isolate, call_data->data(), constructor, obj); + call_data->data(), constructor, obj); v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( custom.end(), &args[0] - 1, diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h index 3ea33938eb..f079139d45 100644 --- a/deps/v8/src/builtins.h +++ b/deps/v8/src/builtins.h @@ -56,6 +56,7 @@ enum BuiltinExtraArguments { V(ArrayConcat, NO_EXTRA_ARGUMENTS) \ \ V(HandleApiCall, NEEDS_CALLED_FUNCTION) \ + V(FastHandleApiCall, NO_EXTRA_ARGUMENTS) \ V(HandleApiCallConstruct, NEEDS_CALLED_FUNCTION) \ V(HandleApiCallAsFunction, NO_EXTRA_ARGUMENTS) \ V(HandleApiCallAsConstructor, NO_EXTRA_ARGUMENTS) \ diff --git a/deps/v8/src/bytecodes-irregexp.h b/deps/v8/src/bytecodes-irregexp.h index c7cc66e527..b13efb36f8 100644 --- a/deps/v8/src/bytecodes-irregexp.h +++ b/deps/v8/src/bytecodes-irregexp.h @@ -72,23 +72,24 @@ V(AND_CHECK_4_CHARS, 27, 16) /* bc8 pad24 uint32 uint32 addr32 */ \ V(AND_CHECK_CHAR, 28, 12) /* bc8 pad8 uint16 uint32 addr32 */ \ V(AND_CHECK_NOT_4_CHARS, 29, 16) /* bc8 pad24 uint32 uint32 addr32 */ \ V(AND_CHECK_NOT_CHAR, 30, 12) /* bc8 pad8 uint16 uint32 addr32 */ \ -V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 uc16 addr32 */ \ -V(CHECK_CHAR_IN_RANGE, 32, 12) /* bc8 pad24 uc16 uc16 addr32 */ \ -V(CHECK_CHAR_NOT_IN_RANGE, 33, 12) /* bc8 pad24 uc16 uc16 addr32 */ \ -V(CHECK_BIT_IN_TABLE, 34, 24) /* bc8 pad24 addr32 bits128 */ \ -V(CHECK_LT, 35, 8) /* bc8 pad8 uc16 addr32 */ \ -V(CHECK_GT, 36, 8) /* bc8 pad8 uc16 addr32 */ \ -V(CHECK_NOT_BACK_REF, 37, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_NOT_BACK_REF_NO_CASE, 38, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_NOT_REGS_EQUAL, 39, 12) /* bc8 regidx24 reg_idx32 addr32 */ \ -V(CHECK_REGISTER_LT, 40, 12) /* bc8 reg_idx24 value32 addr32 */ \ -V(CHECK_REGISTER_GE, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \ -V(CHECK_REGISTER_EQ_POS, 42, 8) /* bc8 reg_idx24 addr32 */ \ -V(CHECK_AT_START, 43, 8) /* bc8 pad24 addr32 */ \ -V(CHECK_NOT_AT_START, 44, 8) /* bc8 pad24 addr32 */ \ -V(CHECK_GREEDY, 45, 8) /* bc8 pad24 addr32 */ \ -V(ADVANCE_CP_AND_GOTO, 46, 8) /* bc8 offset24 addr32 */ \ -V(SET_CURRENT_POSITION_FROM_END, 47, 4) /* bc8 idx24 */ +V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 addr32 */ \ +V(CHECK_LT, 32, 8) /* bc8 pad8 uc16 addr32 */ \ +V(CHECK_GT, 33, 8) /* bc8 pad8 uc16 addr32 */ \ +V(CHECK_NOT_BACK_REF, 34, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_NOT_BACK_REF_NO_CASE, 35, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_NOT_REGS_EQUAL, 36, 12) /* bc8 regidx24 reg_idx32 addr32 */ \ +V(LOOKUP_MAP1, 37, 12) /* bc8 pad8 start16 bit_map_addr32 addr32 */ \ +V(LOOKUP_MAP2, 38, 96) /* bc8 pad8 start16 half_nibble_map_addr32* */ \ +V(LOOKUP_MAP8, 39, 96) /* bc8 pad8 start16 byte_map addr32* */ \ +V(LOOKUP_HI_MAP8, 40, 96) /* bc8 start24 byte_map_addr32 addr32* */ \ +V(CHECK_REGISTER_LT, 41, 12) /* 
bc8 reg_idx24 value32 addr32 */ \ +V(CHECK_REGISTER_GE, 42, 12) /* bc8 reg_idx24 value32 addr32 */ \ +V(CHECK_REGISTER_EQ_POS, 43, 8) /* bc8 reg_idx24 addr32 */ \ +V(CHECK_AT_START, 44, 8) /* bc8 pad24 addr32 */ \ +V(CHECK_NOT_AT_START, 45, 8) /* bc8 pad24 addr32 */ \ +V(CHECK_GREEDY, 46, 8) /* bc8 pad24 addr32 */ \ +V(ADVANCE_CP_AND_GOTO, 47, 8) /* bc8 offset24 addr32 */ \ +V(SET_CURRENT_POSITION_FROM_END, 48, 4) /* bc8 idx24 */ #define DECLARE_BYTECODES(name, code, length) \ static const int BC_##name = code; diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc index 814e358721..11016c8238 100644 --- a/deps/v8/src/code-stubs.cc +++ b/deps/v8/src/code-stubs.cc @@ -73,12 +73,21 @@ SmartArrayPointer CodeStub::GetName() { void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) { + code->set_major_key(MajorKey()); + Isolate* isolate = masm->isolate(); SmartArrayPointer name = GetName(); PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, *name)); GDBJIT(AddCode(GDBJITInterface::STUB, *name, code)); Counters* counters = isolate->counters(); counters->total_stubs_code_size()->Increment(code->instruction_size()); + +#ifdef ENABLE_DISASSEMBLER + if (FLAG_print_code_stubs) { + code->Disassemble(*name); + PrintF("\n"); + } +#endif } @@ -116,16 +125,8 @@ Handle CodeStub::GetCode() { GetICState()); Handle new_object = factory->NewCode( desc, flags, masm.CodeObject(), NeedsImmovableCode()); - new_object->set_major_key(MajorKey()); - FinishCode(new_object); RecordCodeGeneration(*new_object, &masm); - -#ifdef ENABLE_DISASSEMBLER - if (FLAG_print_code_stubs) { - new_object->Disassemble(*GetName()); - PrintF("\n"); - } -#endif + FinishCode(new_object); if (UseSpecialCache()) { AddToSpecialCache(new_object); diff --git a/deps/v8/src/compiler-intrinsics.h b/deps/v8/src/compiler-intrinsics.h index b73e8ac750..3b9c59ea53 100644 --- a/deps/v8/src/compiler-intrinsics.h +++ b/deps/v8/src/compiler-intrinsics.h @@ -40,9 +40,6 @@ class CompilerIntrinsics { // Returns number of zero bits following most significant 1 bit. // Undefined for zero value. INLINE(static int CountLeadingZeros(uint32_t value)); - - // Returns the number of bits set. - INLINE(static int CountSetBits(uint32_t value)); }; #ifdef __GNUC__ @@ -54,10 +51,6 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) { return __builtin_clz(value); } -int CompilerIntrinsics::CountSetBits(uint32_t value) { - return __builtin_popcount(value); -} - #elif defined(_MSC_VER) #pragma intrinsic(_BitScanForward) @@ -75,16 +68,6 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) { return 31 - static_cast(result); } -int CompilerIntrinsics::CountSetBits(uint32_t value) { - // Manually count set bits. - value = ((value >> 1) & 0x55555555) + (value & 0x55555555); - value = ((value >> 2) & 0x33333333) + (value & 0x33333333); - value = ((value >> 4) & 0x0f0f0f0f) + (value & 0x0f0f0f0f); - value = ((value >> 8) & 0x00ff00ff) + (value & 0x00ff00ff); - value = ((value >> 16) & 0x0000ffff) + (value & 0x0000ffff); - return value; -} - #else #error Unsupported compiler #endif diff --git a/deps/v8/src/contexts.h b/deps/v8/src/contexts.h index 647c15c153..af5cb036c6 100644 --- a/deps/v8/src/contexts.h +++ b/deps/v8/src/contexts.h @@ -397,7 +397,7 @@ class Context: public FixedArray { GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSORS) #undef GLOBAL_CONTEXT_FIELD_ACCESSORS - // Lookup the slot called name, starting with the current context. + // Lookup the the slot called name, starting with the current context. 
// There are three possibilities: // // 1) result->IsContext(): diff --git a/deps/v8/src/conversions-inl.h b/deps/v8/src/conversions-inl.h index 77b260f036..b098a1c29c 100644 --- a/deps/v8/src/conversions-inl.h +++ b/deps/v8/src/conversions-inl.h @@ -228,7 +228,9 @@ double InternalStringToIntDouble(UnicodeCache* unicode_cache, } ASSERT(number != 0); - return ldexp(static_cast(negative ? -number : number), exponent); + // The double could be constructed faster from number (mantissa), exponent + // and sign. Assuming it's a rare case more simple code is used. + return static_cast(negative ? -number : number) * pow(2.0, exponent); } diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 26d0bc10e1..45781cf0d4 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -318,7 +318,6 @@ static size_t convertToUint(Local value_in, TryCatch* try_catch) { const char kArrayBufferReferencePropName[] = "_is_array_buffer_"; const char kArrayBufferMarkerPropName[] = "_array_buffer_ref_"; -static const int kExternalArrayAllocationHeaderSize = 2; Handle Shell::CreateExternalArray(const Arguments& args, ExternalArrayType type, @@ -427,26 +426,14 @@ Handle Shell::CreateExternalArray(const Arguments& args, } Persistent persistent_array = Persistent::New(array); + persistent_array.MakeWeak(data, ExternalArrayWeakCallback); + persistent_array.MarkIndependent(); if (data == NULL && length != 0) { - // Make sure the total size fits into a (signed) int. - static const int kMaxSize = 0x7fffffff; - if (length > (kMaxSize - sizeof(size_t)) / element_size) { - return ThrowException(String::New("Array exceeds maximum size (2G)")); - } - // Prepend the size of the allocated chunk to the data itself. - int total_size = length * element_size + - kExternalArrayAllocationHeaderSize * sizeof(size_t); - data = malloc(total_size); + data = calloc(length, element_size); if (data == NULL) { return ThrowException(String::New("Memory allocation failed.")); } - *reinterpret_cast(data) = total_size; - data = reinterpret_cast(data) + kExternalArrayAllocationHeaderSize; - memset(data, 0, length * element_size); - V8::AdjustAmountOfExternalAllocatedMemory(total_size); } - persistent_array.MakeWeak(data, ExternalArrayWeakCallback); - persistent_array.MarkIndependent(); array->SetIndexedPropertiesToExternalArrayData( reinterpret_cast(data) + offset, type, @@ -465,9 +452,6 @@ void Shell::ExternalArrayWeakCallback(Persistent object, void* data) { Handle converted_object = object->ToObject(); Local prop_value = converted_object->Get(prop_name); if (data != NULL && !prop_value->IsObject()) { - data = reinterpret_cast(data) - kExternalArrayAllocationHeaderSize; - V8::AdjustAmountOfExternalAllocatedMemory( - -static_cast(*reinterpret_cast(data))); free(data); } object.Dispose(); @@ -993,8 +977,8 @@ void Shell::OnExit() { printf("+--------------------------------------------+-------------+\n"); delete [] counters; } - delete counters_file_; - delete counter_map_; + if (counters_file_ != NULL) + delete counters_file_; } #endif // V8_SHARED diff --git a/deps/v8/src/d8.js b/deps/v8/src/d8.js index 819135add4..bf269230b8 100644 --- a/deps/v8/src/d8.js +++ b/deps/v8/src/d8.js @@ -2174,7 +2174,7 @@ function DebugResponseDetails(response) { } var current_line = from_line + num; - var spacer = maxdigits - (1 + Math.floor(log10(current_line))); + spacer = maxdigits - (1 + Math.floor(log10(current_line))); if (current_line == Debug.State.currentSourceLine + 1) { for (var i = 0; i < maxdigits; i++) { result += '>'; diff --git 
a/deps/v8/src/debug-agent.cc b/deps/v8/src/debug-agent.cc index bdc7a578ac..511663d8ee 100644 --- a/deps/v8/src/debug-agent.cc +++ b/deps/v8/src/debug-agent.cc @@ -323,41 +323,41 @@ bool DebuggerAgentUtil::SendConnectMessage(const Socket* conn, const char* embedding_host) { static const int kBufferSize = 80; char buffer[kBufferSize]; // Sending buffer. + bool ok; int len; - int r; // Send the header. len = OS::SNPrintF(Vector(buffer, kBufferSize), "Type: connect\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; len = OS::SNPrintF(Vector(buffer, kBufferSize), "V8-Version: %s\r\n", v8::V8::GetVersion()); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; len = OS::SNPrintF(Vector(buffer, kBufferSize), "Protocol-Version: 1\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; if (embedding_host != NULL) { len = OS::SNPrintF(Vector(buffer, kBufferSize), "Embedding-Host: %s\r\n", embedding_host); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; } len = OS::SNPrintF(Vector(buffer, kBufferSize), "%s: 0\r\n", kContentLength); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; // Terminate header with empty line. len = OS::SNPrintF(Vector(buffer, kBufferSize), "\r\n"); - r = conn->Send(buffer, len); - if (r != len) return false; + ok = conn->Send(buffer, len); + if (!ok) return false; // No body for connect message. diff --git a/deps/v8/src/debug-debugger.js b/deps/v8/src/debug-debugger.js index 91838e8ad0..802f6224c4 100644 --- a/deps/v8/src/debug-debugger.js +++ b/deps/v8/src/debug-debugger.js @@ -1957,7 +1957,7 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) { if (request.arguments && !IS_UNDEFINED(request.arguments.frameNumber)) { frame_index = request.arguments.frameNumber; if (frame_index < 0 || this.exec_state_.frameCount() <= frame_index) { - throw new Error('Invalid frame number'); + return response.failed('Invalid frame number'); } return this.exec_state_.frame(frame_index); } else { @@ -1966,44 +1966,20 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) { }; -// Gets scope host object from request. It is either a function -// ('functionHandle' argument must be specified) or a stack frame -// ('frameNumber' may be specified and the current frame is taken by default). -DebugCommandProcessor.prototype.scopeHolderForScopeRequest_ = - function(request) { - if (request.arguments && "functionHandle" in request.arguments) { - if (!IS_NUMBER(request.arguments.functionHandle)) { - throw new Error('Function handle must be a number'); - } - var function_mirror = LookupMirror(request.arguments.functionHandle); - if (!function_mirror) { - throw new Error('Failed to find function object by handle'); - } - if (!function_mirror.isFunction()) { - throw new Error('Value of non-function type is found by handle'); - } - return function_mirror; - } else { - // No frames no scopes. - if (this.exec_state_.frameCount() == 0) { - throw new Error('No scopes'); - } - - // Get the frame for which the scopes are requested. - var frame = this.frameForScopeRequest_(request); - return frame; +DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { + // No frames no scopes. 
+ if (this.exec_state_.frameCount() == 0) { + return response.failed('No scopes'); } -} + // Get the frame for which the scopes are requested. + var frame = this.frameForScopeRequest_(request); -DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { - var scope_holder = this.scopeHolderForScopeRequest_(request); - - // Fill all scopes for this frame or function. - var total_scopes = scope_holder.scopeCount(); + // Fill all scopes for this frame. + var total_scopes = frame.scopeCount(); var scopes = []; for (var i = 0; i < total_scopes; i++) { - scopes.push(scope_holder.scope(i)); + scopes.push(frame.scope(i)); } response.body = { fromScope: 0, @@ -2015,19 +1991,24 @@ DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) { DebugCommandProcessor.prototype.scopeRequest_ = function(request, response) { - // Get the frame or function for which the scope is requested. - var scope_holder = this.scopeHolderForScopeRequest_(request); + // No frames no scopes. + if (this.exec_state_.frameCount() == 0) { + return response.failed('No scopes'); + } + + // Get the frame for which the scope is requested. + var frame = this.frameForScopeRequest_(request); // With no scope argument just return top scope. var scope_index = 0; if (request.arguments && !IS_UNDEFINED(request.arguments.number)) { scope_index = %ToNumber(request.arguments.number); - if (scope_index < 0 || scope_holder.scopeCount() <= scope_index) { + if (scope_index < 0 || frame.scopeCount() <= scope_index) { return response.failed('Invalid scope number'); } } - response.body = scope_holder.scope(scope_index); + response.body = frame.scope(scope_index); }; diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc index 99256ba21a..f8a1ecf4f9 100644 --- a/deps/v8/src/debug.cc +++ b/deps/v8/src/debug.cc @@ -1857,6 +1857,13 @@ static void RedirectActivationsToRecompiledCodeOnThread( // break slots. debug_break_slot_count++; } + if (frame_code->has_self_optimization_header() && + !new_code->has_self_optimization_header()) { + delta -= FullCodeGenerator::self_optimization_header_size(); + } else { + ASSERT(frame_code->has_self_optimization_header() == + new_code->has_self_optimization_header()); + } int debug_break_slot_bytes = debug_break_slot_count * Assembler::kDebugBreakSlotLength; if (FLAG_trace_deopt) { diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h index 7ec78015c6..474b90bd21 100644 --- a/deps/v8/src/debug.h +++ b/deps/v8/src/debug.h @@ -1,4 +1,4 @@ -// Copyright 2012 the V8 project authors. All rights reserved. +// Copyright 2011 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -245,8 +245,6 @@ class Debug { bool IsBreakOnException(ExceptionBreakType type); void PrepareStep(StepAction step_action, int step_count); void ClearStepping(); - void ClearStepOut(); - bool IsStepping() { return thread_local_.step_count_ > 0; } bool StepNextContinue(BreakLocationIterator* break_location_iterator, JavaScriptFrame* frame); static Handle GetDebugInfo(Handle shared); @@ -466,6 +464,7 @@ class Debug { void ActivateStepIn(StackFrame* frame); void ClearStepIn(); void ActivateStepOut(StackFrame* frame); + void ClearStepOut(); void ClearStepNext(); // Returns whether the compile succeeded. 
void RemoveDebugInfo(Handle debug_info); diff --git a/deps/v8/src/double.h b/deps/v8/src/double.h index fcf6906af7..16a3245e9a 100644 --- a/deps/v8/src/double.h +++ b/deps/v8/src/double.h @@ -130,6 +130,12 @@ class Double { return (d64 & kExponentMask) == kExponentMask; } + bool IsNan() const { + uint64_t d64 = AsUint64(); + return ((d64 & kExponentMask) == kExponentMask) && + ((d64 & kSignificandMask) != 0); + } + bool IsInfinite() const { uint64_t d64 = AsUint64(); return ((d64 & kExponentMask) == kExponentMask) && diff --git a/deps/v8/src/elements.cc b/deps/v8/src/elements.cc index 26d3dc135c..aa51ea9b78 100644 --- a/deps/v8/src/elements.cc +++ b/deps/v8/src/elements.cc @@ -1332,8 +1332,18 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) { void ElementsAccessor::InitializeOncePerProcess() { + static struct ConcreteElementsAccessors { +#define ACCESSOR_STRUCT(Class, Kind, Store) Class* Kind##_handler; + ELEMENTS_LIST(ACCESSOR_STRUCT) +#undef ACCESSOR_STRUCT + } element_accessors = { +#define ACCESSOR_INIT(Class, Kind, Store) new Class(#Kind), + ELEMENTS_LIST(ACCESSOR_INIT) +#undef ACCESSOR_INIT + }; + static ElementsAccessor* accessor_array[] = { -#define ACCESSOR_ARRAY(Class, Kind, Store) new Class(#Kind), +#define ACCESSOR_ARRAY(Class, Kind, Store) element_accessors.Kind##_handler, ELEMENTS_LIST(ACCESSOR_ARRAY) #undef ACCESSOR_ARRAY }; @@ -1345,14 +1355,6 @@ void ElementsAccessor::InitializeOncePerProcess() { } -void ElementsAccessor::TearDown() { -#define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind]; - ELEMENTS_LIST(ACCESSOR_DELETE) -#undef ACCESSOR_DELETE - elements_accessors_ = NULL; -} - - template MaybeObject* ElementsAccessorBase:: diff --git a/deps/v8/src/elements.h b/deps/v8/src/elements.h index 51d402d341..ff97c08324 100644 --- a/deps/v8/src/elements.h +++ b/deps/v8/src/elements.h @@ -131,7 +131,6 @@ class ElementsAccessor { static ElementsAccessor* ForArray(FixedArrayBase* array); static void InitializeOncePerProcess(); - static void TearDown(); protected: friend class NonStrictArgumentsElementsAccessor; diff --git a/deps/v8/src/extensions/externalize-string-extension.cc b/deps/v8/src/extensions/externalize-string-extension.cc index 50d876136f..9fbf329818 100644 --- a/deps/v8/src/extensions/externalize-string-extension.cc +++ b/deps/v8/src/extensions/externalize-string-extension.cc @@ -133,8 +133,11 @@ v8::Handle ExternalizeStringExtension::IsAscii( void ExternalizeStringExtension::Register() { - static ExternalizeStringExtension externalize_extension; - static v8::DeclareExtension declaration(&externalize_extension); + static ExternalizeStringExtension* externalize_extension = NULL; + if (externalize_extension == NULL) + externalize_extension = new ExternalizeStringExtension; + static v8::DeclareExtension externalize_extension_declaration( + externalize_extension); } } } // namespace v8::internal diff --git a/deps/v8/src/extensions/gc-extension.cc b/deps/v8/src/extensions/gc-extension.cc index f921552aaa..573797e174 100644 --- a/deps/v8/src/extensions/gc-extension.cc +++ b/deps/v8/src/extensions/gc-extension.cc @@ -46,8 +46,9 @@ v8::Handle GCExtension::GC(const v8::Arguments& args) { void GCExtension::Register() { - static GCExtension gc_extension; - static v8::DeclareExtension declaration(&gc_extension); + static GCExtension* gc_extension = NULL; + if (gc_extension == NULL) gc_extension = new GCExtension(); + static v8::DeclareExtension gc_extension_declaration(gc_extension); } } } // namespace v8::internal diff --git 
a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc index 6bb7893746..e8a9f26a5c 100644 --- a/deps/v8/src/factory.cc +++ b/deps/v8/src/factory.cc @@ -291,15 +291,6 @@ Handle Factory::NewGlobalContext() { } -Handle Factory::NewModuleContext(Handle previous, - Handle scope_info) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateModuleContext(*previous, *scope_info), - Context); -} - - Handle Factory::NewFunctionContext(int length, Handle function) { CALL_HEAP_FUNCTION( @@ -333,9 +324,10 @@ Handle Factory::NewWithContext(Handle function, } -Handle Factory::NewBlockContext(Handle function, - Handle previous, - Handle scope_info) { +Handle Factory::NewBlockContext( + Handle function, + Handle previous, + Handle scope_info) { CALL_HEAP_FUNCTION( isolate(), isolate()->heap()->AllocateBlockContext(*function, @@ -936,13 +928,6 @@ Handle Factory::NewJSObject(Handle constructor, } -Handle Factory::NewJSModule() { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->AllocateJSModule(), JSModule); -} - - Handle Factory::NewGlobalObject( Handle constructor) { CALL_HEAP_FUNCTION(isolate(), diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h index 06aad1bef6..786d4a983a 100644 --- a/deps/v8/src/factory.h +++ b/deps/v8/src/factory.h @@ -162,12 +162,9 @@ class Factory { // Create a global (but otherwise uninitialized) context. Handle NewGlobalContext(); - // Create a module context. - Handle NewModuleContext(Handle previous, - Handle scope_info); - // Create a function context. - Handle NewFunctionContext(int length, Handle function); + Handle NewFunctionContext(int length, + Handle function); // Create a catch context. Handle NewCatchContext(Handle function, @@ -180,7 +177,7 @@ class Factory { Handle previous, Handle extension); - // Create a block context. + // Create a 'block' context. Handle NewBlockContext(Handle function, Handle previous, Handle scope_info); @@ -265,9 +262,6 @@ class Factory { // runtime. Handle NewJSObjectFromMap(Handle map); - // JS modules are pretenured. - Handle NewJSModule(); - // JS arrays are pretenured when allocated by the parser. Handle NewJSArray(int capacity, ElementsKind elements_kind = FAST_ELEMENTS, diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index 62a9782859..75697a8906 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -132,8 +132,6 @@ public: // Flags for language modes and experimental language features. 
DEFINE_bool(use_strict, false, "enforce strict mode") -DEFINE_bool(es52_globals, false, - "activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false, "enable harmony block scoping") @@ -167,12 +165,7 @@ DEFINE_bool(eliminate_dead_phis, true, "eliminate dead phis") DEFINE_bool(use_gvn, true, "use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true, "use function inlining") -DEFINE_int(max_inlined_source_size, 600, - "maximum source size in bytes considered for a single inlining") -DEFINE_int(max_inlined_nodes, 196, - "maximum number of AST nodes considered for a single inlining") -DEFINE_int(max_inlined_nodes_cumulative, 196, - "maximum cumulative number of AST nodes considered for inlining") +DEFINE_bool(limit_inlining, true, "limit code size growth from inlining") DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true, @@ -195,8 +188,6 @@ DEFINE_bool(trap_on_deopt, false, "put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining") DEFINE_bool(use_osr, true, "use on-stack replacement") -DEFINE_bool(array_bounds_checks_elimination, true, - "perform array bounds checks elimination") DEFINE_bool(trace_osr, false, "trace on-stack replacement") DEFINE_int(stress_runs, 0, "number of stress runs") diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc index e265341b1a..0571a813f5 100644 --- a/deps/v8/src/frames.cc +++ b/deps/v8/src/frames.cc @@ -1359,28 +1359,34 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* // ------------------------------------------------------------------------- int NumRegs(RegList reglist) { - return CompilerIntrinsics::CountSetBits(reglist); + int n = 0; + while (reglist != 0) { + n++; + reglist &= reglist - 1; // clear one bit + } + return n; } struct JSCallerSavedCodeData { + JSCallerSavedCodeData() { + int i = 0; + for (int r = 0; r < kNumRegs; r++) + if ((kJSCallerSaved & (1 << r)) != 0) + reg_code[i++] = r; + + ASSERT(i == kNumJSCallerSaved); + } int reg_code[kNumJSCallerSaved]; }; -JSCallerSavedCodeData caller_saved_code_data; -void SetUpJSCallerSavedCodeData() { - int i = 0; - for (int r = 0; r < kNumRegs; r++) - if ((kJSCallerSaved & (1 << r)) != 0) - caller_saved_code_data.reg_code[i++] = r; - - ASSERT(i == kNumJSCallerSaved); -} +static LazyInstance::type caller_saved_code_data = + LAZY_INSTANCE_INITIALIZER; int JSCallerSavedCode(int n) { ASSERT(0 <= n && n < kNumJSCallerSaved); - return caller_saved_code_data.reg_code[n]; + return caller_saved_code_data.Get().reg_code[n]; } diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h index 7178bd413a..9071555197 100644 --- a/deps/v8/src/frames.h +++ b/deps/v8/src/frames.h @@ -40,8 +40,6 @@ typedef uint32_t RegList; // Get the number of registers in a given register list. int NumRegs(RegList list); -void SetUpJSCallerSavedCodeData(); - // Return the code of the n-th saved register available to JavaScript. 
int JSCallerSavedCode(int n); diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc index 9b1df4ee73..44fe011a4e 100644 --- a/deps/v8/src/full-codegen.cc +++ b/deps/v8/src/full-codegen.cc @@ -316,6 +316,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { code->set_optimizable(info->IsOptimizable() && !info->function()->flags()->Contains(kDontOptimize) && info->function()->scope()->AllowsLazyRecompilation()); + code->set_self_optimization_header(cgen.has_self_optimization_header_); cgen.PopulateDeoptimizationData(code); cgen.PopulateTypeFeedbackInfo(code); cgen.PopulateTypeFeedbackCells(code); @@ -331,6 +332,9 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) { code->set_stack_check_table_offset(table_offset); CodeGenerator::PrintCode(code, info); info->SetCode(code); // May be an empty handle. + if (!code.is_null()) { + isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size()); + } #ifdef ENABLE_GDB_JIT_INTERFACE if (FLAG_gdbjit && !code.is_null()) { GDBJITLineInfo* lineinfo = @@ -569,91 +573,88 @@ void FullCodeGenerator::DoTest(const TestContext* context) { void FullCodeGenerator::VisitDeclarations( ZoneList* declarations) { - ZoneList >* saved_globals = globals_; - ZoneList > inner_globals(10); - globals_ = &inner_globals; + int save_global_count = global_count_; + global_count_ = 0; AstVisitor::VisitDeclarations(declarations); - if (!globals_->is_empty()) { + + // Batch declare global functions and variables. + if (global_count_ > 0) { + Handle array = + isolate()->factory()->NewFixedArray(2 * global_count_, TENURED); + int length = declarations->length(); + for (int j = 0, i = 0; i < length; i++) { + Declaration* decl = declarations->at(i); + Variable* var = decl->proxy()->var(); + + if (var->IsUnallocated()) { + array->set(j++, *(var->name())); + FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration(); + if (fun_decl == NULL) { + if (var->binding_needs_init()) { + // In case this binding needs initialization use the hole. + array->set_the_hole(j++); + } else { + array->set_undefined(j++); + } + } else { + Handle function = + Compiler::BuildFunctionInfo(fun_decl->fun(), script()); + // Check for stack-overflow exception. + if (function.is_null()) { + SetStackOverflow(); + return; + } + array->set(j++, *function); + } + } + } // Invoke the platform-dependent code generator to do the actual // declaration the global functions and variables. - Handle array = - isolate()->factory()->NewFixedArray(globals_->length(), TENURED); - for (int i = 0; i < globals_->length(); ++i) - array->set(i, *globals_->at(i)); DeclareGlobals(array); } - globals_ = saved_globals; + global_count_ = save_global_count; } -void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) { - Handle instance = module->interface()->Instance(); - ASSERT(!instance.is_null()); +void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) { + EmitDeclaration(decl->proxy(), decl->mode(), NULL); +} - // Allocate a module context statically. - Block* block = module->body(); - Scope* saved_scope = scope(); - scope_ = block->scope(); - Handle scope_info = scope_->GetScopeInfo(); - - // Generate code for module creation and linking. - Comment cmnt(masm_, "[ ModuleLiteral"); - SetStatementPosition(block); - - if (scope_info->HasContext()) { - // Set up module context. 
-    __ Push(scope_info);
-    __ Push(instance);
-    __ CallRuntime(Runtime::kPushModuleContext, 2);
-    StoreToFrameField(
-        StandardFrameConstants::kContextOffset, context_register());
-  }
-
-  {
-    Comment cmnt(masm_, "[ Declarations");
-    VisitDeclarations(scope_->declarations());
-  }
+void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
+  EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
+}
 
-  scope_ = saved_scope;
-  if (scope_info->HasContext()) {
-    // Pop module context.
-    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
-    // Update local stack frame context field.
-    StoreToFrameField(
-        StandardFrameConstants::kContextOffset, context_register());
-  }
-
-  // Populate module instance object.
-  const PropertyAttributes attr =
-      static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE | DONT_ENUM);
-  for (Interface::Iterator it = module->interface()->iterator();
-       !it.done(); it.Advance()) {
-    if (it.interface()->IsModule()) {
-      Handle<Object> value = it.interface()->Instance();
-      ASSERT(!value.is_null());
-      JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
-    } else {
-      // TODO(rossberg): set proper getters instead of undefined...
-      // instance->DefineAccessor(*it.name(), ACCESSOR_GETTER, *getter, attr);
-      Handle<Object> value(isolate()->heap()->undefined_value());
-      JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
-    }
-  }
-  USE(instance->PreventExtensions());
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
+  EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) {
+  EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) {
+  // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
+  // TODO(rossberg)
 }
 
 
 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
-  // Noting to do.
-  // The instance object is resolved statically through the module's interface.
+  // TODO(rossberg)
 }
 
 
 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
-  // Noting to do.
-  // The instance object is resolved statically through the module's interface.
+  // TODO(rossberg)
 }
 
 
@@ -915,9 +916,9 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
 
   Scope* saved_scope = scope();
   // Push a block context when entering a block with block scoped variables.
-  if (stmt->scope() != NULL) {
+  if (stmt->block_scope() != NULL) {
     { Comment cmnt(masm_, "[ Extend block context");
-      scope_ = stmt->scope();
+      scope_ = stmt->block_scope();
       Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
       int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
       __ Push(scope_info);
@@ -944,7 +945,7 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
 
   // Pop block context if necessary.
-  if (stmt->scope() != NULL) {
+  if (stmt->block_scope() != NULL) {
     LoadContextField(context_register(), Context::PREVIOUS_INDEX);
     // Update local stack frame context field.
     StoreToFrameField(StandardFrameConstants::kContextOffset,
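
For orientation, the VisitDeclarations() hunk above switches full-codegen back to batching global declarations: each unallocated global contributes a (name, initial value) pair to a flat FixedArray of length 2 * global_count_, where the value slot holds the hole or undefined for plain variables and a SharedFunctionInfo for function declarations, and the whole array is handed to DeclareGlobals() in one go. A rough standalone sketch of that interleaved pair layout, using plain standard-library types in place of V8 handles (all names here are illustrative only, not part of the patch):

    #include <cstdio>
    #include <string>
    #include <vector>

    // Stand-in for a declaration: a name plus an optional "function" payload.
    struct GlobalDecl {
      std::string name;
      std::string function_source;  // empty => plain variable
    };

    // Packs declarations into a flat array of [name, value, name, value, ...],
    // mirroring the 2 * global_count_ FixedArray built in VisitDeclarations().
    static std::vector<std::string> PackPairs(const std::vector<GlobalDecl>& decls) {
      std::vector<std::string> flat;
      flat.reserve(2 * decls.size());
      for (const GlobalDecl& d : decls) {
        flat.push_back(d.name);                           // even slot: name
        flat.push_back(d.function_source.empty()
                           ? "<undefined>"                // odd slot: placeholder value
                           : d.function_source);          // or the function payload
      }
      return flat;
    }

    int main() {
      std::vector<GlobalDecl> decls = {{"x", ""}, {"f", "function f() {}"}};
      for (const std::string& slot : PackPairs(decls)) {
        std::printf("%s\n", slot.c_str());
      }
      return 0;
    }

Handing one flat array to the declaration code keeps the work to a single batched call per compiled scope rather than one call per global.
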
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index 0e0ffe924b..58d59862a5 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -83,17 +83,22 @@ class FullCodeGenerator: public AstVisitor {
         scope_(info->scope()),
         nesting_stack_(NULL),
         loop_depth_(0),
-        globals_(NULL),
+        global_count_(0),
         context_(NULL),
         bailout_entries_(info->HasDeoptimizationSupport() ?
                          info->function()->ast_node_count() : 0),
         stack_checks_(2),  // There's always at least one.
         type_feedback_cells_(info->HasDeoptimizationSupport() ?
                              info->function()->ast_node_count() : 0),
-        ic_total_count_(0) { }
+        ic_total_count_(0),
+        has_self_optimization_header_(false) { }
 
   static bool MakeCode(CompilationInfo* info);
 
+  // Returns the platform-specific size in bytes of the self-optimization
+  // header.
+  static int self_optimization_header_size();
+
   // Encode state and pc-offset as a BitField.
   // Only use 30 bits because we encode the result as a smi.
   class StateField : public BitField<State, 0, 1> { };
@@ -202,7 +207,7 @@ class FullCodeGenerator: public AstVisitor {
     virtual ~NestedBlock() {}
 
     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
-      if (statement()->AsBlock()->scope() != NULL) {
+      if (statement()->AsBlock()->block_scope() != NULL) {
         ++(*context_length);
       }
       return previous_;
@@ -413,9 +418,12 @@ class FullCodeGenerator: public AstVisitor {
                          Label* if_true,
                          Label* if_false);
 
-  // If enabled, emit debug code for checking that the current context is
-  // neither a with nor a catch context.
-  void EmitDebugCheckDeclarationContext(Variable* variable);
+  // Platform-specific code for a variable, constant, or function
+  // declaration.  Functions have an initial value.
+  // Increments global_count_ for unallocated variables.
+  void EmitDeclaration(VariableProxy* proxy,
+                       VariableMode mode,
+                       FunctionLiteral* function);
 
   // Platform-specific code for checking the stack limit at the back edge of
   // a loop.
@@ -545,8 +553,12 @@ class FullCodeGenerator: public AstVisitor {
   Handle