From 91757fa8400654b69de18e3840c402e369521b68 Mon Sep 17 00:00:00 2001
From: Ryan Dahl
Date: Tue, 17 Aug 2010 08:37:25 -0700
Subject: [PATCH] Upgrade V8 to 2.3.8

---
 deps/v8/AUTHORS                               |    1 +
 deps/v8/ChangeLog                             |   42 +-
 deps/v8/SConstruct                            |    3 +-
 deps/v8/include/v8-profiler.h                 |   30 +-
 deps/v8/include/v8.h                          |   10 +-
 deps/v8/src/SConscript                        |    8 +-
 deps/v8/src/SConscript.orig                   |  324 +++
 deps/v8/src/accessors.cc                      |    2 +-
 deps/v8/src/api.cc                            |  121 +-
 deps/v8/src/arm/assembler-arm-inl.h           |   23 +
 deps/v8/src/arm/assembler-arm.cc              |   15 +
 deps/v8/src/arm/assembler-arm.h               |    4 +
 deps/v8/src/arm/builtins-arm.cc               |   25 +-
 deps/v8/src/arm/codegen-arm.cc                |  666 ++++++---
 deps/v8/src/arm/codegen-arm.h                 |   23 +-
 deps/v8/src/arm/debug-arm.cc                  |    8 +-
 deps/v8/src/arm/disasm-arm.cc                 |    8 +-
 deps/v8/src/arm/fast-codegen-arm.cc           |  241 ----
 deps/v8/src/arm/full-codegen-arm.cc           |  184 +--
 deps/v8/src/arm/macro-assembler-arm.cc        |   12 +-
 deps/v8/src/arm/macro-assembler-arm.h         |    3 +
 deps/v8/src/arm/simulator-arm.cc              |   10 +-
 deps/v8/src/arm/stub-cache-arm.cc             |   32 -
 deps/v8/src/assembler.h                       |    1 +
 deps/v8/src/bootstrapper.cc                   |   18 +-
 deps/v8/src/builtins.h                        |    2 +
 deps/v8/src/checks.h                          |   10 +-
 deps/v8/src/codegen.cc                        |   15 +-
 deps/v8/src/codegen.h                         |   24 +-
 deps/v8/src/compiler.cc                       |   28 +-
 deps/v8/src/compiler.h                        |   53 -
 deps/v8/src/contexts.h                        |    2 +
 deps/v8/src/cpu-profiler.cc                   |   11 +-
 deps/v8/src/cpu-profiler.h                    |    2 +-
 deps/v8/src/debug.cc                          |   33 +-
 deps/v8/src/debug.h                           |   10 +-
 deps/v8/src/factory.cc                        |   14 +-
 deps/v8/src/factory.h                         |    4 +-
 deps/v8/src/fast-codegen.cc                   |  746 ----
 deps/v8/src/fast-codegen.h                    |  161 ---
 deps/v8/src/flag-definitions.h                |    3 -
 deps/v8/src/full-codegen.cc                   |    5 +-
 deps/v8/src/full-codegen.h                    |    9 +-
 deps/v8/src/globals.h                         |   25 +
 deps/v8/src/handles-inl.h                     |    2 +-
 deps/v8/src/handles.cc                        |   35 +-
 deps/v8/src/handles.h                         |    3 -
 deps/v8/src/heap-profiler.cc                  |    6 +-
 deps/v8/src/heap.cc                           |  522 ++++---
 deps/v8/src/heap.h                            |    6 +-
 deps/v8/src/ia32/assembler-ia32-inl.h         |   24 +
 deps/v8/src/ia32/assembler-ia32.cc            |   15 +
 deps/v8/src/ia32/assembler-ia32.h             |    1 +
 deps/v8/src/ia32/builtins-ia32.cc             |   22 +-
 deps/v8/src/ia32/codegen-ia32.cc              |  722 +++++---
 deps/v8/src/ia32/codegen-ia32.h               |   33 +-
 deps/v8/src/ia32/debug-ia32.cc                |   44 +-
 deps/v8/src/ia32/disasm-ia32.cc               |    1 +
 deps/v8/src/ia32/fast-codegen-ia32.cc         |  954 ------------
 deps/v8/src/ia32/fast-codegen-ia32.h          |  155 ---
 deps/v8/src/ia32/full-codegen-ia32.cc         |  175 +--
 deps/v8/src/ia32/macro-assembler-ia32.cc      |   66 +-
 deps/v8/src/ia32/macro-assembler-ia32.h       |   33 +
 deps/v8/src/ia32/stub-cache-ia32.cc           |   32 +-
 deps/v8/src/ia32/virtual-frame-ia32.h         |   16 +
 deps/v8/src/list-inl.h                        |    7 +
 deps/v8/src/list.h                            |    2 +
 deps/v8/src/liveedit-debugger.js              |    7 +-
 deps/v8/src/liveedit.cc                       |   42 +-
 deps/v8/src/mark-compact.cc                   |  256 +++-
 deps/v8/src/mark-compact.h                    |   10 +-
 deps/v8/src/messages.js                       |   12 +-
 deps/v8/src/mips/debug-mips.cc                |    9 +-
 deps/v8/src/mips/simulator-mips.cc            |    7 +-
 deps/v8/src/objects-inl.h                     |  112 +-
 deps/v8/src/objects-visiting.cc               |  139 ++
 deps/v8/src/objects-visiting.h                |  382 +++++
 deps/v8/src/objects.cc                        |  178 +--
 deps/v8/src/objects.h                         |  140 +-
 deps/v8/src/parser.cc                         |    9 +-
 deps/v8/src/platform-linux.cc                 |    5 +-
 deps/v8/src/platform-nullos.cc                |    6 +
 deps/v8/src/platform-posix.cc                 |    7 +-
 deps/v8/src/platform-win32.cc                 |    5 +
 deps/v8/src/platform.h                        |    3 +
 deps/v8/src/platform.h.orig                   |  580 ++++++++
 deps/v8/src/profile-generator-inl.h           |   18 -
 deps/v8/src/profile-generator.cc              | 1234 +++++++++++------
 deps/v8/src/profile-generator.h               |  491 ++++---
 deps/v8/src/property.h                        |    6 +-
 deps/v8/src/runtime.cc                        |   16 +-
 deps/v8/src/runtime.js                        |    8 +-
 deps/v8/src/serialize.cc                      |    8 -
 deps/v8/src/serialize.h                       |    2 +-
 deps/v8/src/stub-cache.cc                     |   17 -
 deps/v8/src/stub-cache.h                      |    3 -
 deps/v8/src/third_party/dtoa/dtoa.c           |  129 +-
 deps/v8/src/top.cc                            |   12 +-
 deps/v8/src/type-info.h                       |   16 +-
 deps/v8/src/utils.h                           |    6 +-
 deps/v8/src/version.cc                        |    4 +-
 deps/v8/src/x64/assembler-x64-inl.h           |   23 +
 deps/v8/src/x64/assembler-x64.cc              |    2 +-
 deps/v8/src/x64/builtins-x64.cc               |   22 +-
 deps/v8/src/x64/codegen-x64.cc                |  312 +++--
 deps/v8/src/x64/codegen-x64.h                 |   22 +-
 deps/v8/src/x64/debug-x64.cc                  |   32 +-
 deps/v8/src/x64/fast-codegen-x64.cc           |  250 ----
 deps/v8/src/x64/full-codegen-x64.cc           |  181 +--
 deps/v8/src/x64/macro-assembler-x64.cc        |   18 +-
 deps/v8/src/x64/macro-assembler-x64.h         |    3 +
 deps/v8/src/x64/stub-cache-x64.cc             |   24 -
 deps/v8/test/cctest/test-api.cc               |  101 +-
 deps/v8/test/cctest/test-cpu-profiler.cc      |   15 +
 deps/v8/test/cctest/test-disasm-ia32.cc       |    2 +
 deps/v8/test/cctest/test-heap-profiler.cc     |  224 ++-
 .../api-call-after-bypassed-exception.js      |   52 +-
 deps/v8/test/mjsunit/bitops-info.js           |   77 +
 .../mjsunit/debug-clearbreakpointgroup.js     |  234 ++--
 .../debug-evaluate-bool-constructor.js        |  160 +--
 deps/v8/test/mjsunit/debug-references.js      |  236 ++--
 deps/v8/test/mjsunit/debug-stepin-accessor.js |  496 +++---
 deps/v8/test/mjsunit/debug-stepin-builtin.js  |  156 +--
 .../debug-stepin-call-function-stub.js        |  230 +--
 .../mjsunit/debug-stepin-function-call.js     |  296 ++--
 .../test/mjsunit/debug-stepnext-do-while.js   |  158 +--
 .../debug-stepout-recursive-function.js       |  212 +--
 .../test/mjsunit/debug-stepout-to-builtin.js  |  168 +--
 deps/v8/test/mjsunit/for-in-delete.js         |   50 +
 deps/v8/test/mjsunit/fuzz-natives.js          |    3 +
 .../mjsunit/global-deleted-property-keyed.js  |   76 +-
 deps/v8/test/mjsunit/mjsunit.status           |    4 -
 deps/v8/test/mjsunit/object-literal.js        |   32 +-
 deps/v8/test/mjsunit/regexp-capture.js        |  114 +-
 .../mjsunit/regress/bitops-register-alias.js  |   31 +
 deps/v8/test/mjsunit/regress/regress-246.js   |   60 +-
 deps/v8/test/mjsunit/regress/regress-760-1.js |   49 +
 deps/v8/test/mjsunit/regress/regress-760-2.js |   49 +
 deps/v8/test/mjsunit/regress/regress-798.js   |  109 ++
 deps/v8/test/mjsunit/regress/regress-815.js   |   49 +
 deps/v8/test/sputnik/sputnik.status           |    4 +-
 deps/v8/tools/gc-nvp-trace-processor.py       |   69 +-
 deps/v8/tools/gyp/v8.gyp                      |   11 +-
 deps/v8/tools/oom_dump/README                 |   30 +
 deps/v8/tools/oom_dump/SConstruct             |   42 +
 deps/v8/tools/oom_dump/oom_dump.cc            |  285 ++++
 deps/v8/tools/v8.xcodeproj/project.pbxproj    |   24 +-
 deps/v8/tools/visual_studio/v8_base.vcproj    |   21 +-
 .../v8/tools/visual_studio/v8_base_arm.vcproj |   19 +-
 .../v8/tools/visual_studio/v8_base_x64.vcproj |   19 +-
 150 files changed, 8072 insertions(+), 6510 deletions(-)
 create mode 100755 deps/v8/src/SConscript.orig
 delete mode 100644 deps/v8/src/arm/fast-codegen-arm.cc
 delete mode 100644 deps/v8/src/fast-codegen.cc
 delete mode 100644 deps/v8/src/fast-codegen.h
 delete mode 100644 deps/v8/src/ia32/fast-codegen-ia32.cc
 delete mode 100644 deps/v8/src/ia32/fast-codegen-ia32.h
 create mode 100644 deps/v8/src/objects-visiting.cc
 create mode 100644 deps/v8/src/objects-visiting.h
 create mode 100644 deps/v8/src/platform.h.orig
 delete mode 100644 deps/v8/src/x64/fast-codegen-x64.cc
 create mode 100644 deps/v8/test/mjsunit/bitops-info.js
 create mode 100644 deps/v8/test/mjsunit/for-in-delete.js
 create mode 100644 deps/v8/test/mjsunit/regress/bitops-register-alias.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-760-1.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-760-2.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-798.js
 create mode 100644 deps/v8/test/mjsunit/regress/regress-815.js
 create mode 100644 deps/v8/tools/oom_dump/README
 create mode 100644 deps/v8/tools/oom_dump/SConstruct
 create mode 100644 deps/v8/tools/oom_dump/oom_dump.cc

diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 4d87db3be0..65b8965f1a 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -29,4 +29,5 @@ Rodolph Perfetta
 Ryan Dahl
 Subrato K De
 Burcu Dogan
+Vlad Burlik
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index b5663744d3..4c96de014b 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,11 +1,45 @@
+2010-08-16: Version 2.3.8
+
+        Fixed build with strict aliasing on GCC 4.4 (issue 463).
+
+        Fixed issue with incorrect handling of custom valueOf methods on
+        string wrappers (issue 760).
+
+        Fixed compilation for ARMv4 (issue 590).
+
+        Improved performance.
+
+
+2010-08-11: Version 2.3.7
+
+        Reduced size of heap snapshots produced by heap profiler (issue 783).
+
+        Introduced v8::Value::IsRegExp method.
+
+        Fixed CPU profiler crash in start / stop sequence when non-existent
+        name is passed (issue http://crbug.com/51594).
+
+        Introduced new indexed property query callbacks API (issue 816). This
+        API is guarded by USE_NEW_QUERY_CALLBACK define and is disabled
+        by default.
+
+        Removed support for object literal get/set with number/string
+        property name.
+
+        Fixed handling of JSObject::elements in CalculateNetworkSize
+        (issue 822).
+
+        Allow compiling with strict aliasing enabled on GCC 4.4 (issue 463).
+
+
 2010-08-09: Version 2.3.6

-        RegExp literals create a new object every time they are evaluated
-        (issue 704).
+        RegExp literals create a new object every time they are evaluated
+        (issue 704).

-        Object.seal and Object.freeze return the modified object (issue 809).
+        Object.seal and Object.freeze return the modified object (issue 809).

-        Fix building using GCC 4.4.4.
+        Fix building using GCC 4.4.4.


 2010-08-04: Version 2.3.5
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index 00b8fb7219..8fc192637c 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -58,7 +58,7 @@ else:
   # on linux we need these compiler flags to avoid crashes in the v8 test suite
   # and avoid dtoa.c strict aliasing issues
   if os.environ.get('GCC_VERSION') == '44':
-    GCC_EXTRA_CCFLAGS = ['-fno-tree-vrp', '-fno-strict-aliasing']
+    GCC_EXTRA_CCFLAGS = ['-fno-tree-vrp']
     GCC_DTOA_EXTRA_CCFLAGS = []
   else:
     GCC_EXTRA_CCFLAGS = []
@@ -80,7 +80,6 @@ ANDROID_FLAGS = ['-march=armv7-a',
                  '-frerun-cse-after-loop',
                  '-frename-registers',
                  '-fomit-frame-pointer',
-                 '-fno-strict-aliasing',
                  '-finline-limit=64',
                  '-DCAN_USE_VFP_INSTRUCTIONS=1',
                  '-DCAN_USE_ARMV7_INSTRUCTIONS=1',
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index c99eb0d9f8..9e3cb873c6 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -194,10 +194,10 @@ class HeapGraphNode;
 class V8EXPORT HeapGraphEdge {
  public:
   enum Type {
-    CONTEXT_VARIABLE = 0,  // A variable from a function context.
-    ELEMENT = 1,           // An element of an array.
-    PROPERTY = 2,          // A named object property.
-    INTERNAL = 3           // A link that can't be accessed from JS,
+    kContextVariable = 0,  // A variable from a function context.
+    kElement = 1,          // An element of an array.
+    kProperty = 2,         // A named object property.
+    kInternal = 3          // A link that can't be accessed from JS,
                            // thus, its name isn't a real property name.
   };

@@ -240,12 +240,12 @@ class V8EXPORT HeapGraphPath {
 class V8EXPORT HeapGraphNode {
  public:
   enum Type {
-    INTERNAL = 0,   // Internal node, a virtual one, for housekeeping.
-    ARRAY = 1,      // An array of elements.
-    STRING = 2,     // A string.
-    OBJECT = 3,     // A JS object (except for arrays and strings).
-    CODE = 4,       // Compiled code.
-    CLOSURE = 5     // Function closure.
+    kInternal = 0,  // Internal node, a virtual one, for housekeeping.
+    kArray = 1,     // An array of elements.
+    kString = 2,    // A string.
+    kObject = 3,    // A JS object (except for arrays and strings).
+    kCode = 4,      // Compiled code.
+    kClosure = 5    // Function closure.
   };

   /** Returns node type (see HeapGraphNode::Type). */
@@ -268,13 +268,15 @@ class V8EXPORT HeapGraphNode {
   int GetSelfSize() const;

   /** Returns node's network (self + reachable nodes) size, in bytes. */
-  int GetTotalSize() const;
+  int GetReachableSize() const;

   /**
-   * Returns node's private size, in bytes. That is, the size of memory
-   * that will be reclaimed having this node collected.
+   * Returns node's retained size, in bytes. That is, self + sizes of
+   * the objects that are reachable only from this object. In other
+   * words, the size of memory that will be reclaimed having this node
+   * collected.
    */
-  int GetPrivateSize() const;
+  int GetRetainedSize() const;

   /** Returns child nodes count of the node. */
   int GetChildrenCount() const;
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index 3ac10ab917..ff73226925 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -919,6 +919,11 @@ class Value : public Data {
    */
   V8EXPORT bool IsDate() const;

+  /**
+   * Returns true if this value is a RegExp.
+   */
+  V8EXPORT bool IsRegExp() const;
+
   V8EXPORT Local<Boolean> ToBoolean() const;
   V8EXPORT Local<Number> ToNumber() const;
   V8EXPORT Local<String> ToString() const;
@@ -1819,9 +1824,9 @@ typedef Handle<Value> (*IndexedPropertySetter)(uint32_t index,

 /**
  * Returns a non-empty handle if the interceptor intercepts the request.
- * The result is true if the property exists and false otherwise.
+ * The result is an integer encoding property attributes.
  */
-typedef Handle<Boolean> (*IndexedPropertyQuery)(uint32_t index,
+typedef Handle<Integer> (*IndexedPropertyQuery)(uint32_t index,
                                                 const AccessorInfo& info);

 /**
@@ -2140,6 +2145,7 @@ class V8EXPORT ObjectTemplate : public Template {
       IndexedPropertyDeleter deleter = 0,
       IndexedPropertyEnumerator enumerator = 0,
       Handle<Value> data = Handle<Value>());
+
   /**
    * Sets the callback to be used when calling instances created from
    * this template as a function.  If no callback is set, instances
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index 9ff3414c16..29b8e1f377 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -84,6 +84,7 @@ SOURCES = {
     mark-compact.cc
     messages.cc
     objects.cc
+    objects-visiting.cc
     oprofile-agent.cc
     parser.cc
     profile-generator.cc
@@ -117,7 +118,6 @@ SOURCES = {
     zone.cc
     """),
   'arch:arm': Split("""
-    fast-codegen.cc
     jump-target-light.cc
     virtual-frame-light.cc
     arm/builtins-arm.cc
@@ -126,7 +126,6 @@ SOURCES = {
     arm/cpu-arm.cc
     arm/debug-arm.cc
     arm/disasm-arm.cc
-    arm/fast-codegen-arm.cc
    arm/frames-arm.cc
     arm/full-codegen-arm.cc
     arm/ic-arm.cc
@@ -139,7 +138,6 @@ SOURCES = {
     arm/assembler-arm.cc
     """),
   'arch:mips': Split("""
-    fast-codegen.cc
     mips/assembler-mips.cc
     mips/builtins-mips.cc
     mips/codegen-mips.cc
@@ -147,7 +145,6 @@ SOURCES = {
     mips/cpu-mips.cc
     mips/debug-mips.cc
     mips/disasm-mips.cc
-    mips/fast-codegen-mips.cc
     mips/full-codegen-mips.cc
     mips/frames-mips.cc
     mips/ic-mips.cc
@@ -166,7 +163,6 @@ SOURCES = {
     ia32/cpu-ia32.cc
     ia32/debug-ia32.cc
     ia32/disasm-ia32.cc
-    ia32/fast-codegen-ia32.cc
     ia32/frames-ia32.cc
     ia32/full-codegen-ia32.cc
     ia32/ic-ia32.cc
@@ -178,7 +174,6 @@ SOURCES = {
     ia32/virtual-frame-ia32.cc
     """),
   'arch:x64': Split("""
-    fast-codegen.cc
     jump-target-heavy.cc
     virtual-frame-heavy.cc
     x64/assembler-x64.cc
@@ -187,7 +182,6 @@ SOURCES = {
     x64/cpu-x64.cc
     x64/debug-x64.cc
     x64/disasm-x64.cc
-    x64/fast-codegen-x64.cc
     x64/frames-x64.cc
     x64/full-codegen-x64.cc
     x64/ic-x64.cc
diff --git a/deps/v8/src/SConscript.orig b/deps/v8/src/SConscript.orig
new file mode 100755
index 0000000000..e6b4e3820c
--- /dev/null
+++ b/deps/v8/src/SConscript.orig
@@ -0,0 +1,324 @@
+# Copyright 2008 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +import sys +from os.path import join, dirname, abspath +root_dir = dirname(File('SConstruct').rfile().abspath) +sys.path.append(join(root_dir, 'tools')) +import js2c +Import('context') + + +SOURCES = { + 'all': Split(""" + accessors.cc + allocation.cc + api.cc + assembler.cc + ast.cc + bootstrapper.cc + builtins.cc + checks.cc + circular-queue.cc + code-stubs.cc + codegen.cc + compilation-cache.cc + compiler.cc + contexts.cc + conversions.cc + counters.cc + cpu-profiler.cc + data-flow.cc + dateparser.cc + debug-agent.cc + debug.cc + disassembler.cc + diy-fp.cc + dtoa.cc + execution.cc + factory.cc + flags.cc + flow-graph.cc + frame-element.cc + frames.cc + full-codegen.cc + func-name-inferrer.cc + global-handles.cc + fast-dtoa.cc + fixed-dtoa.cc + handles.cc + hashmap.cc + heap-profiler.cc + heap.cc + ic.cc + interpreter-irregexp.cc + jsregexp.cc + jump-target.cc + liveedit.cc + log-utils.cc + log.cc + mark-compact.cc + messages.cc + objects.cc + objects-visiting.cc + oprofile-agent.cc + parser.cc + profile-generator.cc + property.cc + regexp-macro-assembler-irregexp.cc + regexp-macro-assembler.cc + regexp-stack.cc + register-allocator.cc + rewriter.cc + runtime.cc + scanner.cc + scopeinfo.cc + scopes.cc + serialize.cc + snapshot-common.cc + spaces.cc + string-stream.cc + stub-cache.cc + token.cc + top.cc + type-info.cc + unicode.cc + utils.cc + v8-counters.cc + v8.cc + v8threads.cc + variables.cc + version.cc + virtual-frame.cc + vm-state.cc + zone.cc + """), + 'arch:arm': Split(""" + jump-target-light.cc + virtual-frame-light.cc + arm/builtins-arm.cc + arm/codegen-arm.cc + arm/constants-arm.cc + arm/cpu-arm.cc + arm/debug-arm.cc + arm/disasm-arm.cc + arm/frames-arm.cc + arm/full-codegen-arm.cc + arm/ic-arm.cc + arm/jump-target-arm.cc + arm/macro-assembler-arm.cc + arm/regexp-macro-assembler-arm.cc + arm/register-allocator-arm.cc + arm/stub-cache-arm.cc + arm/virtual-frame-arm.cc + arm/assembler-arm.cc + """), + 'arch:mips': Split(""" + mips/assembler-mips.cc + mips/builtins-mips.cc + mips/codegen-mips.cc + mips/constants-mips.cc + mips/cpu-mips.cc + mips/debug-mips.cc + mips/disasm-mips.cc + mips/full-codegen-mips.cc + mips/frames-mips.cc + mips/ic-mips.cc + mips/jump-target-mips.cc + mips/macro-assembler-mips.cc + mips/register-allocator-mips.cc + mips/stub-cache-mips.cc + mips/virtual-frame-mips.cc + """), + 'arch:ia32': Split(""" + jump-target-heavy.cc + virtual-frame-heavy.cc + ia32/assembler-ia32.cc + ia32/builtins-ia32.cc + ia32/codegen-ia32.cc + ia32/cpu-ia32.cc + ia32/debug-ia32.cc + ia32/disasm-ia32.cc + ia32/frames-ia32.cc + ia32/full-codegen-ia32.cc + ia32/ic-ia32.cc + ia32/jump-target-ia32.cc + ia32/macro-assembler-ia32.cc + ia32/regexp-macro-assembler-ia32.cc + ia32/register-allocator-ia32.cc + ia32/stub-cache-ia32.cc + ia32/virtual-frame-ia32.cc + """), + 'arch:x64': Split(""" + jump-target-heavy.cc + virtual-frame-heavy.cc + x64/assembler-x64.cc + x64/builtins-x64.cc + x64/codegen-x64.cc + x64/cpu-x64.cc + x64/debug-x64.cc + x64/disasm-x64.cc + x64/frames-x64.cc + x64/full-codegen-x64.cc + x64/ic-x64.cc + x64/jump-target-x64.cc + x64/macro-assembler-x64.cc + x64/regexp-macro-assembler-x64.cc + x64/register-allocator-x64.cc + x64/stub-cache-x64.cc + x64/virtual-frame-x64.cc + """), + 'simulator:arm': ['arm/simulator-arm.cc'], + 'simulator:mips': ['mips/simulator-mips.cc'], + 'os:freebsd': ['platform-freebsd.cc', 'platform-posix.cc'], + 'os:openbsd': ['platform-openbsd.cc', 'platform-posix.cc'], + 'os:linux': ['platform-linux.cc', 'platform-posix.cc'], + 'os:android': 
['platform-linux.cc', 'platform-posix.cc'], + 'os:macos': ['platform-macos.cc', 'platform-posix.cc'], + 'os:solaris': ['platform-solaris.cc', 'platform-posix.cc'], + 'os:nullos': ['platform-nullos.cc'], + 'os:win32': ['platform-win32.cc'], + 'mode:release': [], + 'mode:debug': [ + 'objects-debug.cc', 'prettyprinter.cc', 'regexp-macro-assembler-tracer.cc' + ] +} + + +D8_FILES = { + 'all': [ + 'd8.cc', 'd8-debug.cc' + ], + 'os:linux': [ + 'd8-posix.cc' + ], + 'os:macos': [ + 'd8-posix.cc' + ], + 'os:android': [ + 'd8-posix.cc' + ], + 'os:freebsd': [ + 'd8-posix.cc' + ], + 'os:openbsd': [ + 'd8-posix.cc' + ], + 'os:solaris': [ + 'd8-posix.cc' + ], + 'os:win32': [ + 'd8-windows.cc' + ], + 'os:nullos': [ + 'd8-windows.cc' # Empty implementation at the moment. + ], + 'console:readline': [ + 'd8-readline.cc' + ] +} + + +LIBRARY_FILES = ''' +runtime.js +v8natives.js +array.js +string.js +uri.js +math.js +messages.js +apinatives.js +date.js +regexp.js +json.js +liveedit-debugger.js +mirror-debugger.js +debug-debugger.js +'''.split() + + +def Abort(message): + print message + sys.exit(1) + + +def ConfigureObjectFiles(): + env = Environment() + env.Replace(**context.flags['v8']) + context.ApplyEnvOverrides(env) + env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C) + env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions') + + # Build the standard platform-independent source files. + source_files = context.GetRelevantSources(SOURCES) + + d8_files = context.GetRelevantSources(D8_FILES) + d8_js = env.JS2C('d8-js.cc', 'd8.js', TYPE='D8') + d8_js_obj = context.ConfigureObject(env, d8_js, CPPPATH=['.']) + d8_objs = [context.ConfigureObject(env, [d8_files]), d8_js_obj] + + # Combine the JavaScript library files into a single C++ file and + # compile it. + library_files = [s for s in LIBRARY_FILES] + library_files.append('macros.py') + libraries_src, libraries_empty_src = env.JS2C(['libraries.cc', 'libraries-empty.cc'], library_files, TYPE='CORE') + libraries_obj = context.ConfigureObject(env, libraries_src, CPPPATH=['.']) + + # Build dtoa. + dtoa_env = env.Copy() + dtoa_env.Replace(**context.flags['dtoa']) + dtoa_files = ['dtoa-config.c'] + dtoa_obj = context.ConfigureObject(dtoa_env, dtoa_files) + + source_objs = context.ConfigureObject(env, source_files) + non_snapshot_files = [dtoa_obj, source_objs] + + # Create snapshot if necessary. For cross compilation you should either + # do without snapshots and take the performance hit or you should build a + # host VM with the simulator=arm and snapshot=on options and then take the + # resulting snapshot.cc file from obj/release and put it in the src + # directory. Then rebuild the VM with the cross compiler and specify + # snapshot=nobuild on the scons command line. 
+ empty_snapshot_obj = context.ConfigureObject(env, 'snapshot-empty.cc') + mksnapshot_env = env.Copy() + mksnapshot_env.Replace(**context.flags['mksnapshot']) + mksnapshot_src = 'mksnapshot.cc' + mksnapshot = mksnapshot_env.Program('mksnapshot', [mksnapshot_src, libraries_obj, non_snapshot_files, empty_snapshot_obj], PDB='mksnapshot.exe.pdb') + if context.use_snapshot: + if context.build_snapshot: + snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot, LOGFILE=File('snapshot.log').abspath) + else: + snapshot_cc = 'snapshot.cc' + snapshot_obj = context.ConfigureObject(env, snapshot_cc, CPPPATH=['.']) + else: + snapshot_obj = empty_snapshot_obj + library_objs = [non_snapshot_files, libraries_obj, snapshot_obj] + return (library_objs, d8_objs, [mksnapshot]) + + +(library_objs, d8_objs, mksnapshot) = ConfigureObjectFiles() +Return('library_objs d8_objs mksnapshot') diff --git a/deps/v8/src/accessors.cc b/deps/v8/src/accessors.cc index ed0bbd7a1c..3c49846601 100644 --- a/deps/v8/src/accessors.cc +++ b/deps/v8/src/accessors.cc @@ -488,7 +488,7 @@ Object* Accessors::FunctionGetLength(Object* object, void*) { JSFunction* function = FindInPrototypeChain(object, &found_it); if (!found_it) return Smi::FromInt(0); // Check if already compiled. - if (!function->is_compiled()) { + if (!function->shared()->is_compiled()) { // If the function isn't compiled yet, the length is not computed // correctly yet. Compile it now and return the right length. HandleScope scope; diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc index 4fdc95f5ee..7a967dbffd 100644 --- a/deps/v8/src/api.cc +++ b/deps/v8/src/api.cc @@ -174,6 +174,8 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) { heap_stats.objects_per_type = objects_per_type; int size_per_type[LAST_TYPE + 1] = {0}; heap_stats.size_per_type = size_per_type; + int os_error; + heap_stats.os_error = &os_error; int end_marker; heap_stats.end_marker = &end_marker; i::Heap::RecordStats(&heap_stats, take_snapshot); @@ -1792,6 +1794,13 @@ bool Value::IsDate() const { } +bool Value::IsRegExp() const { + if (IsDeadCheck("v8::Value::IsRegExp()")) return false; + i::Handle obj = Utils::OpenHandle(this); + return obj->IsJSRegExp(); +} + + Local Value::ToString() const { if (IsDeadCheck("v8::Value::ToString()")) return Local(); LOG_API("ToString"); @@ -4491,24 +4500,27 @@ const CpuProfile* CpuProfiler::StopProfiling(Handle title, } +static i::HeapGraphEdge* ToInternal(const HeapGraphEdge* edge) { + return const_cast( + reinterpret_cast(edge)); +} + HeapGraphEdge::Type HeapGraphEdge::GetType() const { IsDeadCheck("v8::HeapGraphEdge::GetType"); - return static_cast( - reinterpret_cast(this)->type()); + return static_cast(ToInternal(this)->type()); } Handle HeapGraphEdge::GetName() const { IsDeadCheck("v8::HeapGraphEdge::GetName"); - const i::HeapGraphEdge* edge = - reinterpret_cast(this); + i::HeapGraphEdge* edge = ToInternal(this); switch (edge->type()) { - case i::HeapGraphEdge::CONTEXT_VARIABLE: - case i::HeapGraphEdge::INTERNAL: - case i::HeapGraphEdge::PROPERTY: + case i::HeapGraphEdge::kContextVariable: + case i::HeapGraphEdge::kInternal: + case i::HeapGraphEdge::kProperty: return Handle(ToApi(i::Factory::LookupAsciiSymbol( edge->name()))); - case i::HeapGraphEdge::ELEMENT: + case i::HeapGraphEdge::kElement: return Handle(ToApi(i::Factory::NewNumberFromInt( edge->index()))); default: UNREACHABLE(); @@ -4519,28 +4531,32 @@ Handle HeapGraphEdge::GetName() const { const HeapGraphNode* HeapGraphEdge::GetFromNode() const { 
IsDeadCheck("v8::HeapGraphEdge::GetFromNode"); - const i::HeapEntry* from = - reinterpret_cast(this)->from(); + const i::HeapEntry* from = ToInternal(this)->From(); return reinterpret_cast(from); } const HeapGraphNode* HeapGraphEdge::GetToNode() const { IsDeadCheck("v8::HeapGraphEdge::GetToNode"); - const i::HeapEntry* to = - reinterpret_cast(this)->to(); + const i::HeapEntry* to = ToInternal(this)->to(); return reinterpret_cast(to); } +static i::HeapGraphPath* ToInternal(const HeapGraphPath* path) { + return const_cast( + reinterpret_cast(path)); +} + + int HeapGraphPath::GetEdgesCount() const { - return reinterpret_cast(this)->path()->length(); + return ToInternal(this)->path()->length(); } const HeapGraphEdge* HeapGraphPath::GetEdge(int index) const { return reinterpret_cast( - reinterpret_cast(this)->path()->at(index)); + ToInternal(this)->path()->at(index)); } @@ -4555,137 +4571,136 @@ const HeapGraphNode* HeapGraphPath::GetToNode() const { } +static i::HeapEntry* ToInternal(const HeapGraphNode* entry) { + return const_cast( + reinterpret_cast(entry)); +} + + HeapGraphNode::Type HeapGraphNode::GetType() const { IsDeadCheck("v8::HeapGraphNode::GetType"); - return static_cast( - reinterpret_cast(this)->type()); + return static_cast(ToInternal(this)->type()); } Handle HeapGraphNode::GetName() const { IsDeadCheck("v8::HeapGraphNode::GetName"); return Handle(ToApi(i::Factory::LookupAsciiSymbol( - reinterpret_cast(this)->name()))); + ToInternal(this)->name()))); } uint64_t HeapGraphNode::GetId() const { IsDeadCheck("v8::HeapGraphNode::GetId"); - return reinterpret_cast(this)->id(); + return ToInternal(this)->id(); } int HeapGraphNode::GetSelfSize() const { IsDeadCheck("v8::HeapGraphNode::GetSelfSize"); - return reinterpret_cast(this)->self_size(); + return ToInternal(this)->self_size(); } -int HeapGraphNode::GetTotalSize() const { - IsDeadCheck("v8::HeapSnapshot::GetHead"); - return const_cast( - reinterpret_cast(this))->TotalSize(); +int HeapGraphNode::GetReachableSize() const { + IsDeadCheck("v8::HeapSnapshot::GetReachableSize"); + return ToInternal(this)->ReachableSize(); } -int HeapGraphNode::GetPrivateSize() const { - IsDeadCheck("v8::HeapSnapshot::GetPrivateSize"); - return const_cast( - reinterpret_cast(this))->NonSharedTotalSize(); +int HeapGraphNode::GetRetainedSize() const { + IsDeadCheck("v8::HeapSnapshot::GetRetainedSize"); + return ToInternal(this)->RetainedSize(); } int HeapGraphNode::GetChildrenCount() const { IsDeadCheck("v8::HeapSnapshot::GetChildrenCount"); - return reinterpret_cast(this)->children()->length(); + return ToInternal(this)->children().length(); } const HeapGraphEdge* HeapGraphNode::GetChild(int index) const { IsDeadCheck("v8::HeapSnapshot::GetChild"); return reinterpret_cast( - reinterpret_cast(this)->children()->at(index)); + &ToInternal(this)->children()[index]); } int HeapGraphNode::GetRetainersCount() const { IsDeadCheck("v8::HeapSnapshot::GetRetainersCount"); - return reinterpret_cast(this)->retainers()->length(); + return ToInternal(this)->retainers().length(); } const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const { IsDeadCheck("v8::HeapSnapshot::GetRetainer"); return reinterpret_cast( - reinterpret_cast(this)->retainers()->at(index)); + ToInternal(this)->retainers()[index]); } int HeapGraphNode::GetRetainingPathsCount() const { IsDeadCheck("v8::HeapSnapshot::GetRetainingPathsCount"); - return const_cast( - reinterpret_cast( - this))->GetRetainingPaths()->length(); + return ToInternal(this)->GetRetainingPaths()->length(); } const 
HeapGraphPath* HeapGraphNode::GetRetainingPath(int index) const { IsDeadCheck("v8::HeapSnapshot::GetRetainingPath"); return reinterpret_cast( - const_cast( - reinterpret_cast( - this))->GetRetainingPaths()->at(index)); + ToInternal(this)->GetRetainingPaths()->at(index)); } const HeapGraphNode* HeapSnapshotsDiff::GetAdditionsRoot() const { IsDeadCheck("v8::HeapSnapshotsDiff::GetAdditionsRoot"); - const i::HeapSnapshotsDiff* diff = - reinterpret_cast(this); + i::HeapSnapshotsDiff* diff = + const_cast( + reinterpret_cast(this)); return reinterpret_cast(diff->additions_root()); } const HeapGraphNode* HeapSnapshotsDiff::GetDeletionsRoot() const { IsDeadCheck("v8::HeapSnapshotsDiff::GetDeletionsRoot"); - const i::HeapSnapshotsDiff* diff = - reinterpret_cast(this); + i::HeapSnapshotsDiff* diff = + const_cast( + reinterpret_cast(this)); return reinterpret_cast(diff->deletions_root()); } +static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) { + return const_cast( + reinterpret_cast(snapshot)); +} + + unsigned HeapSnapshot::GetUid() const { IsDeadCheck("v8::HeapSnapshot::GetUid"); - return reinterpret_cast(this)->uid(); + return ToInternal(this)->uid(); } Handle HeapSnapshot::GetTitle() const { IsDeadCheck("v8::HeapSnapshot::GetTitle"); - const i::HeapSnapshot* snapshot = - reinterpret_cast(this); return Handle(ToApi(i::Factory::LookupAsciiSymbol( - snapshot->title()))); + ToInternal(this)->title()))); } const HeapGraphNode* HeapSnapshot::GetRoot() const { IsDeadCheck("v8::HeapSnapshot::GetHead"); - const i::HeapSnapshot* snapshot = - reinterpret_cast(this); - return reinterpret_cast(snapshot->const_root()); + return reinterpret_cast(ToInternal(this)->root()); } const HeapSnapshotsDiff* HeapSnapshot::CompareWith( const HeapSnapshot* snapshot) const { IsDeadCheck("v8::HeapSnapshot::CompareWith"); - i::HeapSnapshot* snapshot1 = const_cast( - reinterpret_cast(this)); - i::HeapSnapshot* snapshot2 = const_cast( - reinterpret_cast(snapshot)); return reinterpret_cast( - snapshot1->CompareWith(snapshot2)); + ToInternal(this)->CompareWith(ToInternal(snapshot))); } diff --git a/deps/v8/src/arm/assembler-arm-inl.h b/deps/v8/src/arm/assembler-arm-inl.h index 5be57709e3..f72ad76abe 100644 --- a/deps/v8/src/arm/assembler-arm-inl.h +++ b/deps/v8/src/arm/assembler-arm-inl.h @@ -190,6 +190,29 @@ void RelocInfo::Visit(ObjectVisitor* visitor) { } +template +void RelocInfo::Visit() { + RelocInfo::Mode mode = rmode(); + if (mode == RelocInfo::EMBEDDED_OBJECT) { + StaticVisitor::VisitPointer(target_object_address()); + } else if (RelocInfo::IsCodeTarget(mode)) { + StaticVisitor::VisitCodeTarget(this); + } else if (mode == RelocInfo::EXTERNAL_REFERENCE) { + StaticVisitor::VisitExternalReference(target_reference_address()); +#ifdef ENABLE_DEBUGGER_SUPPORT + } else if (Debug::has_break_points() && + ((RelocInfo::IsJSReturn(mode) && + IsPatchedReturnSequence()) || + (RelocInfo::IsDebugBreakSlot(mode) && + IsPatchedDebugBreakSlotSequence()))) { + StaticVisitor::VisitDebugTarget(this); +#endif + } else if (mode == RelocInfo::RUNTIME_ENTRY) { + StaticVisitor::VisitRuntimeEntry(this); + } +} + + Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) { rm_ = no_reg; imm32_ = immediate; diff --git a/deps/v8/src/arm/assembler-arm.cc b/deps/v8/src/arm/assembler-arm.cc index b1705df9be..136c82e7ef 100644 --- a/deps/v8/src/arm/assembler-arm.cc +++ b/deps/v8/src/arm/assembler-arm.cc @@ -2276,6 +2276,21 @@ void Assembler::vcmp(const DwVfpRegister src1, } +void Assembler::vcmp(const DwVfpRegister src1, + const double 
src2, + const SBit s, + const Condition cond) { + // vcmp(Dd, Dm) double precision floating point comparison. + // Instruction details available in ARM DDI 0406A, A8-570. + // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) | + // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=? | 1(6) | M(5)=? | 0(4) | 0000(3-0) + ASSERT(CpuFeatures::IsEnabled(VFP3)); + ASSERT(src2 == 0.0); + emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 | + src1.code()*B12 | 0x5*B9 | B8 | B6); +} + + void Assembler::vmrs(Register dst, Condition cond) { // Instruction details available in ARM DDI 0406A, A8-652. // cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) | diff --git a/deps/v8/src/arm/assembler-arm.h b/deps/v8/src/arm/assembler-arm.h index 16e69e2968..218eb97f3c 100644 --- a/deps/v8/src/arm/assembler-arm.h +++ b/deps/v8/src/arm/assembler-arm.h @@ -1031,6 +1031,10 @@ class Assembler : public Malloced { const DwVfpRegister src2, const SBit s = LeaveCC, const Condition cond = al); + void vcmp(const DwVfpRegister src1, + const double src2, + const SBit s = LeaveCC, + const Condition cond = al); void vmrs(const Register dst, const Condition cond = al); void vsqrt(const DwVfpRegister dst, diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc index b1f29ba381..7e7e358c10 100644 --- a/deps/v8/src/arm/builtins-arm.cc +++ b/deps/v8/src/arm/builtins-arm.cc @@ -911,6 +911,29 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { } +void Builtins::Generate_LazyCompile(MacroAssembler* masm) { + // Enter an internal frame. + __ EnterInternalFrame(); + + // Preserve the function. + __ push(r1); + + // Push the function on the stack as the argument to the runtime function. + __ push(r1); + __ CallRuntime(Runtime::kLazyCompile, 1); + // Calculate the entry point. + __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); + // Restore saved function. + __ pop(r1); + + // Tear down temporary frame. + __ LeaveInternalFrame(); + + // Do a tail-call of the compiled function. + __ Jump(r2); +} + + void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // 1. Make sure we have at least one argument. // r0: actual number of arguments @@ -1050,7 +1073,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ ldr(r2, FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); __ mov(r2, Operand(r2, ASR, kSmiTagSize)); - __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); + __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeOffset)); __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); __ cmp(r2, r0); // Check formal and actual parameter counts. __ Jump(Handle(builtin(ArgumentsAdaptorTrampoline)), diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc index 4bcf1a07df..df17b6f864 100644 --- a/deps/v8/src/arm/codegen-arm.cc +++ b/deps/v8/src/arm/codegen-arm.cc @@ -217,93 +217,80 @@ void CodeGenerator::Generate(CompilationInfo* info) { } #endif - if (info->mode() == CompilationInfo::PRIMARY) { - frame_->Enter(); - // tos: code slot - - // Allocate space for locals and initialize them. This also checks - // for stack overflow. - frame_->AllocateStackSlots(); - - frame_->AssertIsSpilled(); - int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; - if (heap_slots > 0) { - // Allocate local context. - // Get outer context and create a new context based on it. 
- __ ldr(r0, frame_->Function()); - frame_->EmitPush(r0); - if (heap_slots <= FastNewContextStub::kMaximumSlots) { - FastNewContextStub stub(heap_slots); - frame_->CallStub(&stub, 1); - } else { - frame_->CallRuntime(Runtime::kNewContext, 1); - } + frame_->Enter(); + // tos: code slot + + // Allocate space for locals and initialize them. This also checks + // for stack overflow. + frame_->AllocateStackSlots(); + + frame_->AssertIsSpilled(); + int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; + if (heap_slots > 0) { + // Allocate local context. + // Get outer context and create a new context based on it. + __ ldr(r0, frame_->Function()); + frame_->EmitPush(r0); + if (heap_slots <= FastNewContextStub::kMaximumSlots) { + FastNewContextStub stub(heap_slots); + frame_->CallStub(&stub, 1); + } else { + frame_->CallRuntime(Runtime::kNewContext, 1); + } #ifdef DEBUG - JumpTarget verified_true; - __ cmp(r0, cp); - verified_true.Branch(eq); - __ stop("NewContext: r0 is expected to be the same as cp"); - verified_true.Bind(); + JumpTarget verified_true; + __ cmp(r0, cp); + verified_true.Branch(eq); + __ stop("NewContext: r0 is expected to be the same as cp"); + verified_true.Bind(); #endif - // Update context local. - __ str(cp, frame_->Context()); - } + // Update context local. + __ str(cp, frame_->Context()); + } - // TODO(1241774): Improve this code: - // 1) only needed if we have a context - // 2) no need to recompute context ptr every single time - // 3) don't copy parameter operand code from SlotOperand! - { - Comment cmnt2(masm_, "[ copy context parameters into .context"); - // Note that iteration order is relevant here! If we have the same - // parameter twice (e.g., function (x, y, x)), and that parameter - // needs to be copied into the context, it must be the last argument - // passed to the parameter that needs to be copied. This is a rare - // case so we don't check for it, instead we rely on the copying - // order: such a parameter is copied repeatedly into the same - // context location and thus the last value is what is seen inside - // the function. - frame_->AssertIsSpilled(); - for (int i = 0; i < scope()->num_parameters(); i++) { - Variable* par = scope()->parameter(i); - Slot* slot = par->slot(); - if (slot != NULL && slot->type() == Slot::CONTEXT) { - ASSERT(!scope()->is_global_scope()); // No params in global scope. - __ ldr(r1, frame_->ParameterAt(i)); - // Loads r2 with context; used below in RecordWrite. - __ str(r1, SlotOperand(slot, r2)); - // Load the offset into r3. - int slot_offset = - FixedArray::kHeaderSize + slot->index() * kPointerSize; - __ RecordWrite(r2, Operand(slot_offset), r3, r1); - } + // TODO(1241774): Improve this code: + // 1) only needed if we have a context + // 2) no need to recompute context ptr every single time + // 3) don't copy parameter operand code from SlotOperand! + { + Comment cmnt2(masm_, "[ copy context parameters into .context"); + // Note that iteration order is relevant here! If we have the same + // parameter twice (e.g., function (x, y, x)), and that parameter + // needs to be copied into the context, it must be the last argument + // passed to the parameter that needs to be copied. This is a rare + // case so we don't check for it, instead we rely on the copying + // order: such a parameter is copied repeatedly into the same + // context location and thus the last value is what is seen inside + // the function. 
+ frame_->AssertIsSpilled(); + for (int i = 0; i < scope()->num_parameters(); i++) { + Variable* par = scope()->parameter(i); + Slot* slot = par->slot(); + if (slot != NULL && slot->type() == Slot::CONTEXT) { + ASSERT(!scope()->is_global_scope()); // No params in global scope. + __ ldr(r1, frame_->ParameterAt(i)); + // Loads r2 with context; used below in RecordWrite. + __ str(r1, SlotOperand(slot, r2)); + // Load the offset into r3. + int slot_offset = + FixedArray::kHeaderSize + slot->index() * kPointerSize; + __ RecordWrite(r2, Operand(slot_offset), r3, r1); } } + } - // Store the arguments object. This must happen after context - // initialization because the arguments object may be stored in - // the context. - if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { - StoreArgumentsObject(true); - } + // Store the arguments object. This must happen after context + // initialization because the arguments object may be stored in + // the context. + if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { + StoreArgumentsObject(true); + } - // Initialize ThisFunction reference if present. - if (scope()->is_function_scope() && scope()->function() != NULL) { - frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex); - StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); - } - } else { - // When used as the secondary compiler for splitting, r1, cp, - // fp, and lr have been pushed on the stack. Adjust the virtual - // frame to match this state. - frame_->Adjust(4); - - // Bind all the bailout labels to the beginning of the function. - List* bailouts = info->bailouts(); - for (int i = 0; i < bailouts->length(); i++) { - __ bind(bailouts->at(i)->label()); - } + // Initialize ThisFunction reference if present. + if (scope()->is_function_scope() && scope()->function() != NULL) { + frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex); + StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); } // Initialize the function return target after the locals are set @@ -532,6 +519,10 @@ void CodeGenerator::LoadCondition(Expression* x, void CodeGenerator::Load(Expression* expr) { + // We generally assume that we are not in a spilled scope for most + // of the code generator. A failure to ensure this caused issue 815 + // and this assert is designed to catch similar issues. + frame_->AssertIsNotSpilled(); #ifdef DEBUG int original_height = frame_->height(); #endif @@ -688,6 +679,10 @@ Reference::Reference(CodeGenerator* cgen, expression_(expression), type_(ILLEGAL), persist_after_get_(persist_after_get) { + // We generally assume that we are not in a spilled scope for most + // of the code generator. A failure to ensure this caused issue 815 + // and this assert is designed to catch similar issues. + cgen->frame()->AssertIsNotSpilled(); cgen->LoadReference(this); } @@ -784,12 +779,26 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target, __ tst(tos, Operand(kSmiTagMask)); true_target->Branch(eq); - // Slow case: call the runtime. - frame_->EmitPush(tos); - frame_->CallRuntime(Runtime::kToBool, 1); - // Convert the result (r0) to a condition code. - __ LoadRoot(ip, Heap::kFalseValueRootIndex); - __ cmp(r0, ip); + // Slow case. + if (CpuFeatures::IsSupported(VFP3)) { + CpuFeatures::Scope scope(VFP3); + // Implements the slow case by using ToBooleanStub. + // The ToBooleanStub takes a single argument, and + // returns a non-zero value for true, or zero for false. 
+ // Both the argument value and the return value use the + // register assigned to tos_ + ToBooleanStub stub(tos); + frame_->CallStub(&stub, 0); + // Convert the result in "tos" to a condition code. + __ cmp(tos, Operand(0)); + } else { + // Implements slow case by calling the runtime. + frame_->EmitPush(tos); + frame_->CallRuntime(Runtime::kToBool, 1); + // Convert the result (r0) to a condition code. + __ LoadRoot(ip, Heap::kFalseValueRootIndex); + __ cmp(r0, ip); + } } cc_reg_ = ne; @@ -1213,7 +1222,21 @@ void CodeGenerator::SmiOperation(Token::Value op, case Token::SHR: case Token::SAR: { ASSERT(!reversed); - TypeInfo result = TypeInfo::Integer32(); + TypeInfo result = + (op == Token::SAR) ? TypeInfo::Integer32() : TypeInfo::Number(); + if (!reversed) { + if (op == Token::SHR) { + if (int_value >= 2) { + result = TypeInfo::Smi(); + } else if (int_value >= 1) { + result = TypeInfo::Integer32(); + } + } else { + if (int_value >= 1) { + result = TypeInfo::Smi(); + } + } + } Register scratch = VirtualFrame::scratch0(); Register scratch2 = VirtualFrame::scratch1(); int shift_value = int_value & 0x1f; // least significant 5 bits @@ -1532,9 +1555,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand, __ BranchOnSmi(r0, &build_args); __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); __ b(ne, &build_args); - __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); Handle apply_code(Builtins::builtin(Builtins::FunctionApply)); - __ ldr(r1, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset)); + __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeOffset)); __ cmp(r1, Operand(apply_code)); __ b(ne, &build_args); @@ -1899,19 +1921,17 @@ void CodeGenerator::VisitBreakStatement(BreakStatement* node) { void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { - frame_->SpillAll(); Comment cmnt(masm_, "[ ReturnStatement"); CodeForStatementPosition(node); Load(node->expression()); + frame_->PopToR0(); + frame_->PrepareForReturn(); if (function_return_is_shadowed_) { - frame_->EmitPop(r0); function_return_.Jump(); } else { // Pop the result from the frame and prepare the frame for // returning thus making it easier to merge. - frame_->PopToR0(); - frame_->PrepareForReturn(); if (function_return_.is_bound()) { // If the function return label is already bound we reuse the // code by jumping to the return site. @@ -2307,7 +2327,6 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { #ifdef DEBUG int original_height = frame_->height(); #endif - VirtualFrame::SpilledScope spilled_scope(frame_); Comment cmnt(masm_, "[ ForInStatement"); CodeForStatementPosition(node); @@ -2321,6 +2340,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { // Get the object to enumerate over (converted to JSObject). Load(node->enumerable()); + VirtualFrame::SpilledScope spilled_scope(frame_); // Both SpiderMonkey and kjs ignore null and undefined in contrast // to the specification. 12.6.4 mandates a call to ToObject. frame_->EmitPop(r0); @@ -2482,36 +2502,39 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { frame_->EmitPush(r0); frame_->EmitPush(r3); // push entry frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2); - __ mov(r3, Operand(r0)); - + __ mov(r3, Operand(r0), SetCC); // If the property has been removed while iterating, we just skip it. - __ LoadRoot(ip, Heap::kNullValueRootIndex); - __ cmp(r3, ip); node->continue_target()->Branch(eq); end_del_check.Bind(); // Store the entry in the 'each' expression and take another spin in the // loop. 
r3: i'th entry of the enum cache (or string there of) frame_->EmitPush(r3); // push entry - { Reference each(this, node->each()); + { VirtualFrame::RegisterAllocationScope scope(this); + Reference each(this, node->each()); if (!each.is_illegal()) { if (each.size() > 0) { + // Loading a reference may leave the frame in an unspilled state. + frame_->SpillAll(); // Sync stack to memory. + // Get the value (under the reference on the stack) from memory. __ ldr(r0, frame_->ElementAt(each.size())); frame_->EmitPush(r0); each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI); - frame_->Drop(2); + frame_->Drop(2); // The result of the set and the extra pushed value. } else { // If the reference was to a slot we rely on the convenient property - // that it doesn't matter whether a value (eg, r3 pushed above) is + // that it doesn't matter whether a value (eg, ebx pushed above) is // right on top of or right underneath a zero-sized reference. each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI); - frame_->Drop(); + frame_->Drop(1); // Drop the result of the set operation. } } } // Body. CheckStack(); // TODO(1222600): ignore if body contains calls. - Visit(node->body()); + { VirtualFrame::RegisterAllocationScope scope(this); + Visit(node->body()); + } // Next. Reestablish a spilled frame in case we are coming here via // a continue in the body. @@ -2558,7 +2581,9 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { // Remove the exception from the stack. frame_->Drop(); - VisitStatements(node->catch_block()->statements()); + { VirtualFrame::RegisterAllocationScope scope(this); + VisitStatements(node->catch_block()->statements()); + } if (frame_ != NULL) { exit.Jump(); } @@ -2593,7 +2618,9 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { } // Generate code for the statements in the try block. - VisitStatements(node->try_block()->statements()); + { VirtualFrame::RegisterAllocationScope scope(this); + VisitStatements(node->try_block()->statements()); + } // Stop the introduced shadowing and count the number of required unlinks. // After shadowing stops, the original labels are unshadowed and the @@ -2614,7 +2641,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { // the handler list and drop the rest of this handler from the // frame. STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); - frame_->EmitPop(r1); + frame_->EmitPop(r1); // r0 can contain the return value. __ mov(r3, Operand(handler_address)); __ str(r1, MemOperand(r3)); frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); @@ -2640,7 +2667,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { frame_->Forget(frame_->height() - handler_height); STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); - frame_->EmitPop(r1); + frame_->EmitPop(r1); // r0 can contain the return value. __ str(r1, MemOperand(r3)); frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); @@ -2707,7 +2734,9 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { } // Generate code for the statements in the try block. - VisitStatements(node->try_block()->statements()); + { VirtualFrame::RegisterAllocationScope scope(this); + VisitStatements(node->try_block()->statements()); + } // Stop the introduced shadowing and count the number of required unlinks. 
// After shadowing stops, the original labels are unshadowed and the @@ -2797,7 +2826,9 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { // and the state - while evaluating the finally block. // // Generate code for the statements in the finally block. - VisitStatements(node->finally_block()->statements()); + { VirtualFrame::RegisterAllocationScope scope(this); + VisitStatements(node->finally_block()->statements()); + } if (has_valid_frame()) { // Restore state and return value or faked TOS. @@ -3977,7 +4008,6 @@ void CodeGenerator::VisitCall(Call* node) { } else if (var != NULL && var->slot() != NULL && var->slot()->type() == Slot::LOOKUP) { - VirtualFrame::SpilledScope spilled_scope(frame_); // ---------------------------------- // JavaScript examples: // @@ -3990,8 +4020,6 @@ void CodeGenerator::VisitCall(Call* node) { // } // ---------------------------------- - // JumpTargets do not yet support merging frames so the frame must be - // spilled when jumping to these targets. JumpTarget slow, done; // Generate fast case for loading functions from slots that @@ -4005,8 +4033,7 @@ void CodeGenerator::VisitCall(Call* node) { slow.Bind(); // Load the function frame_->EmitPush(cp); - __ mov(r0, Operand(var->name())); - frame_->EmitPush(r0); + frame_->EmitPush(Operand(var->name())); frame_->CallRuntime(Runtime::kLoadContextSlot, 2); // r0: slot value; r1: receiver @@ -4022,7 +4049,7 @@ void CodeGenerator::VisitCall(Call* node) { call.Jump(); done.Bind(); frame_->EmitPush(r0); // function - LoadGlobalReceiver(r1); // receiver + LoadGlobalReceiver(VirtualFrame::scratch0()); // receiver call.Bind(); } @@ -4077,8 +4104,6 @@ void CodeGenerator::VisitCall(Call* node) { // ------------------------------------------- // JavaScript example: 'array[index](1, 2, 3)' // ------------------------------------------- - VirtualFrame::SpilledScope spilled_scope(frame_); - Load(property->obj()); if (property->is_synthetic()) { Load(property->key()); @@ -4086,7 +4111,7 @@ void CodeGenerator::VisitCall(Call* node) { // Put the function below the receiver. // Use the global receiver. frame_->EmitPush(r0); // Function. - LoadGlobalReceiver(r0); + LoadGlobalReceiver(VirtualFrame::scratch0()); // Call the function. CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); frame_->EmitPush(r0); @@ -4099,6 +4124,7 @@ void CodeGenerator::VisitCall(Call* node) { // Set the name register and call the IC initialization code. Load(property->key()); + frame_->SpillAll(); frame_->EmitPop(r2); // Function name. InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; @@ -4118,10 +4144,8 @@ void CodeGenerator::VisitCall(Call* node) { // Load the function. Load(function); - VirtualFrame::SpilledScope spilled_scope(frame_); - // Pass the global proxy as the receiver. - LoadGlobalReceiver(r0); + LoadGlobalReceiver(VirtualFrame::scratch0()); // Call the function. CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position()); @@ -4176,21 +4200,21 @@ void CodeGenerator::VisitCallNew(CallNew* node) { void CodeGenerator::GenerateClassOf(ZoneList* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); - ASSERT(args->length() == 1); - JumpTarget leave, null, function, non_function_constructor; + Register scratch = VirtualFrame::scratch0(); + JumpTarget null, function, leave, non_function_constructor; - // Load the object into r0. + // Load the object into register. 
+ ASSERT(args->length() == 1); Load(args->at(0)); - frame_->EmitPop(r0); + Register tos = frame_->PopToRegister(); // If the object is a smi, we return null. - __ tst(r0, Operand(kSmiTagMask)); + __ tst(tos, Operand(kSmiTagMask)); null.Branch(eq); // Check that the object is a JS object but take special care of JS // functions to make sure they have 'Function' as their class. - __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); + __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE); null.Branch(lt); // As long as JS_FUNCTION_TYPE is the last instance type and it is @@ -4198,37 +4222,38 @@ void CodeGenerator::GenerateClassOf(ZoneList* args) { // LAST_JS_OBJECT_TYPE. STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); - __ cmp(r1, Operand(JS_FUNCTION_TYPE)); + __ cmp(scratch, Operand(JS_FUNCTION_TYPE)); function.Branch(eq); // Check if the constructor in the map is a function. - __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); + __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset)); + __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE); non_function_constructor.Branch(ne); - // The r0 register now contains the constructor function. Grab the + // The tos register now contains the constructor function. Grab the // instance class name from there. - __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); - __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); - frame_->EmitPush(r0); + __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(tos, + FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset)); + frame_->EmitPush(tos); leave.Jump(); // Functions have class 'Function'. function.Bind(); - __ mov(r0, Operand(Factory::function_class_symbol())); - frame_->EmitPush(r0); + __ mov(tos, Operand(Factory::function_class_symbol())); + frame_->EmitPush(tos); leave.Jump(); // Objects with a non-function constructor have class 'Object'. non_function_constructor.Bind(); - __ mov(r0, Operand(Factory::Object_symbol())); - frame_->EmitPush(r0); + __ mov(tos, Operand(Factory::Object_symbol())); + frame_->EmitPush(tos); leave.Jump(); // Non-JS objects have class null. null.Bind(); - __ LoadRoot(r0, Heap::kNullValueRootIndex); - frame_->EmitPush(r0); + __ LoadRoot(tos, Heap::kNullValueRootIndex); + frame_->EmitPush(tos); // All done. leave.Bind(); @@ -4236,45 +4261,51 @@ void CodeGenerator::GenerateClassOf(ZoneList* args) { void CodeGenerator::GenerateValueOf(ZoneList* args) { - VirtualFrame::SpilledScope spilled_scope(frame_); - ASSERT(args->length() == 1); + Register scratch = VirtualFrame::scratch0(); JumpTarget leave; + + ASSERT(args->length() == 1); Load(args->at(0)); - frame_->EmitPop(r0); // r0 contains object. + Register tos = frame_->PopToRegister(); // tos contains object. // if (object->IsSmi()) return the object. - __ tst(r0, Operand(kSmiTagMask)); + __ tst(tos, Operand(kSmiTagMask)); leave.Branch(eq); // It is a heap object - get map. If (!object->IsJSValue()) return the object. - __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); + __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE); leave.Branch(ne); // Load the value. 
-  __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
+  __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
   leave.Bind();
-  frame_->EmitPush(r0);
+  frame_->EmitPush(tos);
 }


 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
-  ASSERT(args->length() == 2);
+  Register scratch1 = VirtualFrame::scratch0();
+  Register scratch2 = VirtualFrame::scratch1();
   JumpTarget leave;
+
+  ASSERT(args->length() == 2);
   Load(args->at(0));  // Load the object.
   Load(args->at(1));  // Load the value.
-  frame_->EmitPop(r0);  // r0 contains value
-  frame_->EmitPop(r1);  // r1 contains object
+  Register value = frame_->PopToRegister();
+  Register object = frame_->PopToRegister(value);
   // if (object->IsSmi()) return object.
-  __ tst(r1, Operand(kSmiTagMask));
+  __ tst(object, Operand(kSmiTagMask));
   leave.Branch(eq);
   // It is a heap object - get map. If (!object->IsJSValue()) return the object.
-  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
+  __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
   leave.Branch(ne);
   // Store the value.
-  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
+  __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
   // Update the write barrier.
-  __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
+  __ RecordWrite(object,
+                 Operand(JSValue::kValueOffset - kHeapObjectTag),
+                 scratch1,
+                 scratch2);
   // Leave.
   leave.Bind();
-  frame_->EmitPush(r0);
+  frame_->EmitPush(value);
 }

@@ -4558,22 +4589,18 @@ class DeferredStringCharCodeAt : public DeferredCode {
 // This generates code that performs a String.prototype.charCodeAt() call
 // or returns a smi in order to trigger conversion.
 void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment(masm_, "[ GenerateStringCharCodeAt");
   ASSERT(args->length() == 2);

   Load(args->at(0));
   Load(args->at(1));

-  Register index = r1;
-  Register object = r2;
-
-  frame_->EmitPop(r1);
-  frame_->EmitPop(r2);
+  Register index = frame_->PopToRegister();
+  Register object = frame_->PopToRegister(index);

   // We need two extra registers.
-  Register scratch = r3;
-  Register result = r0;
+  Register scratch = VirtualFrame::scratch0();
+  Register result = VirtualFrame::scratch1();

   DeferredStringCharCodeAt* deferred =
       new DeferredStringCharCodeAt(object,
@@ -4608,16 +4635,13 @@ class DeferredStringCharFromCode : public DeferredCode {
 // Generates code for creating a one-char string from a char code.
 void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment(masm_, "[ GenerateStringCharFromCode");
   ASSERT(args->length() == 1);

   Load(args->at(0));

-  Register code = r1;
-  Register result = r0;
-
-  frame_->EmitPop(code);
+  Register result = frame_->GetTOSRegister();
+  Register code = frame_->PopToRegister(result);

   DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
       code, result);
@@ -4679,23 +4703,20 @@ class DeferredStringCharAt : public DeferredCode {
 // This generates code that performs a String.prototype.charAt() call
 // or returns a smi in order to trigger conversion.
 void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment(masm_, "[ GenerateStringCharAt");
   ASSERT(args->length() == 2);

   Load(args->at(0));
   Load(args->at(1));

-  Register index = r1;
-  Register object = r2;
-
-  frame_->EmitPop(r1);
-  frame_->EmitPop(r2);
+  Register index = frame_->PopToRegister();
+  Register object = frame_->PopToRegister(index);

   // We need three extra registers.
-  Register scratch1 = r3;
-  Register scratch2 = r4;
-  Register result = r0;
+  Register scratch1 = VirtualFrame::scratch0();
+  Register scratch2 = VirtualFrame::scratch1();
+  // Use r6 without notifying the virtual frame.
+  Register result = r6;

   DeferredStringCharAt* deferred =
       new DeferredStringCharAt(object,
@@ -4793,6 +4814,152 @@ void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
 }


+// Deferred code to check whether the String JavaScript object is safe for using
+// default value of. This code is called after the bit caching this information
+// in the map has been checked with the map for the object in the map_result_
+// register. On return the register map_result_ contains 1 for true and 0 for
+// false.
+class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
+ public:
+  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
+                                               Register map_result,
+                                               Register scratch1,
+                                               Register scratch2)
+      : object_(object),
+        map_result_(map_result),
+        scratch1_(scratch1),
+        scratch2_(scratch2) { }
+
+  virtual void Generate() {
+    Label false_result;
+
+    // Check that map is loaded as expected.
+    if (FLAG_debug_code) {
+      __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
+      __ cmp(map_result_, ip);
+      __ Assert(eq, "Map not in expected register");
+    }
+
+    // Check for fast case object. Generate false result for slow case object.
+    __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
+    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
+    __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
+    __ cmp(scratch1_, ip);
+    __ b(eq, &false_result);
+
+    // Look for valueOf symbol in the descriptor array, and indicate false if
+    // found. The type is not checked, so if it is a transition it is a false
+    // negative.
+    __ ldr(map_result_,
+           FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
+    __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
+    // map_result_: descriptor array
+    // scratch2_: length of descriptor array
+    // Calculate the end of the descriptor array.
+    STATIC_ASSERT(kSmiTag == 0);
+    STATIC_ASSERT(kSmiTagSize == 1);
+    STATIC_ASSERT(kPointerSize == 4);
+    __ add(scratch1_,
+           map_result_,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+    __ add(scratch1_,
+           scratch1_,
+           Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+    // Calculate location of the first key name.
+    __ add(map_result_,
+           map_result_,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag +
+                   DescriptorArray::kFirstIndex * kPointerSize));
+    // Loop through all the keys in the descriptor array. If one of these is the
+    // symbol valueOf the result is false.
+    Label entry, loop;
+    // The use of ip to store the valueOf symbol assumes that it is not
+    // otherwise used in the loop below.
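+    // (ip doubles as the ARM macro assembler's own scratch register, so the
+    // loop below deliberately uses only instructions that leave it untouched.)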
+    __ mov(ip, Operand(Factory::value_of_symbol()));
+    __ jmp(&entry);
+    __ bind(&loop);
+    __ ldr(scratch2_, MemOperand(map_result_, 0));
+    __ cmp(scratch2_, ip);
+    __ b(eq, &false_result);
+    __ add(map_result_, map_result_, Operand(kPointerSize));
+    __ bind(&entry);
+    __ cmp(map_result_, Operand(scratch1_));
+    __ b(ne, &loop);
+
+    // Reload map as register map_result_ was used as temporary above.
+    __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));
+
+    // If a valueOf property is not found on the object check that its
+    // prototype is the un-modified String prototype. If not, result is false.
+    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
+    __ tst(scratch1_, Operand(kSmiTagMask));
+    __ b(eq, &false_result);
+    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
+    __ ldr(scratch2_,
+           CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
+    __ ldr(scratch2_,
+           FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
+    __ ldr(scratch2_,
+           CodeGenerator::ContextOperand(
+               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+    __ cmp(scratch1_, scratch2_);
+    __ b(ne, &false_result);
+
+    // Set the bit in the map to indicate that it has been checked safe for
+    // default valueOf and set true result.
+    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+    __ orr(scratch1_,
+           scratch1_,
+           Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+    __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
+    __ mov(map_result_, Operand(1));
+    __ jmp(exit_label());
+    __ bind(&false_result);
+    // Set false result.
+    __ mov(map_result_, Operand(0));
+  }
+
+ private:
+  Register object_;
+  Register map_result_;
+  Register scratch1_;
+  Register scratch2_;
+};
+
+
+void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+  ASSERT(args->length() == 1);
+  Load(args->at(0));
+  Register obj = frame_->PopToRegister();  // Pop the string wrapper.
+  if (FLAG_debug_code) {
+    __ AbortIfSmi(obj);
+  }
+
+  // Check whether this map has already been checked to be safe for default
+  // valueOf.
+  Register map_result = VirtualFrame::scratch0();
+  __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
+  __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
+  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+  true_target()->Branch(ne);
+
+  // We need an additional two scratch registers for the deferred code.
+  Register scratch1 = VirtualFrame::scratch1();
+  // Use r6 without notifying the virtual frame.
+  Register scratch2 = r6;
+
+  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
+      new DeferredIsStringWrapperSafeForDefaultValueOf(
+          obj, map_result, scratch1, scratch2);
+  deferred->Branch(eq);
+  deferred->BindExit();
+  __ tst(map_result, Operand(map_result));
+  cc_reg_ = ne;
+}
+
+
 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
   // This generates a fast version of:
   // (%_ClassOf(arg) === 'Function')
@@ -4874,13 +5041,13 @@ void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {

 void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   ASSERT(args->length() == 1);

   // Satisfy contract with ArgumentsAccessStub:
   // Load the key into r1 and the formal parameters count into r0.
   Load(args->at(0));
-  frame_->EmitPop(r1);
+  frame_->PopToR1();
+  frame_->SpillAll();
   __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));

   // Call the shared stub to get to arguments[key].
@@ -5108,9 +5275,7 @@ class DeferredSearchCache: public DeferredCode {
 void DeferredSearchCache::Generate() {
   __ Push(cache_, key_);
   __ CallRuntime(Runtime::kGetFromCache, 2);
-  if (!dst_.is(r0)) {
-    __ mov(dst_, r0);
-  }
+  __ Move(dst_, r0);
 }

@@ -5130,33 +5295,42 @@ void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {

   Load(args->at(1));

-  VirtualFrame::SpilledScope spilled_scope(frame_);
-
-  frame_->EmitPop(r2);
+  frame_->PopToR1();
+  frame_->SpillAll();
+  Register key = r1;  // Just popped to r1
+  Register result = r0;  // Free, as frame has just been spilled.
+  Register scratch1 = VirtualFrame::scratch0();
+  Register scratch2 = VirtualFrame::scratch1();

-  __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
-  __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
-  __ ldr(r1, ContextOperand(r1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
-  __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(cache_id)));
+  __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(scratch1,
+         FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
+  __ ldr(scratch1,
+         ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
+  __ ldr(scratch1,
+         FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));

-  DeferredSearchCache* deferred = new DeferredSearchCache(r0, r1, r2);
+  DeferredSearchCache* deferred =
+      new DeferredSearchCache(result, scratch1, key);

   const int kFingerOffset =
       FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
-  __ ldr(r0, FieldMemOperand(r1, kFingerOffset));
-  // r0 now holds finger offset as a smi.
-  __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  // r3 now points to the start of fixed array elements.
-  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
-  // Note side effect of PreIndex: r3 now points to the key of the pair.
-  __ cmp(r2, r0);
+  __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
+  // result now holds finger offset as a smi.
+  __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  // scratch2 now points to the start of fixed array elements.
+  __ ldr(result,
+         MemOperand(
+             scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
+  // Note side effect of PreIndex: scratch2 now points to the key of the pair.
+  __ cmp(key, result);
   deferred->Branch(ne);

-  __ ldr(r0, MemOperand(r3, kPointerSize));
+  __ ldr(result, MemOperand(scratch2, kPointerSize));

   deferred->BindExit();
-  frame_->EmitPush(r0);
+  frame_->EmitPush(result);
 }

@@ -6851,6 +7025,11 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
   __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));

+  // Initialize the code pointer in the function to be the one
+  // found in the shared function info object.
+  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
+  __ str(r3, FieldMemOperand(r0, JSFunction::kCodeOffset));
+
   // Return result. The argument function info has been popped already.
   __ Ret();

@@ -7801,6 +7980,77 @@ void CompareStub::Generate(MacroAssembler* masm) {
 }


+// This stub does not handle the inlined cases (Smis, Booleans, undefined).
+// The stub returns zero for false, and a non-zero value for true.
+void ToBooleanStub::Generate(MacroAssembler* masm) {
+  Label false_result;
+  Label not_heap_number;
+  Register scratch0 = VirtualFrame::scratch0();
+
+  // HeapNumber => false iff +0, -0, or NaN.
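+  // (The VFP compare against 0.0 below sets the Z flag for +0 and -0 and the
+  // V flag for NaN, so the two conditional moves cover exactly these cases.)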
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
+  __ cmp(scratch0, ip);
+  __ b(&not_heap_number, ne);
+
+  __ sub(ip, tos_, Operand(kHeapObjectTag));
+  __ vldr(d1, ip, HeapNumber::kValueOffset);
+  __ vcmp(d1, 0.0);
+  __ vmrs(pc);
+  // "tos_" is a register, and contains a non-zero value by default.
+  // Hence we only need to overwrite "tos_" with zero to return false for
+  // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
+  __ mov(tos_, Operand(0), LeaveCC, eq);  // for FP_ZERO
+  __ mov(tos_, Operand(0), LeaveCC, vs);  // for FP_NAN
+  __ Ret();
+
+  __ bind(&not_heap_number);
+
+  // Check if the value is 'null'.
+  // 'null' => false.
+  __ LoadRoot(ip, Heap::kNullValueRootIndex);
+  __ cmp(tos_, ip);
+  __ b(&false_result, eq);
+
+  // It can be an undetectable object.
+  // Undetectable => false.
+  __ ldr(ip, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(ip, Map::kBitFieldOffset));
+  __ and_(scratch0, scratch0, Operand(1 << Map::kIsUndetectable));
+  __ cmp(scratch0, Operand(1 << Map::kIsUndetectable));
+  __ b(&false_result, eq);
+
+  // JavaScript object => true.
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));
+  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
+  // "tos_" is a register and contains a non-zero value.
+  // Hence we implicitly return true if the greater than
+  // condition is satisfied.
+  __ Ret(gt);
+
+  // Check for string
+  __ ldr(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
+  __ ldrb(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));
+  __ cmp(scratch0, Operand(FIRST_NONSTRING_TYPE));
+  // "tos_" is a register and contains a non-zero value.
+  // Hence we implicitly return true if the greater than
+  // condition is satisfied.
+  __ Ret(gt);
+
+  // String value => false iff empty, i.e., length is zero
+  __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset));
+  // If length is zero, "tos_" contains zero ==> false.
+  // If length is not zero, "tos_" contains a non-zero value ==> true.
+  __ Ret();
+
+  // Return 0 in "tos_" for false.
+  __ bind(&false_result);
+  __ mov(tos_, Operand(0));
+  __ Ret();
+}
+
+
 // We fall into this code if the operands were Smis, but the result was
 // not (eg. overflow). We branch into this code (to the not_smi label) if
 // the operands were not both Smi. The operands are in r0 and r1. In order
@@ -10444,11 +10694,9 @@ void StringCharCodeAtGenerator::GenerateSlow(
     // NumberToSmi discards numbers that are not exact integers.
     __ CallRuntime(Runtime::kNumberToSmi, 1);
   }
-  if (!scratch_.is(r0)) {
-    // Save the conversion result before the pop instructions below
-    // have a chance to overwrite it.
-    __ mov(scratch_, r0);
-  }
+  // Save the conversion result before the pop instructions below
+  // have a chance to overwrite it.
+  __ Move(scratch_, r0);
   __ pop(index_);
   __ pop(object_);
   // Reload the instance type.
@@ -10467,9 +10715,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   call_helper.BeforeCall(masm);
   __ Push(object_, index_);
   __ CallRuntime(Runtime::kStringCharCodeAt, 2);
-  if (!result_.is(r0)) {
-    __ mov(result_, r0);
-  }
+  __ Move(result_, r0);
   call_helper.AfterCall(masm);
   __ jmp(&exit_);

@@ -10510,9 +10756,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
   call_helper.BeforeCall(masm);
   __ push(code_);
   __ CallRuntime(Runtime::kCharFromCode, 1);
-  if (!result_.is(r0)) {
-    __ mov(result_, r0);
-  }
+  __ Move(result_, r0);
   call_helper.AfterCall(masm);
   __ jmp(&exit_);

diff --git a/deps/v8/src/arm/codegen-arm.h b/deps/v8/src/arm/codegen-arm.h
index bfe20809b1..029d59900d 100644
--- a/deps/v8/src/arm/codegen-arm.h
+++ b/deps/v8/src/arm/codegen-arm.h
@@ -286,6 +286,10 @@ class CodeGenerator: public AstVisitor {
     return inlined_write_barrier_size_ + 4;
   }

+  static MemOperand ContextOperand(Register context, int index) {
+    return MemOperand(context, Context::SlotOffset(index));
+  }
+
  private:
   // Construction/Destruction
   explicit CodeGenerator(MacroAssembler* masm);
@@ -338,10 +342,6 @@ class CodeGenerator: public AstVisitor {
   void LoadReference(Reference* ref);
   void UnloadReference(Reference* ref);

-  static MemOperand ContextOperand(Register context, int index) {
-    return MemOperand(context, Context::SlotOffset(index));
-  }
-
   MemOperand SlotOperand(Slot* slot, Register tmp);

   MemOperand ContextSlotOperandCheckExtensions(Slot* slot,
@@ -482,6 +482,8 @@ class CodeGenerator: public AstVisitor {
   void GenerateIsSpecObject(ZoneList<Expression*>* args);
   void GenerateIsFunction(ZoneList<Expression*>* args);
   void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
+  void GenerateIsStringWrapperSafeForDefaultValueOf(
+      ZoneList<Expression*>* args);

   // Support for construct call checks.
   void GenerateIsConstructCall(ZoneList<Expression*>* args);
@@ -623,6 +625,19 @@ class TranscendentalCacheStub: public CodeStub {
 };


+class ToBooleanStub: public CodeStub {
+ public:
+  explicit ToBooleanStub(Register tos) : tos_(tos) { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  Register tos_;
+  Major MajorKey() { return ToBoolean; }
+  int MinorKey() { return tos_.code(); }
+};
+
+
 class GenericBinaryOpStub : public CodeStub {
  public:
   GenericBinaryOpStub(Token::Value op,
diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc
index e87d265e89..3a948451b4 100644
--- a/deps/v8/src/arm/debug-arm.cc
+++ b/deps/v8/src/arm/debug-arm.cc
@@ -293,15 +293,11 @@ void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
   masm->Abort("LiveEdit frame dropping is not supported on arm");
 }

+const bool Debug::kFrameDropperSupported = false;
+
 #undef __

-Object** Debug::SetUpFrameDropperFrame(StackFrame* bottom_js_frame,
-                                       Handle<Code> code) {
-  UNREACHABLE();
-  return NULL;
-}
-const int Debug::kFrameDropperFrameSize = -1;

 #endif  // ENABLE_DEBUGGER_SUPPORT
diff --git a/deps/v8/src/arm/disasm-arm.cc b/deps/v8/src/arm/disasm-arm.cc
index fd142bd961..0029ed168b 100644
--- a/deps/v8/src/arm/disasm-arm.cc
+++ b/deps/v8/src/arm/disasm-arm.cc
@@ -1188,7 +1188,13 @@ void Decoder::DecodeVCMP(Instr* instr) {
   bool raise_exception_for_qnan = (instr->Bit(7) == 0x1);

   if (dp_operation && !raise_exception_for_qnan) {
-    Format(instr, "vcmp.f64'cond 'Dd, 'Dm");
+    if (instr->Opc2Field() == 0x4) {
+      Format(instr, "vcmp.f64'cond 'Dd, 'Dm");
+    } else if (instr->Opc2Field() == 0x5) {
+      Format(instr, "vcmp.f64'cond 'Dd, #0.0");
+    } else {
+      Unknown(instr);  // invalid
+    }
   } else {
     Unknown(instr);  // Not used by V8.
 }
diff --git a/deps/v8/src/arm/fast-codegen-arm.cc b/deps/v8/src/arm/fast-codegen-arm.cc
deleted file mode 100644
index 36ac2aa3d3..0000000000
--- a/deps/v8/src/arm/fast-codegen-arm.cc
+++ /dev/null
@@ -1,241 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "v8.h"
-
-#if defined(V8_TARGET_ARCH_ARM)
-
-#include "codegen-inl.h"
-#include "fast-codegen.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-#define __ ACCESS_MASM(masm())
-
-Register FastCodeGenerator::accumulator0() { return r0; }
-Register FastCodeGenerator::accumulator1() { return r1; }
-Register FastCodeGenerator::scratch0() { return r3; }
-Register FastCodeGenerator::scratch1() { return r4; }
-Register FastCodeGenerator::scratch2() { return r5; }
-Register FastCodeGenerator::receiver_reg() { return r2; }
-Register FastCodeGenerator::context_reg() { return cp; }
-
-
-void FastCodeGenerator::EmitLoadReceiver() {
-  // Offset 2 is due to return address and saved frame pointer.
-  int index = 2 + scope()->num_parameters();
-  __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize));
-}
-
-
-void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
-  ASSERT(!destination().is(no_reg));
-  ASSERT(cell->IsJSGlobalPropertyCell());
-
-  __ mov(destination(), Operand(cell));
-  __ ldr(destination(),
-         FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
-  if (FLAG_debug_code) {
-    __ mov(ip, Operand(Factory::the_hole_value()));
-    __ cmp(destination(), ip);
-    __ Check(ne, "DontDelete cells can't contain the hole");
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<JSObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // We will emit the write barrier unless the stored value is statically
-  // known to be a smi.
-  bool needs_write_barrier = !is_smi(accumulator0());
-
-  // Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset));
-    if (needs_write_barrier) {
-      // Preserve receiver from write barrier.
-      __ mov(scratch0(), receiver_reg());
-    }
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ ldr(scratch0(),
-           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ str(accumulator0(), FieldMemOperand(scratch0(), offset));
-  }
-
-  if (needs_write_barrier) {
-    __ RecordWrite(scratch0(), Operand(offset), scratch1(), scratch2());
-  }
-
-  if (destination().is(accumulator1())) {
-    __ mov(accumulator1(), accumulator0());
-    if (is_smi(accumulator0())) {
-      set_as_smi(accumulator1());
-    } else {
-      clear_as_smi(accumulator1());
-    }
-  }
-}
-
-
-void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
-  ASSERT(!destination().is(no_reg));
-  LookupResult lookup;
-  info()->receiver()->Lookup(*name, &lookup);
-
-  ASSERT(lookup.holder() == *info()->receiver());
-  ASSERT(lookup.type() == FIELD);
-  Handle<Map> map(Handle<JSObject>::cast(info()->receiver())->map());
-  int index = lookup.GetFieldIndex() - map->inobject_properties();
-  int offset = index * kPointerSize;
-
-  // Perform the load. Negative offsets are inobject properties.
-  if (offset < 0) {
-    offset += map->instance_size();
-    __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
-  } else {
-    offset += FixedArray::kHeaderSize;
-    __ ldr(scratch0(),
-           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
-    __ ldr(destination(), FieldMemOperand(scratch0(), offset));
-  }
-
-  // The loaded value is not known to be a smi.
-  clear_as_smi(destination());
-}
-
-
-void FastCodeGenerator::EmitBitOr() {
-  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
-    // If both operands are known to be a smi then there is no need to check
-    // the operands or result. There is no need to perform the operation in
-    // an effect context.
-    if (!destination().is(no_reg)) {
-      __ orr(destination(), accumulator1(), Operand(accumulator0()));
-    }
-  } else {
-    // Left is in accumulator1, right in accumulator0.
-    if (destination().is(accumulator0())) {
-      __ mov(scratch0(), accumulator0());
-      __ orr(destination(), accumulator1(), Operand(accumulator1()));
-      Label* bailout =
-          info()->AddBailout(accumulator1(), scratch0());  // Left, right.
-      __ BranchOnNotSmi(destination(), bailout);
-    } else if (destination().is(accumulator1())) {
-      __ mov(scratch0(), accumulator1());
-      __ orr(destination(), accumulator1(), Operand(accumulator0()));
-      Label* bailout = info()->AddBailout(scratch0(), accumulator0());
-      __ BranchOnNotSmi(destination(), bailout);
-    } else {
-      ASSERT(destination().is(no_reg));
-      __ orr(scratch0(), accumulator1(), Operand(accumulator0()));
-      Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
-      __ BranchOnNotSmi(scratch0(), bailout);
-    }
-  }
-
-  // If we didn't bailout, the result (in fact, both inputs too) is known to
-  // be a smi.
-  set_as_smi(accumulator0());
-  set_as_smi(accumulator1());
-}
-
-
-void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
-  ASSERT(info_ == NULL);
-  info_ = compilation_info;
-  Comment cmnt(masm_, "[ function compiled by fast code generator");
-
-  // Save the caller's frame pointer and set up our own.
-  Comment prologue_cmnt(masm(), ";; Prologue");
-  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
-  __ add(fp, sp, Operand(2 * kPointerSize));
-  // Note that we keep a live register reference to cp (context) at
-  // this point.
-
-  Label* bailout_to_beginning = info()->AddBailout();
-  // Receiver (this) is allocated to a fixed register.
-  if (info()->has_this_properties()) {
-    Comment cmnt(masm(), ";; MapCheck(this)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(this)\n");
-    }
-    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
-    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
-    Handle<Map> map(object->map());
-    EmitLoadReceiver();
-    __ CheckMap(receiver_reg(), scratch0(), map, bailout_to_beginning, false);
-  }
-
-  // If there is a global variable access check if the global object is the
-  // same as at lazy-compilation time.
-  if (info()->has_globals()) {
-    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
-    if (FLAG_print_ir) {
-      PrintF("MapCheck(GLOBAL)\n");
-    }
-    ASSERT(info()->has_global_object());
-    Handle<Map> map(info()->global_object()->map());
-    __ ldr(scratch0(), CodeGenerator::GlobalObject());
-    __ CheckMap(scratch0(), scratch1(), map, bailout_to_beginning, true);
-  }
-
-  VisitStatements(function()->body());
-
-  Comment return_cmnt(masm(), ";; Return(<undefined>)");
-  if (FLAG_print_ir) {
-    PrintF("Return(<undefined>)\n");
-  }
-  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
-  __ mov(sp, fp);
-  __ ldm(ia_w, sp, fp.bit() | lr.bit());
-  int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
-  __ add(sp, sp, Operand(sp_delta));
-  __ Jump(lr);
-}
-
-
-#undef __
-
-
-} }  // namespace v8::internal
-
-#endif  // V8_TARGET_ARCH_ARM
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index ea5a8f2a83..b58a4a5854 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -55,99 +55,97 @@ namespace internal {
 //
 // The function builds a JS frame. Please see JavaScriptFrameConstants in
 // frames-arm.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
+void FullCodeGenerator::Generate(CompilationInfo* info) {
   ASSERT(info_ == NULL);
   info_ = info;
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ function compiled by full code generator");

-  if (mode == PRIMARY) {
-    int locals_count = scope()->num_stack_slots();
+  int locals_count = scope()->num_stack_slots();

-    __ Push(lr, fp, cp, r1);
-    if (locals_count > 0) {
-      // Load undefined value here, so the value is ready for the loop
-      // below.
-      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-    }
-    // Adjust fp to point to caller's fp.
-    __ add(fp, sp, Operand(2 * kPointerSize));
+  __ Push(lr, fp, cp, r1);
+  if (locals_count > 0) {
+    // Load undefined value here, so the value is ready for the loop
+    // below.
+    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+  }
+  // Adjust fp to point to caller's fp.
+  __ add(fp, sp, Operand(2 * kPointerSize));

-    { Comment cmnt(masm_, "[ Allocate locals");
-      for (int i = 0; i < locals_count; i++) {
-        __ push(ip);
-      }
+  { Comment cmnt(masm_, "[ Allocate locals");
+    for (int i = 0; i < locals_count; i++) {
+      __ push(ip);
     }
+  }

-    bool function_in_register = true;
+  bool function_in_register = true;

-    // Possibly allocate a local context.
-    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
-    if (heap_slots > 0) {
-      Comment cmnt(masm_, "[ Allocate local context");
-      // Argument to NewContext is the function, which is in r1.
-      __ push(r1);
-      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-        FastNewContextStub stub(heap_slots);
-        __ CallStub(&stub);
-      } else {
-        __ CallRuntime(Runtime::kNewContext, 1);
-      }
-      function_in_register = false;
-      // Context is returned in both r0 and cp. It replaces the context
-      // passed to us. It's saved in the stack and kept live in cp.
-      __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-      // Copy any necessary parameters into the context.
-      int num_parameters = scope()->num_parameters();
-      for (int i = 0; i < num_parameters; i++) {
-        Slot* slot = scope()->parameter(i)->slot();
-        if (slot != NULL && slot->type() == Slot::CONTEXT) {
-          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
-                                 (num_parameters - 1 - i) * kPointerSize;
-          // Load parameter from stack.
-          __ ldr(r0, MemOperand(fp, parameter_offset));
-          // Store it in the context.
-          __ mov(r1, Operand(Context::SlotOffset(slot->index())));
-          __ str(r0, MemOperand(cp, r1));
-          // Update the write barrier. This clobbers all involved
-          // registers, so we have to use two more registers to avoid
-          // clobbering cp.
-          __ mov(r2, Operand(cp));
-          __ RecordWrite(r2, Operand(r1), r3, r0);
-        }
+  // Possibly allocate a local context.
+  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+  if (heap_slots > 0) {
+    Comment cmnt(masm_, "[ Allocate local context");
+    // Argument to NewContext is the function, which is in r1.
+    __ push(r1);
+    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+      FastNewContextStub stub(heap_slots);
+      __ CallStub(&stub);
+    } else {
+      __ CallRuntime(Runtime::kNewContext, 1);
+    }
+    function_in_register = false;
+    // Context is returned in both r0 and cp. It replaces the context
+    // passed to us. It's saved in the stack and kept live in cp.
+    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    // Copy any necessary parameters into the context.
+    int num_parameters = scope()->num_parameters();
+    for (int i = 0; i < num_parameters; i++) {
+      Slot* slot = scope()->parameter(i)->slot();
+      if (slot != NULL && slot->type() == Slot::CONTEXT) {
+        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
+                               (num_parameters - 1 - i) * kPointerSize;
+        // Load parameter from stack.
+        __ ldr(r0, MemOperand(fp, parameter_offset));
+        // Store it in the context.
+        __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+        __ str(r0, MemOperand(cp, r1));
+        // Update the write barrier. This clobbers all involved
+        // registers, so we have to use two more registers to avoid
+        // clobbering cp.
+        __ mov(r2, Operand(cp));
+        __ RecordWrite(r2, Operand(r1), r3, r0);
       }
     }
+  }

-    Variable* arguments = scope()->arguments()->AsVariable();
-    if (arguments != NULL) {
-      // Function uses arguments object.
-      Comment cmnt(masm_, "[ Allocate arguments object");
-      if (!function_in_register) {
-        // Load this again, if it's used by the local context below.
-        __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-      } else {
-        __ mov(r3, r1);
-      }
-      // Receiver is just before the parameters on the caller's stack.
-      int offset = scope()->num_parameters() * kPointerSize;
-      __ add(r2, fp,
-             Operand(StandardFrameConstants::kCallerSPOffset + offset));
-      __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
-      __ Push(r3, r2, r1);
-
-      // Arguments to ArgumentsAccessStub:
-      //   function, receiver address, parameter count.
-      // The stub will rewrite receiver and parameter count if the previous
-      // stack frame was an arguments adapter frame.
-      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
-      __ CallStub(&stub);
-      // Duplicate the value; move-to-slot operation might clobber registers.
-      __ mov(r3, r0);
-      Move(arguments->slot(), r0, r1, r2);
-      Slot* dot_arguments_slot =
-          scope()->arguments_shadow()->AsVariable()->slot();
-      Move(dot_arguments_slot, r3, r1, r2);
+  Variable* arguments = scope()->arguments()->AsVariable();
+  if (arguments != NULL) {
+    // Function uses arguments object.
+    Comment cmnt(masm_, "[ Allocate arguments object");
+    if (!function_in_register) {
+      // Load this again, if it's used by the local context below.
+      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+    } else {
+      __ mov(r3, r1);
     }
+    // Receiver is just before the parameters on the caller's stack.
+    int offset = scope()->num_parameters() * kPointerSize;
+    __ add(r2, fp,
+           Operand(StandardFrameConstants::kCallerSPOffset + offset));
+    __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
+    __ Push(r3, r2, r1);
+
+    // Arguments to ArgumentsAccessStub:
+    //   function, receiver address, parameter count.
+    // The stub will rewrite receiver and parameter count if the previous
+    // stack frame was an arguments adapter frame.
+    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    __ CallStub(&stub);
+    // Duplicate the value; move-to-slot operation might clobber registers.
+    __ mov(r3, r0);
+    Move(arguments->slot(), r0, r1, r2);
+    Slot* dot_arguments_slot =
+        scope()->arguments_shadow()->AsVariable()->slot();
+    Move(dot_arguments_slot, r3, r1, r2);
   }

   { Comment cmnt(masm_, "[ Declarations");
@@ -956,15 +954,13 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ cmp(r4, Operand(r2));
   __ b(eq, &update_each);

-  // Convert the entry to a string or null if it isn't a property
-  // anymore. If the property has been removed while iterating, we
+  // Convert the entry to a string or (smi) 0 if it isn't a property
+  // any more. If the property has been removed while iterating, we
   // just skip it.
   __ push(r1);  // Enumerable.
   __ push(r3);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
-  __ mov(r3, Operand(r0));
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(r3, ip);
+  __ mov(r3, Operand(r0), SetCC);
   __ b(eq, loop_statement.continue_target());

   // Update the 'each' property or variable from the possibly filtered
@@ -1959,6 +1955,26 @@ void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
 }


+void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
+    ZoneList<Expression*>* args) {
+
+  ASSERT(args->length() == 1);
+
+  VisitForValue(args->at(0), kAccumulator);
+
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
+
+  // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
+  // used in a few functions in runtime.js which should not normally be hit by
+  // this compiler.
+  __ jmp(if_false);
+  Apply(context_, if_true, if_false);
+}
+
+
 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);

diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index 9c25ccde2f..38c7c28c9d 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -757,7 +757,7 @@ void MacroAssembler::InvokeFunction(Register fun,
                       SharedFunctionInfo::kFormalParameterCountOffset));
   mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
   ldr(code_reg,
-      MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
+      MemOperand(r1, JSFunction::kCodeOffset - kHeapObjectTag));
   add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag));

   ParameterCount expected(expected_reg);
@@ -1508,8 +1508,7 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
     // Make sure the code objects in the builtins object and in the
     // builtin function are the same.
     push(r1);
-    ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-    ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
+    ldr(r1, FieldMemOperand(r1, JSFunction::kCodeOffset));
     cmp(r1, target);
     Assert(eq, "Builtin code object changed");
     pop(r1);
@@ -1656,6 +1655,13 @@ void MacroAssembler::JumpIfEitherSmi(Register reg1,
 }


+void MacroAssembler::AbortIfSmi(Register object) {
+  ASSERT_EQ(0, kSmiTag);
+  tst(object, Operand(kSmiTagMask));
+  Assert(ne, "Operand is a smi");
+}
+
+
 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
     Register first,
     Register second,
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 37a1b1cb13..836ed74994 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -618,6 +618,9 @@ class MacroAssembler: public Assembler {
   // Jump if either of the registers contain a smi.
   void JumpIfEitherSmi(Register reg1, Register reg2, Label* on_either_smi);

+  // Abort execution if argument is a smi. Used in debug code.
+  void AbortIfSmi(Register object);
+
   // ---------------------------------------------------------------------------
   // String utilities

diff --git a/deps/v8/src/arm/simulator-arm.cc b/deps/v8/src/arm/simulator-arm.cc
index 04635e3f91..c4cc8d46cb 100644
--- a/deps/v8/src/arm/simulator-arm.cc
+++ b/deps/v8/src/arm/simulator-arm.cc
@@ -2431,11 +2431,17 @@ void Simulator::DecodeVCMP(Instr* instr) {
   }

   int d = GlueRegCode(!dp_operation, instr->VdField(), instr->DField());
-  int m = GlueRegCode(!dp_operation, instr->VmField(), instr->MField());
+  int m = 0;
+  if (instr->Opc2Field() == 0x4) {
+    m = GlueRegCode(!dp_operation, instr->VmField(), instr->MField());
+  }

   if (dp_operation) {
     double dd_value = get_double_from_d_register(d);
-    double dm_value = get_double_from_d_register(m);
+    double dm_value = 0.0;
+    if (instr->Opc2Field() == 0x4) {
+      dm_value = get_double_from_d_register(m);
+    }

     Compute_FPSCR_Flags(dd_value, dm_value);
   } else {
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index 8c8e702d6c..fa90ca7d11 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -1212,38 +1212,6 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
 }


-Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
-  // ----------- S t a t e -------------
-  //  -- r1: function
-  //  -- lr: return address
-  // -----------------------------------
-
-  // Enter an internal frame.
-  __ EnterInternalFrame();
-
-  // Preserve the function.
-  __ push(r1);
-
-  // Push the function on the stack as the argument to the runtime function.
-  __ push(r1);
-  __ CallRuntime(Runtime::kLazyCompile, 1);
-
-  // Calculate the entry point.
-  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-
-  // Restore saved function.
-  __ pop(r1);
-
-  // Tear down temporary frame.
-  __ LeaveInternalFrame();
-
-  // Do a tail-call of the compiled function.
-  __ Jump(r2);
-
-  return GetCodeWithFlags(flags, "LazyCompileStub");
-}
-
-
 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ cmp(r2, Operand(Handle<String>(name)));
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h
index cf7020ecea..1577433591 100644
--- a/deps/v8/src/assembler.h
+++ b/deps/v8/src/assembler.h
@@ -235,6 +235,7 @@ class RelocInfo BASE_EMBEDDED {
   INLINE(void set_call_object(Object* target));
   INLINE(Object** call_object_address());

+  template<typename StaticVisitor> inline void Visit();
   inline void Visit(ObjectVisitor* v);

   // Patch the code with some other code.
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index e1d4489d44..ce8e98d6a5 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -36,6 +36,7 @@
 #include "global-handles.h"
 #include "macro-assembler.h"
 #include "natives.h"
+#include "objects-visiting.h"
 #include "snapshot.h"
 #include "stub-cache.h"

@@ -56,7 +57,7 @@ class SourceCodeCache BASE_EMBEDDED {
   }

   void Iterate(ObjectVisitor* v) {
-    v->VisitPointer(BitCast<Object**, FixedArray**>(&cache_));
+    v->VisitPointer(BitCast<Object**>(&cache_));
   }

@@ -470,6 +471,7 @@ Handle<JSFunction> Genesis::CreateEmptyFunction() {
   Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::EmptyFunction));
   empty_function->set_code(*code);
+  empty_function->shared()->set_code(*code);
   Handle<String> source = Factory::NewStringFromAscii(CStrVector("() {}"));
   Handle