
deps: upgrade V8 to 5.0.71.32

* Pick up the branch head for V8 5.0 stable [1]
* Edit v8 gitignore to allow trace_event copy
* Update V8 DEP trace_event as per deps/v8/DEPS [2]

[1] https://chromium.googlesource.com/v8/v8.git/+/3c67831
[2] 4b09207e44

Ref: https://github.com/nodejs/node/pull/5945
PR-URL: https://github.com/nodejs/node/pull/6111
Reviewed-By: Michaël Zasso <mic.besace@gmail.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Ali Ijaz Sheikh, 9 years ago
commit 52af5c4eeb
Changed files (lines changed per file):
  1. deps/v8/.clang-format (2)
  2. deps/v8/.gitignore (9)
  3. deps/v8/AUTHORS (3)
  4. deps/v8/BUILD.gn (129)
  5. deps/v8/ChangeLog (1775)
  6. deps/v8/DEPS (64)
  7. deps/v8/Makefile (2)
  8. deps/v8/WATCHLISTS (24)
  9. deps/v8/base/trace_event/common/trace_event_common.h (63)
  10. deps/v8/build/all.gyp (6)
  11. deps/v8/build/has_valgrind.py (21)
  12. deps/v8/build/isolate.gypi (3)
  13. deps/v8/build/standalone.gypi (73)
  14. deps/v8/build/toolchain.gypi (3)
  15. deps/v8/build/vs_toolchain.py (4)
  16. deps/v8/include/v8-experimental.h (1)
  17. deps/v8/include/v8-platform.h (10)
  18. deps/v8/include/v8-profiler.h (134)
  19. deps/v8/include/v8-version.h (8)
  20. deps/v8/include/v8.h (104)
  21. deps/v8/infra/config/cq.cfg (5)
  22. deps/v8/snapshot_toolchain.gni (2)
  23. deps/v8/src/DEPS (8)
  24. deps/v8/src/accessors.cc (514)
  25. deps/v8/src/accessors.h (24)
  26. deps/v8/src/address-map.cc (9)
  27. deps/v8/src/api-experimental.cc (5)
  28. deps/v8/src/api-natives.cc (270)
  29. deps/v8/src/api-natives.h (4)
  30. deps/v8/src/api.cc (222)
  31. deps/v8/src/arguments.cc (6)
  32. deps/v8/src/arguments.h (41)
  33. deps/v8/src/arm/assembler-arm-inl.h (27)
  34. deps/v8/src/arm/assembler-arm.cc (47)
  35. deps/v8/src/arm/assembler-arm.h (23)
  36. deps/v8/src/arm/builtins-arm.cc (639)
  37. deps/v8/src/arm/code-stubs-arm.cc (2460)
  38. deps/v8/src/arm/codegen-arm.cc (10)
  39. deps/v8/src/arm/constants-arm.h (16)
  40. deps/v8/src/arm/deoptimizer-arm.cc (24)
  41. deps/v8/src/arm/disasm-arm.cc (30)
  42. deps/v8/src/arm/interface-descriptors-arm.cc (61)
  43. deps/v8/src/arm/macro-assembler-arm.cc (109)
  44. deps/v8/src/arm/macro-assembler-arm.h (24)
  45. deps/v8/src/arm/simulator-arm.cc (48)
  46. deps/v8/src/arm64/assembler-arm64-inl.h (22)
  47. deps/v8/src/arm64/assembler-arm64.h (4)
  48. deps/v8/src/arm64/builtins-arm64.cc (624)
  49. deps/v8/src/arm64/code-stubs-arm64.cc (5046)
  50. deps/v8/src/arm64/cpu-arm64.cc (5)
  51. deps/v8/src/arm64/deoptimizer-arm64.cc (25)
  52. deps/v8/src/arm64/interface-descriptors-arm64.cc (64)
  53. deps/v8/src/arm64/macro-assembler-arm64.cc (133)
  54. deps/v8/src/arm64/macro-assembler-arm64.h (28)
  55. deps/v8/src/arm64/simulator-arm64.cc (61)
  56. deps/v8/src/arm64/utils-arm64.h (13)
  57. deps/v8/src/assembler.cc (132)
  58. deps/v8/src/assembler.h (46)
  59. deps/v8/src/ast/OWNERS (1)
  60. deps/v8/src/ast/ast-expression-rewriter.cc (6)
  61. deps/v8/src/ast/ast-expression-rewriter.h (2)
  62. deps/v8/src/ast/ast-expression-visitor.cc (5)
  63. deps/v8/src/ast/ast-expression-visitor.h (2)
  64. deps/v8/src/ast/ast-literal-reindexer.cc (9)
  65. deps/v8/src/ast/ast-numbering.cc (17)
  66. deps/v8/src/ast/ast-value-factory.cc (2)
  67. deps/v8/src/ast/ast-value-factory.h (3)
  68. deps/v8/src/ast/ast.cc (43)
  69. deps/v8/src/ast/ast.h (358)
  70. deps/v8/src/ast/modules.cc (1)
  71. deps/v8/src/ast/modules.h (19)
  72. deps/v8/src/ast/prettyprinter.cc (42)
  73. deps/v8/src/ast/prettyprinter.h (3)
  74. deps/v8/src/ast/scopeinfo.cc (76)
  75. deps/v8/src/ast/scopes.cc (170)
  76. deps/v8/src/ast/scopes.h (57)
  77. deps/v8/src/ast/variables.cc (3)
  78. deps/v8/src/ast/variables.h (56)
  79. deps/v8/src/bailout-reason.h (14)
  80. deps/v8/src/base.isolate (7)
  81. deps/v8/src/base/atomicops.h (2)
  82. deps/v8/src/base/atomicops_internals_s390_gcc.h (152)
  83. deps/v8/src/base/bits.h (14)
  84. deps/v8/src/base/cpu.cc (11)
  85. deps/v8/src/base/cpu.h (5)
  86. deps/v8/src/bootstrapper.cc (347)
  87. deps/v8/src/bootstrapper.h (6)
  88. deps/v8/src/builtins.cc (956)
  89. deps/v8/src/builtins.h (133)
  90. deps/v8/src/code-factory.cc (87)
  91. deps/v8/src/code-factory.h (33)
  92. deps/v8/src/code-stubs-hydrogen.cc (84)
  93. deps/v8/src/code-stubs.cc (104)
  94. deps/v8/src/code-stubs.h (184)
  95. deps/v8/src/codegen.cc (11)
  96. deps/v8/src/compiler.cc (175)
  97. deps/v8/src/compiler.h (115)
  98. deps/v8/src/compiler/access-builder.cc (14)
  99. deps/v8/src/compiler/access-info.cc (15)
  100. deps/v8/src/compiler/arm/code-generator-arm.cc (120)

deps/v8/.clang-format (2)

@@ -1,4 +1,4 @@
# Defines the Google C++ style for automatic reformatting.
# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
BasedOnStyle: Google
MaxEmptyLinesToKeep: 2
MaxEmptyLinesToKeep: 1

deps/v8/.gitignore (9)

@@ -42,7 +42,7 @@ shell_g
/_*
/build/Debug
/build/gyp
/build/ipch/
/build/ipch
/build/Release
/build/win_toolchain.json
/buildtools
@@ -58,6 +58,7 @@ shell_g
/test/promises-aplus/sinon
/test/simdjs/data
/test/test262/data
/test/test262/data.tar
/testing/gmock
/testing/gtest
/third_party
@@ -65,8 +66,10 @@ shell_g
/third_party/llvm
/third_party/llvm-build
/tools/clang
/tools/jsfunfuzz
/tools/jsfunfuzz.zip
/tools/gcmole/gcmole-tools
/tools/gcmole/gcmole-tools.tar.gz
/tools/jsfunfuzz/jsfunfuzz
/tools/jsfunfuzz/jsfunfuzz.tar.gz
/tools/luci-go/linux64/isolate
/tools/luci-go/mac64/isolate
/tools/luci-go/win64/isolate.exe

deps/v8/AUTHORS (3)

@@ -40,7 +40,9 @@ Alexis Campailla <alexis@janeasystems.com>
Andreas Anyuru <andreas.anyuru@gmail.com>
Andrew Paprocki <andrew@ishiboo.com>
Andrei Kashcha <anvaka@gmail.com>
Bangfu Tao <bangfu.tao@samsung.com>
Ben Noordhuis <info@bnoordhuis.nl>
Benjamin Tan <demoneaux@gmail.com>
Bert Belder <bertbelder@gmail.com>
Burcu Dogan <burcujdogan@gmail.com>
Caitlin Potter <caitpotter88@gmail.com>
@@ -107,5 +109,6 @@ Vlad Burlik <vladbph@gmail.com>
Vladimir Krivosheev <develar@gmail.com>
Vladimir Shutoff <vovan@shutoff.ru>
Yu Yin <xwafish@gmail.com>
Zac Hansen <xaxxon@gmail.com>
Zhongping Wang <kewpie.w.zp@gmail.com>
柳荣一 <admin@web-tinker.com>

deps/v8/BUILD.gn (129)

@@ -28,7 +28,7 @@ declare_args() {
v8_deprecation_warnings = false
v8_enable_disassembler = false
v8_enable_gdbjit = false
v8_enable_handle_zapping = true
v8_enable_handle_zapping = is_debug
v8_enable_i18n_support = true
v8_enable_verify_heap = false
v8_interpreted_regexp = false
@@ -46,6 +46,15 @@ if (is_msan) {
v8_target_arch = target_cpu
}
if (v8_use_snapshot && v8_use_external_startup_data) {
snapshot_target = ":v8_external_snapshot"
} else if (v8_use_snapshot) {
snapshot_target = ":v8_snapshot"
} else {
assert(!v8_use_external_startup_data)
snapshot_target = ":v8_nosnapshot"
}
###############################################################################
# Configurations
#
@@ -291,7 +300,6 @@ action("js2c_experimental") {
"src/js/generator.js",
"src/js/harmony-atomics.js",
"src/js/harmony-regexp.js",
"src/js/harmony-reflect.js",
"src/js/harmony-object-observe.js",
"src/js/harmony-sharedarraybuffer.js",
"src/js/harmony-simd.js",
@@ -774,16 +782,16 @@ source_set("v8_base") {
"src/compiler/instruction-selector.h",
"src/compiler/instruction.cc",
"src/compiler/instruction.h",
"src/compiler/interpreter-assembler.cc",
"src/compiler/interpreter-assembler.h",
"src/compiler/int64-lowering.cc",
"src/compiler/int64-lowering.h",
"src/compiler/js-builtin-reducer.cc",
"src/compiler/js-builtin-reducer.h",
"src/compiler/js-call-reducer.cc",
"src/compiler/js-call-reducer.h",
"src/compiler/js-context-relaxation.cc",
"src/compiler/js-context-relaxation.h",
"src/compiler/js-context-specialization.cc",
"src/compiler/js-context-specialization.h",
"src/compiler/js-create-lowering.cc",
"src/compiler/js-create-lowering.h",
"src/compiler/js-frame-specialization.cc",
"src/compiler/js-frame-specialization.h",
"src/compiler/js-generic-lowering.cc",
@@ -1009,6 +1017,8 @@ source_set("v8_base") {
"src/fast-dtoa.h",
"src/field-index.h",
"src/field-index-inl.h",
"src/field-type.cc",
"src/field-type.h",
"src/fixed-dtoa.cc",
"src/fixed-dtoa.h",
"src/flag-definitions.h",
@@ -1030,6 +1040,7 @@ source_set("v8_base") {
"src/handles.cc",
"src/handles.h",
"src/hashmap.h",
"src/heap-symbols.h",
"src/heap/array-buffer-tracker.cc",
"src/heap/array-buffer-tracker.h",
"src/heap/gc-idle-time-handler.cc",
@@ -1053,11 +1064,14 @@ source_set("v8_base") {
"src/heap/objects-visiting-inl.h",
"src/heap/objects-visiting.cc",
"src/heap/objects-visiting.h",
"src/heap/remembered-set.cc",
"src/heap/remembered-set.h",
"src/heap/scavenge-job.h",
"src/heap/scavenge-job.cc",
"src/heap/scavenger-inl.h",
"src/heap/scavenger.cc",
"src/heap/scavenger.h",
"src/heap/slot-set.h",
"src/heap/slots-buffer.cc",
"src/heap/slots-buffer.h",
"src/heap/spaces-inl.h",
@@ -1104,11 +1118,20 @@ source_set("v8_base") {
"src/interpreter/constant-array-builder.h",
"src/interpreter/control-flow-builders.cc",
"src/interpreter/control-flow-builders.h",
"src/interpreter/handler-table-builder.cc",
"src/interpreter/handler-table-builder.h",
"src/interpreter/interpreter.cc",
"src/interpreter/interpreter.h",
"src/interpreter/interpreter-assembler.cc",
"src/interpreter/interpreter-assembler.h",
"src/interpreter/register-translator.cc",
"src/interpreter/register-translator.h",
"src/interpreter/source-position-table.cc",
"src/interpreter/source-position-table.h",
"src/isolate-inl.h",
"src/isolate.cc",
"src/isolate.h",
"src/json-parser.h",
"src/json-stringifier.h",
"src/key-accumulator.h",
"src/key-accumulator.cc",
@@ -1144,7 +1167,6 @@ source_set("v8_base") {
"src/parsing/expression-classifier.h",
"src/parsing/func-name-inferrer.cc",
"src/parsing/func-name-inferrer.h",
"src/parsing/json-parser.h",
"src/parsing/parameter-initializer-rewriter.cc",
"src/parsing/parameter-initializer-rewriter.h",
"src/parsing/parser-base.h",
@@ -1183,6 +1205,8 @@ source_set("v8_base") {
"src/profiler/profile-generator.h",
"src/profiler/sampler.cc",
"src/profiler/sampler.h",
"src/profiler/sampling-heap-profiler.cc",
"src/profiler/sampling-heap-profiler.h",
"src/profiler/strings-storage.cc",
"src/profiler/strings-storage.h",
"src/profiler/unbound-queue-inl.h",
@@ -1262,6 +1286,7 @@ source_set("v8_base") {
"src/snapshot/snapshot-common.cc",
"src/snapshot/snapshot-source-sink.cc",
"src/snapshot/snapshot-source-sink.h",
"src/source-position.h",
"src/splay-tree.h",
"src/splay-tree-inl.h",
"src/snapshot/snapshot.h",
@@ -1286,7 +1311,6 @@ source_set("v8_base") {
"src/type-feedback-vector.h",
"src/type-info.cc",
"src/type-info.h",
"src/types-inl.h",
"src/types.cc",
"src/types.h",
"src/typing-asm.cc",
@@ -1300,6 +1324,7 @@ source_set("v8_base") {
"src/unicode-cache.h",
"src/unicode-decoder.cc",
"src/unicode-decoder.h",
"src/utils-inl.h",
"src/utils.cc",
"src/utils.h",
"src/v8.cc",
@@ -1655,6 +1680,7 @@ source_set("v8_libbase") {
"src/base/atomicops_internals_mips_gcc.h",
"src/base/atomicops_internals_mips64_gcc.h",
"src/base/atomicops_internals_portable.h",
"src/base/atomicops_internals_s390_gcc.h",
"src/base/atomicops_internals_tsan.h",
"src/base/atomicops_internals_x86_gcc.cc",
"src/base/atomicops_internals_x86_gcc.h",
@@ -1721,8 +1747,6 @@ source_set("v8_libbase") {
libs = [ "dl", "rt" ]
} else if (is_android) {
defines += [ "CAN_USE_VFP_INSTRUCTIONS" ]
if (current_toolchain == host_toolchain) {
libs = [ "dl", "rt" ]
if (host_os == "mac") {
@@ -1782,6 +1806,28 @@ source_set("v8_libplatform") {
]
}
source_set("fuzzer_support") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
"test/fuzzer/fuzzer-support.cc",
"test/fuzzer/fuzzer-support.h",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
configs += [
":internal_config_base",
":features",
":toolchain",
]
deps = [
":v8_libplatform",
snapshot_target,
]
}
###############################################################################
# Executables
#
@@ -1815,15 +1861,6 @@ if (current_toolchain == snapshot_toolchain) {
# Public targets
#
if (v8_use_snapshot && v8_use_external_startup_data) {
snapshot_target = ":v8_external_snapshot"
} else if (v8_use_snapshot) {
snapshot_target = ":v8_snapshot"
} else {
assert(!v8_use_external_startup_data)
snapshot_target = ":v8_nosnapshot"
}
if (is_component_build) {
component("v8") {
sources = [
@@ -1905,3 +1942,57 @@ if ((current_toolchain == host_toolchain && v8_toolset_for_d8 == "host") ||
}
}
}
source_set("json_fuzzer") {
sources = [
"test/fuzzer/json.cc",
]
deps = [
":fuzzer_support",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
configs += [
":internal_config",
":features",
":toolchain",
]
}
source_set("parser_fuzzer") {
sources = [
"test/fuzzer/parser.cc",
]
deps = [
":fuzzer_support",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
configs += [
":internal_config",
":features",
":toolchain",
]
}
source_set("regexp_fuzzer") {
sources = [
"test/fuzzer/regexp.cc",
]
deps = [
":fuzzer_support",
]
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
configs += [
":internal_config",
":features",
":toolchain",
]
}

deps/v8/ChangeLog (1775)

File diff suppressed because it is too large

deps/v8/DEPS (64)

@@ -8,15 +8,15 @@ vars = {
deps = {
"v8/build/gyp":
Var("git_url") + "/external/gyp.git" + "@" + "b85ad3e578da830377dbc1843aa4fbc5af17a192",
Var("git_url") + "/external/gyp.git" + "@" + "ed163ce233f76a950dce1751ac851dbe4b1c00cc",
"v8/third_party/icu":
Var("git_url") + "/chromium/deps/icu.git" + "@" + "8d342a405be5ae8aacb1e16f0bc31c3a4fbf26a2",
Var("git_url") + "/chromium/deps/icu.git" + "@" + "e466f6ac8f60bb9697af4a91c6911c6fc4aec95f",
"v8/buildtools":
Var("git_url") + "/chromium/buildtools.git" + "@" + "0f8e6e4b126ee88137930a0ae4776c4741808740",
Var("git_url") + "/chromium/buildtools.git" + "@" + "97b5c485707335dd2952c05bf11412ada3f4fb6f",
"v8/base/trace_event/common":
Var("git_url") + "/chromium/src/base/trace_event/common.git" + "@" + "d83d44b13d07c2fd0a40101a7deef9b93b841732",
Var("git_url") + "/chromium/src/base/trace_event/common.git" + "@" + "4b09207e447ae5bd34643b4c6321bee7b76d35f9",
"v8/tools/swarming_client":
Var('git_url') + '/external/swarming.client.git' + '@' + "9cdd76171e517a430a72dcd7d66ade67e109aa00",
Var('git_url') + '/external/swarming.client.git' + '@' + "0b908f18767c8304dc089454bc1c91755d21f1f5",
"v8/testing/gtest":
Var("git_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
"v8/testing/gmock":
@@ -27,9 +27,9 @@ deps = {
Var("git_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
"v8/test/simdjs/data": Var("git_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "c8ef63c728283debc25891123eb00482fee4b8cd",
"v8/test/test262/data":
Var("git_url") + "/external/github.com/tc39/test262.git" + "@" + "67ba34b03a46bac4254223ae25f42c7b959540f0",
Var("git_url") + "/external/github.com/tc39/test262.git" + "@" + "738a24b109f3fa71be44d5c3701d73141d494510",
"v8/tools/clang":
Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "24e8c1c92fe54ef8ed7651b5850c056983354a4a",
Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "a8adb78c8eda9bddb2aa9c51f3fee60296de1ad4",
}
deps_os = {
@@ -102,6 +102,22 @@ hooks = [
"-s", "v8/buildtools/linux64/clang-format.sha1",
],
},
{
'name': 'gcmole',
'pattern': '.',
'action': [
'python',
'v8/tools/gcmole/download_gcmole_tools.py',
],
},
{
'name': 'jsfunfuzz',
'pattern': '.',
'action': [
'python',
'v8/tools/jsfunfuzz/download_jsfunfuzz.py',
],
},
# Pull luci-go binaries (isolate, swarming) using checked-in hashes.
{
'name': 'luci-go_win',
@@ -136,6 +152,40 @@ hooks = [
'-d', 'v8/tools/luci-go/linux64',
],
},
# Pull GN using checked-in hashes.
{
"name": "gn_win",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--platform=win32",
"--no_auth",
"--bucket", "chromium-gn",
"-s", "v8/buildtools/win/gn.exe.sha1",
],
},
{
"name": "gn_mac",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--platform=darwin",
"--no_auth",
"--bucket", "chromium-gn",
"-s", "v8/buildtools/mac/gn.sha1",
],
},
{
"name": "gn_linux",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--platform=linux*",
"--no_auth",
"--bucket", "chromium-gn",
"-s", "v8/buildtools/linux64/gn.sha1",
],
},
{
# Update the Windows toolchain if necessary.
'name': 'win_toolchain',

deps/v8/Makefile (2)

@@ -251,7 +251,7 @@ NACL_ARCHES = nacl_ia32 nacl_x64
GYPFILES = third_party/icu/icu.gypi third_party/icu/icu.gyp \
build/shim_headers.gypi build/features.gypi build/standalone.gypi \
build/toolchain.gypi build/all.gyp build/mac/asan.gyp \
test/cctest/cctest.gyp \
test/cctest/cctest.gyp test/fuzzer/fuzzer.gyp \
test/unittests/unittests.gyp tools/gyp/v8.gyp \
tools/parser-shell.gyp testing/gmock.gyp testing/gtest.gyp \
buildtools/third_party/libc++abi/libc++abi.gyp \

deps/v8/WATCHLISTS (24)

@@ -43,9 +43,11 @@
'filepath': 'src/debug/',
},
'interpreter': {
'filepath': 'src/interpreter/',
'filepath': 'test/cctest/interpreter/',
'filepath': 'test/unittests/interpreter/',
'filepath': 'src/interpreter/' \
'|src/compiler/interpreter' \
'|src/compiler/bytecode' \
'|test/cctest/interpreter/' \
'|test/unittests/interpreter/',
},
'feature_shipping_status': {
'filepath': 'src/flag-definitions.h',
@@ -53,9 +55,12 @@
'gc_changes': {
'filepath': 'src/heap/',
},
'merges': {
'filepath': '.',
'arm': {
'filepath': '/arm/',
},
'ia32': {
'filepath': '/ia32/',
}
},
'WATCHLISTS': {
@@ -79,9 +84,12 @@
'hpayer@chromium.org',
'ulan@chromium.org',
],
'merges': [
# Only enabled on branches created with tools/release/create_release.py
'v8-merges@googlegroups.com',
'arm': [
'v8-mips-ports@googlegroups.com',
'v8-ppc-ports@googlegroups.com',
],
'ia32': [
'v8-x87-ports@googlegroups.com',
],
},
}

deps/v8/base/trace_event/common/trace_event_common.h (63)

@@ -203,40 +203,26 @@
// - category and name strings must have application lifetime (statics or
// literals). They may not include " chars.
#define TRACE_EVENT0(category_group, name) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED(category_group, name)
#define TRACE_EVENT_WITH_FLOW0(category_group, name, bind_id, flow_flags) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED_WITH_FLOW(category_group, name, bind_id, \
flow_flags)
#define TRACE_EVENT1(category_group, name, arg1_name, arg1_val) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED(category_group, name, arg1_name, arg1_val)
#define TRACE_EVENT_WITH_FLOW1(category_group, name, bind_id, flow_flags, \
arg1_name, arg1_val) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED_WITH_FLOW(category_group, name, bind_id, \
flow_flags, arg1_name, arg1_val)
#define TRACE_EVENT2(category_group, name, arg1_name, arg1_val, arg2_name, \
arg2_val) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED(category_group, name, arg1_name, arg1_val, \
arg2_name, arg2_val)
#define TRACE_EVENT_WITH_FLOW2(category_group, name, bind_id, flow_flags, \
arg1_name, arg1_val, arg2_name, arg2_val) \
INTERNAL_TRACE_MEMORY(category_group, name) \
INTERNAL_TRACE_EVENT_ADD_SCOPED_WITH_FLOW(category_group, name, bind_id, \
flow_flags, arg1_name, arg1_val, \
arg2_name, arg2_val)
// Records events like TRACE_EVENT2 but uses |memory_tag| for memory tracing.
// Use this where |name| is too generic to accurately aggregate allocations.
#define TRACE_EVENT_WITH_MEMORY_TAG2(category, name, memory_tag, arg1_name, \
arg1_val, arg2_name, arg2_val) \
INTERNAL_TRACE_MEMORY(category, memory_tag) \
INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val, \
arg2_name, arg2_val)
// UNSHIPPED_TRACE_EVENT* are like TRACE_EVENT* except that they are not
// included in official builds.
@@ -309,6 +295,12 @@
TRACE_EVENT_FLAG_COPY | scope, arg1_name, arg1_val, \
arg2_name, arg2_val)
#define TRACE_EVENT_INSTANT_WITH_TIMESTAMP0(category_group, name, scope, \
timestamp) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_INSTANT, category_group, name, 0, 0, timestamp, \
TRACE_EVENT_FLAG_NONE | scope)
// Syntactic sugars for the sampling tracing in the main thread.
#define TRACE_EVENT_SCOPED_SAMPLING_STATE(category, name) \
TRACE_EVENT_SCOPED_SAMPLING_STATE_FOR_BUCKET(0, category, name)
@@ -478,6 +470,20 @@
static_cast<int>(value1_val), value2_name, \
static_cast<int>(value2_val))
// Similar to TRACE_COUNTERx, but with a custom |timestamp| provided.
#define TRACE_COUNTER_WITH_TIMESTAMP1(category_group, name, timestamp, value) \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_COUNTER, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE, "value", static_cast<int>(value))
#define TRACE_COUNTER_WITH_TIMESTAMP2(category_group, name, timestamp, \
value1_name, value1_val, value2_name, \
value2_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_COUNTER, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE, value1_name, static_cast<int>(value1_val), \
value2_name, static_cast<int>(value2_val))
// Records the value of a counter called "name" immediately. Value
// must be representable as a 32 bit integer.
// - category and name strings must have application lifetime (statics or
@@ -920,6 +926,17 @@
name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \
arg1_val, arg2_name, arg2_val)
// Records a clock sync event.
#define TRACE_EVENT_CLOCK_SYNC_RECEIVER(sync_id) \
INTERNAL_TRACE_EVENT_ADD( \
TRACE_EVENT_PHASE_CLOCK_SYNC, "__metadata", "clock_sync", \
TRACE_EVENT_FLAG_NONE, "sync_id", sync_id)
#define TRACE_EVENT_CLOCK_SYNC_ISSUER(sync_id, issue_ts, issue_end_ts) \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_CLOCK_SYNC, "__metadata", "clock_sync", \
issue_end_ts.ToInternalValue(), TRACE_EVENT_FLAG_NONE, \
"sync_id", sync_id, "issue_ts", issue_ts.ToInternalValue())
// Macros to track the life time and value of arbitrary client objects.
// See also TraceTrackableObject.
#define TRACE_EVENT_OBJECT_CREATED_WITH_ID(category_group, name, id) \
@@ -945,6 +962,21 @@
TRACE_EVENT_PHASE_DELETE_OBJECT, category_group, name, \
TRACE_ID_DONT_MANGLE(id), TRACE_EVENT_FLAG_NONE)
// Records entering and leaving trace event contexts. |category_group| and
// |name| specify the context category and type. |context| is a
// snapshotted context object id.
#define TRACE_EVENT_ENTER_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_ENTER_CONTEXT, category_group, name, \
TRACE_ID_DONT_MANGLE(context), TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_LEAVE_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_LEAVE_CONTEXT, category_group, name, \
TRACE_ID_DONT_MANGLE(context), TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_SCOPED_CONTEXT(category_group, name, \
TRACE_ID_DONT_MANGLE(context))
// Macro to efficiently determine if a given category group is enabled.
#define TRACE_EVENT_CATEGORY_GROUP_ENABLED(category_group, ret) \
do { \
@@ -1007,6 +1039,9 @@
#define TRACE_EVENT_PHASE_DELETE_OBJECT ('D')
#define TRACE_EVENT_PHASE_MEMORY_DUMP ('v')
#define TRACE_EVENT_PHASE_MARK ('R')
#define TRACE_EVENT_PHASE_CLOCK_SYNC ('c')
#define TRACE_EVENT_PHASE_ENTER_CONTEXT ('(')
#define TRACE_EVENT_PHASE_LEAVE_CONTEXT (')')
// Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT.
#define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0))
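
The ENTER/LEAVE context macros above each reduce to a single INTERNAL_TRACE_EVENT_ADD_WITH_ID call with the new '(' and ')' phases. The following is a minimal, self-contained C++ sketch; the printf backend is a hypothetical stand-in for the embedder-supplied INTERNAL_* plumbing, and only the two ENTER/LEAVE definitions mirror the header above.

#include <cstdio>

// Hypothetical stand-ins for the embedder-supplied plumbing.
#define TRACE_ID_DONT_MANGLE(id) (id)
#define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0))
#define TRACE_EVENT_PHASE_ENTER_CONTEXT ('(')
#define TRACE_EVENT_PHASE_LEAVE_CONTEXT (')')
#define INTERNAL_TRACE_EVENT_ADD_WITH_ID(phase, category, name, id, flags) \
  std::printf("%c %s/%s id=0x%llx\n", static_cast<char>(phase), category,  \
              name, static_cast<unsigned long long>(id))

// These two definitions mirror the macros added in the header above.
#define TRACE_EVENT_ENTER_CONTEXT(category_group, name, context)    \
  INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ENTER_CONTEXT, \
                                   category_group, name,            \
                                   TRACE_ID_DONT_MANGLE(context),   \
                                   TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_LEAVE_CONTEXT(category_group, name, context)    \
  INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_LEAVE_CONTEXT, \
                                   category_group, name,            \
                                   TRACE_ID_DONT_MANGLE(context),   \
                                   TRACE_EVENT_FLAG_NONE)

int main() {
  TRACE_EVENT_ENTER_CONTEXT("v8", "Isolate", 0x1234ull);
  // ... events emitted here are attributed to context 0x1234 ...
  TRACE_EVENT_LEAVE_CONTEXT("v8", "Isolate", 0x1234ull);
}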

deps/v8/build/all.gyp (6)

@@ -11,6 +11,7 @@
'../samples/samples.gyp:*',
'../src/d8.gyp:d8',
'../test/cctest/cctest.gyp:*',
'../test/fuzzer/fuzzer.gyp:*',
'../test/unittests/unittests.gyp:*',
],
'conditions': [
@@ -30,11 +31,16 @@
'../test/mjsunit/mjsunit.gyp:*',
'../test/mozilla/mozilla.gyp:*',
'../test/optimize_for_size.gyp:*',
'../test/perf.gyp:*',
'../test/preparser/preparser.gyp:*',
'../test/simdjs/simdjs.gyp:*',
'../test/test262/test262.gyp:*',
'../test/webkit/webkit.gyp:*',
'../tools/check-static-initializers.gyp:*',
'../tools/gcmole/run_gcmole.gyp:*',
'../tools/jsfunfuzz/jsfunfuzz.gyp:*',
'../tools/run-deopt-fuzzer.gyp:*',
'../tools/run-valgrind.gyp:*',
],
}],
]

deps/v8/build/has_valgrind.py (21)

@ -0,0 +1,21 @@
#!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
VALGRIND_DIR = os.path.join(BASE_DIR, 'third_party', 'valgrind')
LINUX32_DIR = os.path.join(VALGRIND_DIR, 'linux_x86')
LINUX64_DIR = os.path.join(VALGRIND_DIR, 'linux_x64')
def DoMain(_):
"""Hook to be called from gyp without starting a separate python
interpreter."""
return int(os.path.exists(LINUX32_DIR) and os.path.exists(LINUX64_DIR))
if __name__ == '__main__':
print DoMain([])

deps/v8/build/isolate.gypi (3)

@@ -71,12 +71,15 @@
'--config-variable', 'OS=<(OS)',
'--config-variable', 'asan=<(asan)',
'--config-variable', 'cfi_vptr=<(cfi_vptr)',
'--config-variable', 'gcmole=<(gcmole)',
'--config-variable', 'has_valgrind=<(has_valgrind)',
'--config-variable', 'icu_use_data_file_flag=0',
'--config-variable', 'msan=<(msan)',
'--config-variable', 'tsan=<(tsan)',
'--config-variable', 'component=<(component)',
'--config-variable', 'target_arch=<(target_arch)',
'--config-variable', 'use_custom_libcxx=<(use_custom_libcxx)',
'--config-variable', 'v8_separate_ignition_snapshot=<(v8_separate_ignition_snapshot)',
'--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
'--config-variable', 'v8_use_snapshot=<(v8_use_snapshot)',
],

deps/v8/build/standalone.gypi (73)

@@ -110,6 +110,9 @@
'use_goma%': 0,
'gomadir%': '',
# Check if valgrind directories are present.
'has_valgrind%': '<!pymod_do_main(has_valgrind)',
'conditions': [
# Set default gomadir.
['OS=="win"', {
@@ -166,6 +169,7 @@
'test_isolation_mode%': '<(test_isolation_mode)',
'fastbuild%': '<(fastbuild)',
'coverage%': '<(coverage)',
'has_valgrind%': '<(has_valgrind)',
# Add a simple extras solely for the purpose of the cctests
'v8_extra_library_files': ['../test/cctest/test-extra.js'],
@@ -195,6 +199,9 @@
# their own default value.
'v8_use_external_startup_data%': 1,
# Use a separate ignition snapshot file in standalone builds.
'v8_separate_ignition_snapshot': 1,
# Relative path to icu.gyp from this file.
'icu_gyp_path': '../third_party/icu/icu.gyp',
@@ -313,9 +320,8 @@
['android_ndk_root==""', {
'variables': {
'android_sysroot': '<(android_toolchain)/sysroot/',
'android_stlport': '<(android_toolchain)/sources/cxx-stl/stlport/',
'android_stl': '<(android_toolchain)/sources/cxx-stl/',
},
'android_include': '<(android_sysroot)/usr/include',
'conditions': [
['target_arch=="x64"', {
'android_lib': '<(android_sysroot)/usr/lib64',
@@ -323,14 +329,16 @@
'android_lib': '<(android_sysroot)/usr/lib',
}],
],
'android_stlport_include': '<(android_stlport)/stlport',
'android_stlport_libs': '<(android_stlport)/libs',
'android_libcpp_include': '<(android_stl)/llvm-libc++/libcxx/include',
'android_libcpp_abi_include': '<(android_stl)/llvm-libc++abi/libcxxabi/include',
'android_libcpp_libs': '<(android_stl)/llvm-libc++/libs',
'android_support_include': '<(android_toolchain)/sources/android/support/include',
'android_sysroot': '<(android_sysroot)',
}, {
'variables': {
'android_sysroot': '<(android_ndk_root)/platforms/android-<(android_target_platform)/arch-<(android_target_arch)',
'android_stlport': '<(android_ndk_root)/sources/cxx-stl/stlport/',
'android_stl': '<(android_ndk_root)/sources/cxx-stl/',
},
'android_include': '<(android_sysroot)/usr/include',
'conditions': [
['target_arch=="x64"', {
'android_lib': '<(android_sysroot)/usr/lib64',
@@ -338,11 +346,14 @@
'android_lib': '<(android_sysroot)/usr/lib',
}],
],
'android_stlport_include': '<(android_stlport)/stlport',
'android_stlport_libs': '<(android_stlport)/libs',
'android_libcpp_include': '<(android_stl)/llvm-libc++/libcxx/include',
'android_libcpp_abi_include': '<(android_stl)/llvm-libc++abi/libcxxabi/include',
'android_libcpp_libs': '<(android_stl)/llvm-libc++/libs',
'android_support_include': '<(android_ndk_root)/sources/android/support/include',
'android_sysroot': '<(android_sysroot)',
}],
],
'android_stlport_library': 'stlport_static',
'android_libcpp_library': 'c++_static',
}], # OS=="android"
['host_clang==1', {
'host_cc': '<(clang_dir)/bin/clang',
@@ -367,6 +378,9 @@
# fpxx - compatibility mode, it chooses fp32 or fp64 depending on runtime
# detection
'mips_fpu_mode%': 'fp32',
# Indicates if gcmole tools are downloaded by a hook.
'gcmole%': 0,
},
'target_defaults': {
'variables': {
@@ -720,8 +734,7 @@
'cflags': [ '-fPIC', ],
}],
[ 'coverage==1', {
'cflags!': [ '-O3', '-O2', '-O1', ],
'cflags': [ '-fprofile-arcs', '-ftest-coverage', '-O0'],
'cflags': [ '-fprofile-arcs', '-ftest-coverage'],
'ldflags': [ '-fprofile-arcs'],
}],
],
@@ -1005,11 +1018,7 @@
}, # configurations
'cflags': [ '-Wno-abi', '-Wall', '-W', '-Wno-unused-parameter'],
'cflags_cc': [ '-Wnon-virtual-dtor', '-fno-rtti', '-fno-exceptions',
# Note: Using -std=c++0x will define __STRICT_ANSI__, which
# in turn will leave out some template stuff for 'long
# long'. What we want is -std=c++11, but this is not
# supported by GCC 4.6 or Xcode 4.2
'-std=gnu++0x' ],
'-std=gnu++11' ],
'target_conditions': [
['_toolset=="target"', {
'cflags!': [
@@ -1022,19 +1031,16 @@
'-fno-short-enums',
'-finline-limit=64',
'-Wa,--noexecstack',
# Note: This include is in cflags to ensure that it comes after
# all of the includes.
'-I<(android_include)',
'-I<(android_stlport_include)',
'--sysroot=<(android_sysroot)',
],
'cflags_cc': [
'-Wno-error=non-virtual-dtor', # TODO(michaelbai): Fix warnings.
'-isystem<(android_libcpp_include)',
'-isystem<(android_libcpp_abi_include)',
'-isystem<(android_support_include)',
],
'defines': [
'ANDROID',
#'__GNU_SOURCE=1', # Necessary for clone()
'USE_STLPORT=1',
'_STLP_USE_PTR_SPECIALIZATIONS=1',
'HAVE_OFF64_T',
'HAVE_SYS_UIO_H',
'ANDROID_BINSIZE_HACK', # Enable temporary hacks to reduce binsize.
@@ -1043,10 +1049,9 @@
'-pthread', # Not supported by Android toolchain.
],
'ldflags': [
'-nostdlib',
'-Wl,--no-undefined',
'-Wl,-rpath-link=<(android_lib)',
'-L<(android_lib)',
'--sysroot=<(android_sysroot)',
'-nostdlib',
],
'libraries!': [
'-lrt', # librt is built into Bionic.
@@ -1057,12 +1062,12 @@
'-lpthread', '-lnss3', '-lnssutil3', '-lsmime3', '-lplds4', '-lplc4', '-lnspr4',
],
'libraries': [
'-l<(android_stlport_library)',
'-l<(android_libcpp_library)',
'-latomic',
# Manually link the libgcc.a that the cross compiler uses.
'<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
'-lc',
'-ldl',
'-lstdc++',
'-lm',
],
'conditions': [
@@ -1079,22 +1084,22 @@
'-mfpu=vfp3',
],
'ldflags': [
'-L<(android_stlport_libs)/armeabi-v7a',
'-L<(android_libcpp_libs)/armeabi-v7a',
],
}],
['target_arch=="arm" and arm_version < 7', {
'ldflags': [
'-L<(android_stlport_libs)/armeabi',
'-L<(android_libcpp_libs)/armeabi',
],
}],
['target_arch=="x64"', {
'ldflags': [
'-L<(android_stlport_libs)/x86_64',
'-L<(android_libcpp_libs)/x86_64',
],
}],
['target_arch=="arm64"', {
'ldflags': [
'-L<(android_stlport_libs)/arm64-v8a',
'-L<(android_libcpp_libs)/arm64-v8a',
],
}],
['target_arch=="ia32" or target_arch=="x87"', {
@@ -1106,7 +1111,7 @@
'-fno-stack-protector',
],
'ldflags': [
'-L<(android_stlport_libs)/x86',
'-L<(android_libcpp_libs)/x86',
],
}],
['target_arch=="mipsel"', {
@@ -1119,7 +1124,7 @@
'-fno-stack-protector',
],
'ldflags': [
'-L<(android_stlport_libs)/mips',
'-L<(android_libcpp_libs)/mips',
],
}],
['(target_arch=="arm" or target_arch=="arm64" or target_arch=="x64" or target_arch=="ia32") and component!="shared_library"', {

deps/v8/build/toolchain.gypi (3)

@@ -81,6 +81,9 @@
# The setting is ignored if want_separate_host_toolset is 0.
'v8_toolset_for_d8%': 'target',
# Control usage of a separate ignition snapshot file.
'v8_separate_ignition_snapshot%': 0,
'host_os%': '<(OS)',
'werror%': '-Werror',
# For a shared library build, results in "libv8-<(soname_version).so".

deps/v8/build/vs_toolchain.py (4)

@@ -191,10 +191,10 @@ def _GetDesiredVsToolchainHashes():
"""Load a list of SHA1s corresponding to the toolchains that we want installed
to build with."""
if os.environ.get('GYP_MSVS_VERSION') == '2015':
return ['49ae4b60d898182fc3f521c2fcda82c453915011']
return ['5a85cf1ce842f7cc96b9d17039a445a9dc9cf0dd']
else:
# Default to VS2013.
return ['ee7d718ec60c2dc5d255bbe325909c2021a7efef']
return ['9ff97c632ae1fee0c98bcd53e71770eb3a0d8deb']
def Update(force=False):

deps/v8/include/v8-experimental.h (1)

@@ -39,6 +39,7 @@ class V8_EXPORT FastAccessorBuilder {
LabelId MakeLabel();
void SetLabel(LabelId label_id);
void CheckNotZeroOrJump(ValueId value_id, LabelId label_id);
ValueId Call(v8::FunctionCallback callback, ValueId value_id);
private:
FastAccessorBuilder() = delete;

deps/v8/include/v8-platform.h (10)

@@ -5,6 +5,7 @@
#ifndef V8_V8_PLATFORM_H_
#define V8_V8_PLATFORM_H_
#include <stddef.h>
#include <stdint.h>
namespace v8 {
@@ -55,6 +56,15 @@ class Platform {
virtual ~Platform() {}
/**
* Gets the number of threads that are used to execute background tasks. Is
* used to estimate the number of tasks a work package should be split into.
* A return value of 0 means that there are no background threads available.
* Note that a value of 0 won't prohibit V8 from posting tasks using
* |CallOnBackgroundThread|.
*/
virtual size_t NumberOfAvailableBackgroundThreads() { return 0; }
/**
* Schedules a task to be invoked on a background thread. |expected_runtime|
* indicates that the task will run a long time. The Platform implementation
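
For embedders, NumberOfAvailableBackgroundThreads() is an optional override whose default of 0 keeps existing platforms working. A toy sketch follows; it assumes the pure-virtual set visible in this revision of the header, and everything but the new method is placeholder behavior.

#include <thread>
#include "include/v8-platform.h"

class ToyPlatform : public v8::Platform {
 public:
  // New hook from this upgrade: report the worker-pool size so V8 can
  // estimate how many pieces a work package should be split into.
  size_t NumberOfAvailableBackgroundThreads() override { return 1; }

  void CallOnBackgroundThread(v8::Task* task, ExpectedRuntime) override {
    // One detached worker per task; a real platform would use a pool.
    std::thread([task] {
      task->Run();
      delete task;
    }).detach();
  }
  void CallOnForegroundThread(v8::Isolate*, v8::Task* task) override {
    task->Run();  // Placeholder: run inline instead of queueing.
    delete task;
  }
  void CallDelayedOnForegroundThread(v8::Isolate*, v8::Task* task,
                                     double /*delay_in_seconds*/) override {
    task->Run();  // Placeholder: ignores the requested delay.
    delete task;
  }
  double MonotonicallyIncreasingTime() override { return 0.0; }
};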

deps/v8/include/v8-profiler.h (134)

@@ -206,6 +206,13 @@ class V8_EXPORT CpuProfiler {
*/
CpuProfile* StopProfiling(Local<String> title);
/**
* Force collection of a sample. Must be called on the VM thread.
* Recording the forced sample does not contribute to the aggregated
* profile statistics.
*/
void CollectSample();
/**
* Tells the profiler whether the embedder is idle.
*/
@@ -418,6 +425,90 @@ class V8_EXPORT ActivityControl { // NOLINT
};
/**
* AllocationProfile is a sampled profile of allocations done by the program.
* This is structured as a call-graph.
*/
class V8_EXPORT AllocationProfile {
public:
struct Allocation {
/**
* Size of the sampled allocation object.
*/
size_t size;
/**
* The number of objects of such size that were sampled.
*/
unsigned int count;
};
/**
* Represents a node in the call-graph.
*/
struct Node {
/**
* Name of the function. May be empty for anonymous functions or if the
* script corresponding to this function has been unloaded.
*/
Local<String> name;
/**
* Name of the script containing the function. May be empty if the script
* name is not available, or if the script has been unloaded.
*/
Local<String> script_name;
/**
* id of the script where the function is located. May be equal to
* v8::UnboundScript::kNoScriptId in cases where the script doesn't exist.
*/
int script_id;
/**
* Start position of the function in the script.
*/
int start_position;
/**
* 1-indexed line number where the function starts. May be
* kNoLineNumberInfo if no line number information is available.
*/
int line_number;
/**
* 1-indexed column number where the function starts. May be
* kNoColumnNumberInfo if no line number information is available.
*/
int column_number;
/**
* List of callees called from this node for which we have sampled
* allocations. The lifetime of the children is scoped to the containing
* AllocationProfile.
*/
std::vector<Node*> children;
/**
* List of self allocations done by this node in the call-graph.
*/
std::vector<Allocation> allocations;
};
/**
* Returns the root node of the call-graph. The root node corresponds to an
* empty JS call-stack. The lifetime of the returned Node* is scoped to the
* containing AllocationProfile.
*/
virtual Node* GetRootNode() = 0;
virtual ~AllocationProfile() {}
static const int kNoLineNumberInfo = Message::kNoLineNumberInfo;
static const int kNoColumnNumberInfo = Message::kNoColumnInfo;
};
/**
* Interface for controlling heap profiling. Instance of the
* profiler can be retrieved using v8::Isolate::GetHeapProfiler.
@@ -521,6 +612,49 @@ class V8_EXPORT HeapProfiler {
*/
void StopTrackingHeapObjects();
/**
* Starts gathering a sampling heap profile. A sampling heap profile is
* similar to tcmalloc's heap profiler and Go's mprof. It samples object
* allocations and builds an online 'sampling' heap profile. At any point in
* time, this profile is expected to be a representative sample of objects
* currently live in the system. Each sampled allocation includes the stack
* trace at the time of allocation, which makes this really useful for memory
* leak detection.
*
* This mechanism is intended to be cheap enough that it can be used in
* production with minimal performance overhead.
*
* Allocations are sampled using a randomized Poisson process. On average, one
* allocation will be sampled every |sample_interval| bytes allocated. The
* |stack_depth| parameter controls the maximum number of stack frames to be
* captured on each allocation.
*
* NOTE: This is a proof-of-concept at this point. Right now we only sample
* newspace allocations. Support for paged space allocation (e.g. pre-tenured
* objects, large objects, code objects, etc.) and native allocations
* doesn't exist yet, but is anticipated in the future.
*
* Objects allocated before the sampling is started will not be included in
* the profile.
*
* Returns false if a sampling heap profiler is already running.
*/
bool StartSamplingHeapProfiler(uint64_t sample_interval = 512 * 1024,
int stack_depth = 16);
/**
* Stops the sampling heap profile and discards the current profile.
*/
void StopSamplingHeapProfiler();
/**
* Returns the sampled profile of allocations allocated (and still live) since
* StartSamplingHeapProfiler was called. The ownership of the pointer is
* transfered to the caller. Returns nullptr if sampling heap profiler is not
* active.
*/
AllocationProfile* GetAllocationProfile();
/**
* Deletes all snapshots taken. All previously returned pointers to
* snapshots and their contents become invalid after this call.
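
Taken together, the new HeapProfiler methods form a start/collect/stop session. A hedged usage sketch follows (DumpSampledBytes is a hypothetical helper; `isolate` is assumed to be a live isolate):

#include <cstdio>
#include "include/v8.h"
#include "include/v8-profiler.h"

void DumpSampledBytes(v8::Isolate* isolate) {
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();

  // On average one sample per 256 KiB allocated, 16 stack frames deep.
  if (!profiler->StartSamplingHeapProfiler(256 * 1024, 16)) {
    return;  // Another sampling session is already running.
  }

  // ... run the workload to be profiled ...

  // The caller owns the returned profile.
  if (v8::AllocationProfile* profile = profiler->GetAllocationProfile()) {
    size_t bytes = 0;
    for (const auto& a : profile->GetRootNode()->allocations) {
      bytes += a.size * a.count;  // Self-allocations at the root node only.
    }
    std::printf("sampled bytes at root: %zu\n", bytes);
    delete profile;
  }
  profiler->StopSamplingHeapProfiler();
}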

deps/v8/include/v8-version.h (8)

@@ -8,10 +8,10 @@
// These macros define the version number for the current version.
// NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 4
#define V8_MINOR_VERSION 9
#define V8_BUILD_NUMBER 385
#define V8_PATCH_LEVEL 35
#define V8_MAJOR_VERSION 5
#define V8_MINOR_VERSION 0
#define V8_BUILD_NUMBER 71
#define V8_PATCH_LEVEL 32
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
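
The four macros above concatenate into the version in the commit title. A quick sketch to check that (the STR helpers are local to this example):

#include <cstdio>
#include "include/v8-version.h"

#define STR_(x) #x
#define STR(x) STR_(x)

int main() {
  // Prints "5.0.71.32" with the values bumped above.
  std::printf("%s.%s.%s.%s\n", STR(V8_MAJOR_VERSION), STR(V8_MINOR_VERSION),
              STR(V8_BUILD_NUMBER), STR(V8_PATCH_LEVEL));
}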

deps/v8/include/v8.h (104)

@@ -329,9 +329,7 @@ class Local {
friend class PersistentValueMapBase;
template<class F1, class F2> friend class PersistentValueVector;
template <class S>
V8_INLINE Local(S* that)
: val_(that) {}
explicit V8_INLINE Local(T* that) : val_(that) {}
V8_INLINE static Local<T> New(Isolate* isolate, T* that);
T* val_;
};
@@ -434,7 +432,10 @@ class WeakCallbackInfo {
return internal_fields_[1];
}
bool IsFirstPass() const { return callback_ != nullptr; }
V8_DEPRECATED("Not reliable once SetSecondPassCallback() was used.",
bool IsFirstPass() const) {
return callback_ != nullptr;
}
// When first called, the embedder MUST Reset() the Global which triggered the
// callback. The Global itself is unusable for anything else. No v8 other api
@@ -787,7 +788,7 @@ template <class T, class M> class Persistent : public PersistentBase<T> {
template<class F1, class F2> friend class Persistent;
template<class F> friend class ReturnValue;
template <class S> V8_INLINE Persistent(S* that) : PersistentBase<T>(that) { }
explicit V8_INLINE Persistent(T* that) : PersistentBase<T>(that) {}
V8_INLINE T* operator*() const { return this->val_; }
template<class S, class M2>
V8_INLINE void Copy(const Persistent<S, M2>& that);
@@ -886,7 +887,7 @@ using UniquePersistent = Global<T>;
*/
class V8_EXPORT HandleScope {
public:
HandleScope(Isolate* isolate);
explicit HandleScope(Isolate* isolate);
~HandleScope();
@@ -939,7 +940,7 @@
*/
class V8_EXPORT EscapableHandleScope : public HandleScope {
public:
EscapableHandleScope(Isolate* isolate);
explicit EscapableHandleScope(Isolate* isolate);
V8_INLINE ~EscapableHandleScope() {}
/**
@@ -3147,7 +3148,8 @@ class FunctionCallbackInfo {
public:
V8_INLINE int Length() const;
V8_INLINE Local<Value> operator[](int i) const;
V8_INLINE Local<Function> Callee() const;
V8_INLINE V8_DEPRECATED("Use Data() to explicitly pass Callee instead",
Local<Function> Callee() const);
V8_INLINE Local<Object> This() const;
V8_INLINE Local<Object> Holder() const;
V8_INLINE bool IsConstructCall() const;
@@ -3191,19 +3193,21 @@ class PropertyCallbackInfo {
V8_INLINE Local<Object> This() const;
V8_INLINE Local<Object> Holder() const;
V8_INLINE ReturnValue<T> GetReturnValue() const;
V8_INLINE bool ShouldThrowOnError() const;
// This shouldn't be public, but the arm compiler needs it.
static const int kArgsLength = 6;
static const int kArgsLength = 7;
protected:
friend class MacroAssembler;
friend class internal::PropertyCallbackArguments;
friend class internal::CustomArguments<PropertyCallbackInfo>;
static const int kHolderIndex = 0;
static const int kIsolateIndex = 1;
static const int kReturnValueDefaultValueIndex = 2;
static const int kReturnValueIndex = 3;
static const int kDataIndex = 4;
static const int kThisIndex = 5;
static const int kShouldThrowOnErrorIndex = 0;
static const int kHolderIndex = 1;
static const int kIsolateIndex = 2;
static const int kReturnValueDefaultValueIndex = 3;
static const int kReturnValueIndex = 4;
static const int kDataIndex = 5;
static const int kThisIndex = 6;
V8_INLINE PropertyCallbackInfo(internal::Object** args) : args_(args) {}
internal::Object** args_;
@@ -4322,9 +4326,11 @@ enum AccessType {
* object.
*/
typedef bool (*AccessCheckCallback)(Local<Context> accessing_context,
Local<Object> accessed_object,
Local<Value> data);
typedef bool (*DeprecatedAccessCheckCallback)(Local<Context> accessing_context,
Local<Object> accessed_object);
/**
* Returns true if cross-context access should be allowed to the named
* property with the given key on the host object.
@@ -4753,6 +4759,10 @@ class V8_EXPORT ObjectTemplate : public Template {
*/
void SetAccessCheckCallback(AccessCheckCallback callback,
Local<Value> data = Local<Value>());
V8_DEPRECATED(
"Use SetAccessCheckCallback with new AccessCheckCallback signature.",
void SetAccessCheckCallback(DeprecatedAccessCheckCallback callback,
Local<Value> data = Local<Value>()));
V8_DEPRECATED(
"Use SetAccessCheckCallback instead",
@@ -4999,8 +5009,10 @@ typedef void (*MemoryAllocationCallback)(ObjectSpace space,
AllocationAction action,
int size);
// --- Leave Script Callback ---
typedef void (*CallCompletedCallback)();
// --- Enter/Leave Script Callback ---
typedef void (*BeforeCallEnteredCallback)(Isolate*);
typedef void (*CallCompletedCallback)(Isolate*);
typedef void (*DeprecatedCallCompletedCallback)();
// --- Promise Reject Callback ---
enum PromiseRejectEvent {
@@ -5069,11 +5081,24 @@ enum GCType {
kGCTypeIncrementalMarking | kGCTypeProcessWeakCallbacks
};
/**
* GCCallbackFlags is used to notify additional information about the GC
* callback.
* - kGCCallbackFlagConstructRetainedObjectInfos: The GC callback is for
* constructing retained object infos.
* - kGCCallbackFlagForced: The GC callback is for a forced GC for testing.
* - kGCCallbackFlagSynchronousPhantomCallbackProcessing: The GC callback
* is called synchronously without getting posted to an idle task.
* - kGCCallbackFlagCollectAllAvailableGarbage: The GC callback is called
* in a phase where V8 is trying to collect all available garbage
* (e.g., handling a low memory notification).
*/
enum GCCallbackFlags {
kNoGCCallbackFlags = 0,
kGCCallbackFlagConstructRetainedObjectInfos = 1 << 1,
kGCCallbackFlagForced = 1 << 2,
kGCCallbackFlagSynchronousPhantomCallbackProcessing = 1 << 3
kGCCallbackFlagSynchronousPhantomCallbackProcessing = 1 << 3,
kGCCallbackFlagCollectAllAvailableGarbage = 1 << 4,
};
typedef void (*GCCallback)(GCType type, GCCallbackFlags flags);
@@ -5455,6 +5480,18 @@ class V8_EXPORT Isolate {
kPromiseChain = 17,
kPromiseAccept = 18,
kPromiseDefer = 19,
kHtmlCommentInExternalScript = 20,
kHtmlComment = 21,
kSloppyModeBlockScopedFunctionRedefinition = 22,
kForInInitializer = 23,
kArrayProtectorDirtied = 24,
kArraySpeciesModified = 25,
kArrayPrototypeConstructorModified = 26,
kArrayInstanceProtoModified = 27,
kArrayInstanceConstructorModified = 28,
// If you add new values here, you'll also need to update V8Initializer.cpp
// in Chromium.
kUseCounterFeatureCount // This enum value must be last.
};
@@ -5795,6 +5832,19 @@ class V8_EXPORT Isolate {
*/
void SetEventLogger(LogEventCallback that);
/**
* Adds a callback to notify the host application right before a script
* is about to run. If a script re-enters the runtime during executing, the
* BeforeCallEnteredCallback is invoked for each re-entrance.
* Executing scripts inside the callback will re-trigger the callback.
*/
void AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
/**
* Removes callback that was installed by AddBeforeCallEnteredCallback.
*/
void RemoveBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
/**
* Adds a callback to notify the host application when a script finished
* running. If a script re-enters the runtime during executing, the
@@ -5803,12 +5853,18 @@
* further callbacks.
*/
void AddCallCompletedCallback(CallCompletedCallback callback);
V8_DEPRECATE_SOON(
"Use callback with parameter",
void AddCallCompletedCallback(DeprecatedCallCompletedCallback callback));
/**
* Removes callback that was installed by AddCallCompletedCallback.
*/
void RemoveCallCompletedCallback(CallCompletedCallback callback);
V8_DEPRECATE_SOON(
"Use callback with parameter",
void RemoveCallCompletedCallback(
DeprecatedCallCompletedCallback callback));
/**
* Set callback to notify about promise reject with no handler, or
@@ -7132,7 +7188,7 @@ class Internals {
static const int kNodeIsPartiallyDependentShift = 4;
static const int kNodeIsActiveShift = 4;
static const int kJSObjectType = 0xb7;
static const int kJSObjectType = 0xb5;
static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83;
static const int kForeignType = 0x87;
@@ -8262,6 +8318,12 @@ ReturnValue<T> PropertyCallbackInfo<T>::GetReturnValue() const {
return ReturnValue<T>(&args_[kReturnValueIndex]);
}
template <typename T>
bool PropertyCallbackInfo<T>::ShouldThrowOnError() const {
typedef internal::Internals I;
return args_[kShouldThrowOnErrorIndex] != I::IntToSmi(0);
}
Local<Primitive> Undefined(Isolate* isolate) {
typedef internal::Object* S;
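
Two of the v8.h changes above are embedder-facing: the call-entered/completed callbacks now receive the Isolate*, and property callbacks can ask ShouldThrowOnError() whether the failing access came from strict-mode code. A hedged sketch of both (the callback bodies are placeholders, not prescribed usage):

#include "include/v8.h"

// New signatures: both callbacks now take the isolate.
static void OnBeforeCallEntered(v8::Isolate*) {
  // Runs right before a script call enters the VM, and again on each
  // re-entrance; keep this cheap.
}
static void OnCallCompleted(v8::Isolate*) {
  // Runs when the outermost script call returns.
}

void InstallCallbacks(v8::Isolate* isolate) {
  isolate->AddBeforeCallEnteredCallback(OnBeforeCallEntered);
  isolate->AddCallCompletedCallback(OnCallCompleted);
}

// Interceptor sketch: with ShouldThrowOnError(), a read-only property can
// throw in strict mode and stay silent in sloppy mode.
static void ReadOnlySetter(v8::Local<v8::Name>, v8::Local<v8::Value>,
                           const v8::PropertyCallbackInfo<v8::Value>& info) {
  if (info.ShouldThrowOnError()) {
    info.GetIsolate()->ThrowException(v8::String::NewFromUtf8(
        info.GetIsolate(), "cannot assign to read-only property"));
  }
  // Sloppy mode: the write is silently ignored.
}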

deps/v8/infra/config/cq.cfg (5)

@@ -16,6 +16,7 @@ rietveld {
verifiers {
reviewer_lgtm {
committer_list: "project-v8-committers"
dry_run_access_list: "project-v8-tryjob-access"
}
tree_status {
@@ -66,9 +67,9 @@ verifiers {
}
}
buckets {
name: "tryserver.blink"
name: "tryserver.v8"
builders {
name: "linux_blink_rel"
name: "v8_linux_blink_rel"
experiment_percentage: 20
}
}

deps/v8/snapshot_toolchain.gni (2)

@@ -42,3 +42,5 @@ if (host_cpu == "x64" && host_os == "linux") {
} else {
snapshot_toolchain = default_toolchain
}

deps/v8/src/DEPS (8)

@@ -12,18 +12,12 @@ include_rules = [
"+src/interpreter/bytecode-array-iterator.h",
"+src/interpreter/bytecodes.h",
"+src/interpreter/interpreter.h",
"+src/interpreter/source-position-table.h",
"-src/libplatform",
"-include/libplatform"
]
specific_include_rules = {
".*\.h": [
# Note that src/v8.h by now is a regular header file, it doesn't provide
# any special declarations besides the V8 class. There should be no need
# for including it in any .h files though. This rule is just a reminder,
# and can be removed once the dust has settled.
"-src/v8.h",
],
"d8\.cc": [
"+include/libplatform/libplatform.h",
],

deps/v8/src/accessors.cc (514)

@@ -27,13 +27,15 @@ Handle<AccessorInfo> Accessors::MakeAccessor(
AccessorNameSetterCallback setter,
PropertyAttributes attributes) {
Factory* factory = isolate->factory();
Handle<ExecutableAccessorInfo> info = factory->NewExecutableAccessorInfo();
Handle<AccessorInfo> info = factory->NewAccessorInfo();
info->set_property_attributes(attributes);
info->set_all_can_read(false);
info->set_all_can_write(false);
info->set_is_special_data_property(true);
name = factory->InternalizeName(name);
info->set_name(*name);
Handle<Object> get = v8::FromCData(isolate, getter);
if (setter == nullptr) setter = &ReconfigureToDataProperty;
Handle<Object> set = v8::FromCData(isolate, setter);
info->set_getter(*get);
info->set_setter(*set);
@@ -41,21 +43,6 @@ Handle<AccessorInfo> Accessors::MakeAccessor(
}
Handle<ExecutableAccessorInfo> Accessors::CloneAccessor(
Isolate* isolate,
Handle<ExecutableAccessorInfo> accessor) {
Factory* factory = isolate->factory();
Handle<ExecutableAccessorInfo> info = factory->NewExecutableAccessorInfo();
info->set_name(accessor->name());
info->set_flag(accessor->flag());
info->set_expected_receiver_type(accessor->expected_receiver_type());
info->set_getter(accessor->getter());
info->set_setter(accessor->setter());
info->set_data(accessor->data());
return info;
}
static V8_INLINE bool CheckForName(Handle<Name> name,
Handle<String> property_name,
int offset,
@@ -96,6 +83,7 @@ bool Accessors::IsJSObjectFieldAccessor(Handle<Map> map, Handle<Name> name,
bool Accessors::IsJSArrayBufferViewFieldAccessor(Handle<Map> map,
Handle<Name> name,
int* object_offset) {
DCHECK(name->IsUniqueName());
Isolate* isolate = name->GetIsolate();
switch (map->instance_type()) {
@@ -113,7 +101,7 @@ bool Accessors::IsJSArrayBufferViewFieldAccessor(Handle<Map> map,
// Check if the property is overridden on the instance.
DescriptorArray* descriptors = map->instance_descriptors();
int descriptor = descriptors->SearchWithCache(*name, *map);
int descriptor = descriptors->SearchWithCache(isolate, *name, *map);
if (descriptor != DescriptorArray::kNotFound) return false;
Handle<Object> proto = Handle<Object>(map->prototype(), isolate);
@@ -140,6 +128,50 @@ bool Accessors::IsJSArrayBufferViewFieldAccessor(Handle<Map> map,
}
}
MUST_USE_RESULT static MaybeHandle<Object> ReplaceAccessorWithDataProperty(
Isolate* isolate, Handle<JSObject> receiver, Handle<JSObject> holder,
Handle<Name> name, Handle<Object> value, bool observe) {
LookupIterator it(receiver, name, holder,
LookupIterator::OWN_SKIP_INTERCEPTOR);
// Skip any access checks we might hit. This accessor should never hit in a
// situation where the caller does not have access.
if (it.state() == LookupIterator::ACCESS_CHECK) {
CHECK(it.HasAccess());
it.Next();
}
CHECK_EQ(LookupIterator::ACCESSOR, it.state());
Handle<Object> old_value;
bool is_observed = observe && receiver->map()->is_observed();
if (is_observed) {
MaybeHandle<Object> maybe_old = Object::GetPropertyWithAccessor(&it);
if (!maybe_old.ToHandle(&old_value)) return maybe_old;
}
it.ReconfigureDataProperty(value, it.property_attributes());
if (is_observed && !old_value->SameValue(*value)) {
return JSObject::EnqueueChangeRecord(receiver, "update", name, old_value);
}
return value;
}
void Accessors::ReconfigureToDataProperty(
v8::Local<v8::Name> key, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
HandleScope scope(isolate);
Handle<JSObject> receiver =
Handle<JSObject>::cast(Utils::OpenHandle(*info.This()));
Handle<JSObject> holder =
Handle<JSObject>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Name> name = Utils::OpenHandle(*key);
Handle<Object> value = Utils::OpenHandle(*val);
MaybeHandle<Object> result = ReplaceAccessorWithDataProperty(
isolate, receiver, holder, name, value, false);
if (result.is_null()) isolate->OptionalRescheduleException(false);
}
//
// Accessors::ArgumentsIterator
@@ -156,29 +188,11 @@ void Accessors::ArgumentsIteratorGetter(
}
void Accessors::ArgumentsIteratorSetter(
v8::Local<v8::Name> name, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
HandleScope scope(isolate);
Handle<JSObject> object_handle =
Handle<JSObject>::cast(Utils::OpenHandle(*info.This()));
Handle<Object> value_handle = Utils::OpenHandle(*val);
Handle<Name> name_handle = Utils::OpenHandle(*name);
if (JSObject::DefinePropertyOrElementIgnoreAttributes(
object_handle, name_handle, value_handle, NONE)
.is_null()) {
isolate->OptionalRescheduleException(false);
}
}
Handle<AccessorInfo> Accessors::ArgumentsIteratorInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<Name> name = isolate->factory()->iterator_symbol();
return MakeAccessor(isolate, name, &ArgumentsIteratorGetter,
&ArgumentsIteratorSetter, attributes);
return MakeAccessor(isolate, name, &ArgumentsIteratorGetter, nullptr,
attributes);
}
@@ -219,6 +233,19 @@ void Accessors::ArrayLengthSetter(
if (JSArray::ObservableSetLength(array, length).is_null()) {
isolate->OptionalRescheduleException(false);
}
if (info.ShouldThrowOnError()) {
uint32_t actual_new_len = 0;
CHECK(array->length()->ToArrayLength(&actual_new_len));
// Throw TypeError if there were non-deletable elements.
if (actual_new_len != length) {
Factory* factory = isolate->factory();
isolate->Throw(*factory->NewTypeError(
MessageTemplate::kStrictDeleteProperty,
factory->NewNumberFromUint(actual_new_len - 1), array));
isolate->OptionalRescheduleException(false);
}
}
}
@@ -259,21 +286,10 @@ void Accessors::StringLengthGetter(
}
void Accessors::StringLengthSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::StringLengthInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->length_string(),
&StringLengthGetter,
&StringLengthSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->length_string(),
&StringLengthGetter, nullptr, attributes);
}
@@ -295,22 +311,11 @@ void Accessors::ScriptColumnOffsetGetter(
}
void Accessors::ScriptColumnOffsetSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptColumnOffsetInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("column_offset")));
return MakeAccessor(isolate,
name,
&ScriptColumnOffsetGetter,
&ScriptColumnOffsetSetter,
return MakeAccessor(isolate, name, &ScriptColumnOffsetGetter, nullptr,
attributes);
}
@@ -332,23 +337,11 @@ void Accessors::ScriptIdGetter(
}
void Accessors::ScriptIdSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptIdInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(
isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("id")));
return MakeAccessor(isolate,
name,
&ScriptIdGetter,
&ScriptIdSetter,
attributes);
return MakeAccessor(isolate, name, &ScriptIdGetter, nullptr, attributes);
}
@@ -369,21 +362,10 @@ void Accessors::ScriptNameGetter(
}
void Accessors::ScriptNameSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptNameInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->name_string(),
&ScriptNameGetter,
&ScriptNameSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->name_string(),
&ScriptNameGetter, nullptr, attributes);
}
@@ -404,21 +386,10 @@ void Accessors::ScriptSourceGetter(
}
void Accessors::ScriptSourceSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptSourceInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->source_string(),
&ScriptSourceGetter,
&ScriptSourceSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->source_string(),
&ScriptSourceGetter, nullptr, attributes);
}
@ -440,22 +411,11 @@ void Accessors::ScriptLineOffsetGetter(
}
void Accessors::ScriptLineOffsetSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptLineOffsetInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("line_offset")));
return MakeAccessor(isolate,
name,
&ScriptLineOffsetGetter,
&ScriptLineOffsetSetter,
return MakeAccessor(isolate, name, &ScriptLineOffsetGetter, nullptr,
attributes);
}
@ -478,23 +438,11 @@ void Accessors::ScriptTypeGetter(
}
void Accessors::ScriptTypeSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptTypeInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(
isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("type")));
return MakeAccessor(isolate,
name,
&ScriptTypeGetter,
&ScriptTypeSetter,
attributes);
return MakeAccessor(isolate, name, &ScriptTypeGetter, nullptr, attributes);
}
@ -516,22 +464,11 @@ void Accessors::ScriptCompilationTypeGetter(
}
void Accessors::ScriptCompilationTypeSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptCompilationTypeInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("compilation_type")));
return MakeAccessor(isolate,
name,
&ScriptCompilationTypeGetter,
&ScriptCompilationTypeSetter,
return MakeAccessor(isolate, name, &ScriptCompilationTypeGetter, nullptr,
attributes);
}
@ -561,22 +498,11 @@ void Accessors::ScriptLineEndsGetter(
}
void Accessors::ScriptLineEndsSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptLineEndsInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("line_ends")));
return MakeAccessor(isolate,
name,
&ScriptLineEndsGetter,
&ScriptLineEndsSetter,
return MakeAccessor(isolate, name, &ScriptLineEndsGetter, nullptr,
attributes);
}
@ -598,21 +524,10 @@ void Accessors::ScriptSourceUrlGetter(
}
void Accessors::ScriptSourceUrlSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptSourceUrlInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->source_url_string(),
&ScriptSourceUrlGetter,
&ScriptSourceUrlSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->source_url_string(),
&ScriptSourceUrlGetter, nullptr, attributes);
}
@ -634,21 +549,10 @@ void Accessors::ScriptSourceMappingUrlGetter(
}
void Accessors::ScriptSourceMappingUrlSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptSourceMappingUrlInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->source_mapping_url_string(),
&ScriptSourceMappingUrlGetter,
&ScriptSourceMappingUrlSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->source_mapping_url_string(),
&ScriptSourceMappingUrlGetter, nullptr, attributes);
}
@ -671,19 +575,12 @@ void Accessors::ScriptIsEmbedderDebugScriptGetter(
}
void Accessors::ScriptIsEmbedderDebugScriptSetter(
v8::Local<v8::Name> name, v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptIsEmbedderDebugScriptInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("is_debugger_script")));
return MakeAccessor(isolate, name, &ScriptIsEmbedderDebugScriptGetter,
&ScriptIsEmbedderDebugScriptSetter, attributes);
nullptr, attributes);
}
@ -704,22 +601,11 @@ void Accessors::ScriptContextDataGetter(
}
void Accessors::ScriptContextDataSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptContextDataInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("context_data")));
return MakeAccessor(isolate,
name,
&ScriptContextDataGetter,
&ScriptContextDataSetter,
return MakeAccessor(isolate, name, &ScriptContextDataGetter, nullptr,
attributes);
}
@ -751,22 +637,11 @@ void Accessors::ScriptEvalFromScriptGetter(
}
void Accessors::ScriptEvalFromScriptSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptEvalFromScriptInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("eval_from_script")));
return MakeAccessor(isolate,
name,
&ScriptEvalFromScriptGetter,
&ScriptEvalFromScriptSetter,
return MakeAccessor(isolate, name, &ScriptEvalFromScriptGetter, nullptr,
attributes);
}
@ -789,7 +664,6 @@ void Accessors::ScriptEvalFromScriptPositionGetter(
Handle<Code> code(SharedFunctionInfo::cast(
script->eval_from_shared())->code());
result = Handle<Object>(Smi::FromInt(code->SourcePosition(
code->instruction_start() +
script->eval_from_instructions_offset())),
isolate);
}
@ -797,23 +671,12 @@ void Accessors::ScriptEvalFromScriptPositionGetter(
}
void Accessors::ScriptEvalFromScriptPositionSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptEvalFromScriptPositionInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("eval_from_script_position")));
return MakeAccessor(isolate,
name,
&ScriptEvalFromScriptPositionGetter,
&ScriptEvalFromScriptPositionSetter,
attributes);
return MakeAccessor(isolate, name, &ScriptEvalFromScriptPositionGetter,
nullptr, attributes);
}
@ -843,22 +706,11 @@ void Accessors::ScriptEvalFromFunctionNameGetter(
}
void Accessors::ScriptEvalFromFunctionNameSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<void>& info) {
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::ScriptEvalFromFunctionNameInfo(
Isolate* isolate, PropertyAttributes attributes) {
Handle<String> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("eval_from_function_name")));
return MakeAccessor(isolate,
name,
&ScriptEvalFromFunctionNameGetter,
&ScriptEvalFromFunctionNameSetter,
return MakeAccessor(isolate, name, &ScriptEvalFromFunctionNameGetter, nullptr,
attributes);
}
@ -976,59 +828,27 @@ void Accessors::FunctionLengthGetter(
info.GetReturnValue().Set(Utils::ToLocal(result));
}
MUST_USE_RESULT static MaybeHandle<Object> ReplaceAccessorWithDataProperty(
Isolate* isolate, Handle<JSObject> object, Handle<Name> name,
Handle<Object> value, bool is_observed, Handle<Object> old_value) {
LookupIterator it(object, name);
CHECK_EQ(LookupIterator::ACCESSOR, it.state());
DCHECK(it.HolderIsReceiverOrHiddenPrototype());
it.ReconfigureDataProperty(value, it.property_details().attributes());
if (is_observed && !old_value->SameValue(*value)) {
return JSObject::EnqueueChangeRecord(object, "update", name, old_value);
}
return value;
}
MUST_USE_RESULT static MaybeHandle<Object> SetFunctionLength(
Isolate* isolate, Handle<JSFunction> function, Handle<Object> value) {
Handle<Object> old_value;
bool is_observed = function->map()->is_observed();
if (is_observed) {
old_value = handle(Smi::FromInt(function->shared()->length()), isolate);
}
return ReplaceAccessorWithDataProperty(isolate, function,
isolate->factory()->length_string(),
value, is_observed, old_value);
}
void Accessors::FunctionLengthSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> val,
void Accessors::ObservedReconfigureToDataProperty(
v8::Local<v8::Name> key, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
HandleScope scope(isolate);
Handle<JSObject> receiver =
Handle<JSObject>::cast(Utils::OpenHandle(*info.This()));
Handle<JSObject> holder =
Handle<JSObject>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Name> name = Utils::OpenHandle(*key);
Handle<Object> value = Utils::OpenHandle(*val);
Handle<JSFunction> object =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
if (SetFunctionLength(isolate, object, value).is_null()) {
isolate->OptionalRescheduleException(false);
}
MaybeHandle<Object> result = ReplaceAccessorWithDataProperty(
isolate, receiver, holder, name, value, true);
if (result.is_null()) isolate->OptionalRescheduleException(false);
}
Handle<AccessorInfo> Accessors::FunctionLengthInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->length_string(),
&FunctionLengthGetter,
&FunctionLengthSetter,
return MakeAccessor(isolate, isolate->factory()->length_string(),
&FunctionLengthGetter, &ObservedReconfigureToDataProperty,
attributes);
}
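Because removed and added lines are interleaved above, the new setter is hard to read in place. De-interleaved, the added lines form (reconstructed from this hunk only):

void Accessors::ObservedReconfigureToDataProperty(
    v8::Local<v8::Name> key, v8::Local<v8::Value> val,
    const v8::PropertyCallbackInfo<void>& info) {
  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
  HandleScope scope(isolate);
  Handle<JSObject> receiver =
      Handle<JSObject>::cast(Utils::OpenHandle(*info.This()));
  Handle<JSObject> holder =
      Handle<JSObject>::cast(Utils::OpenHandle(*info.Holder()));
  Handle<Name> name = Utils::OpenHandle(*key);
  Handle<Object> value = Utils::OpenHandle(*val);
  MaybeHandle<Object> result = ReplaceAccessorWithDataProperty(
      isolate, receiver, holder, name, value, true);
  if (result.is_null()) isolate->OptionalRescheduleException(false);
}

The function-specific SetFunctionLength/ReplaceAccessorWithDataProperty pair is thus replaced by one shared, observation-aware setter used for both 'length' and 'name'.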
@ -1054,43 +874,10 @@ void Accessors::FunctionNameGetter(
info.GetReturnValue().Set(Utils::ToLocal(result));
}
MUST_USE_RESULT static MaybeHandle<Object> SetFunctionName(
Isolate* isolate, Handle<JSFunction> function, Handle<Object> value) {
Handle<Object> old_value;
bool is_observed = function->map()->is_observed();
if (is_observed) {
old_value = handle(function->shared()->name(), isolate);
}
return ReplaceAccessorWithDataProperty(isolate, function,
isolate->factory()->name_string(),
value, is_observed, old_value);
}
void Accessors::FunctionNameSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
HandleScope scope(isolate);
Handle<Object> value = Utils::OpenHandle(*val);
Handle<JSFunction> object =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
if (SetFunctionName(isolate, object, value).is_null()) {
isolate->OptionalRescheduleException(false);
}
}
Handle<AccessorInfo> Accessors::FunctionNameInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->name_string(),
&FunctionNameGetter,
&FunctionNameSetter,
return MakeAccessor(isolate, isolate->factory()->name_string(),
&FunctionNameGetter, &ObservedReconfigureToDataProperty,
attributes);
}
@ -1158,10 +945,10 @@ static int FindFunctionInFrame(JavaScriptFrame* frame,
}
namespace {
Handle<Object> GetFunctionArguments(Isolate* isolate,
Handle<JSFunction> function) {
if (function->shared()->native()) return isolate->factory()->null_value();
// Find the top invocation of the function by traversing frames.
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
JavaScriptFrame* frame = it.frame();
@ -1200,9 +987,14 @@ Handle<Object> GetFunctionArguments(Isolate* isolate,
return isolate->factory()->null_value();
}
} // namespace
Handle<Object> Accessors::FunctionGetArguments(Handle<JSFunction> function) {
return GetFunctionArguments(function->GetIsolate(), function);
Handle<JSObject> Accessors::FunctionGetArguments(Handle<JSFunction> function) {
Handle<Object> arguments =
GetFunctionArguments(function->GetIsolate(), function);
CHECK(arguments->IsJSObject());
return Handle<JSObject>::cast(arguments);
}
@ -1213,27 +1005,18 @@ void Accessors::FunctionArgumentsGetter(
HandleScope scope(isolate);
Handle<JSFunction> function =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Object> result = GetFunctionArguments(isolate, function);
Handle<Object> result =
function->shared()->native()
? Handle<Object>::cast(isolate->factory()->null_value())
: GetFunctionArguments(isolate, function);
info.GetReturnValue().Set(Utils::ToLocal(result));
}
void Accessors::FunctionArgumentsSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
// The function's arguments property is non-writable and non-configurable.
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::FunctionArgumentsInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->arguments_string(),
&FunctionArgumentsGetter,
&FunctionArgumentsSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->arguments_string(),
&FunctionArgumentsGetter, nullptr, attributes);
}
@ -1363,22 +1146,10 @@ void Accessors::FunctionCallerGetter(
}
void Accessors::FunctionCallerSetter(
v8::Local<v8::Name> name,
v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<void>& info) {
// The function's caller property is non-writable and non-configurable.
UNREACHABLE();
}
Handle<AccessorInfo> Accessors::FunctionCallerInfo(
Isolate* isolate, PropertyAttributes attributes) {
return MakeAccessor(isolate,
isolate->factory()->caller_string(),
&FunctionCallerGetter,
&FunctionCallerSetter,
attributes);
return MakeAccessor(isolate, isolate->factory()->caller_string(),
&FunctionCallerGetter, nullptr, attributes);
}
@ -1386,8 +1157,7 @@ Handle<AccessorInfo> Accessors::FunctionCallerInfo(
// Accessors::MakeModuleExport
//
static void ModuleGetExport(
v8::Local<v8::String> property,
static void ModuleGetExport(v8::Local<v8::Name> property,
const v8::PropertyCallbackInfo<v8::Value>& info) {
JSModule* instance = JSModule::cast(*v8::Utils::OpenHandle(*info.Holder()));
Context* context = Context::cast(instance->context());
@ -1397,7 +1167,7 @@ static void ModuleGetExport(
->Int32Value(info.GetIsolate()->GetCurrentContext())
.FromMaybe(-1);
if (slot < 0 || slot >= context->length()) {
Handle<String> name = v8::Utils::OpenHandle(*property);
Handle<Name> name = v8::Utils::OpenHandle(*property);
Handle<Object> exception = isolate->factory()->NewReferenceError(
MessageTemplate::kNotDefined, name);
@ -1406,7 +1176,7 @@ static void ModuleGetExport(
}
Object* value = context->get(slot);
if (value->IsTheHole()) {
Handle<String> name = v8::Utils::OpenHandle(*property);
Handle<Name> name = v8::Utils::OpenHandle(*property);
Handle<Object> exception = isolate->factory()->NewReferenceError(
MessageTemplate::kNotDefined, name);
@ -1417,33 +1187,15 @@ static void ModuleGetExport(
}
static void ModuleSetExport(
v8::Local<v8::String> property,
static void ModuleSetExport(v8::Local<v8::Name> property,
v8::Local<v8::Value> value,
const v8::PropertyCallbackInfo<v8::Value>& info) {
JSModule* instance = JSModule::cast(*v8::Utils::OpenHandle(*info.Holder()));
Context* context = Context::cast(instance->context());
DCHECK(context->IsModuleContext());
Isolate* isolate = instance->GetIsolate();
int slot = info.Data()
->Int32Value(info.GetIsolate()->GetCurrentContext())
.FromMaybe(-1);
if (slot < 0 || slot >= context->length()) {
Handle<String> name = v8::Utils::OpenHandle(*property);
Handle<Object> exception = isolate->factory()->NewReferenceError(
MessageTemplate::kNotDefined, name);
isolate->ScheduleThrow(*exception);
return;
}
Object* old_value = context->get(slot);
if (old_value->IsTheHole()) {
Handle<String> name = v8::Utils::OpenHandle(*property);
Handle<Object> exception = isolate->factory()->NewReferenceError(
MessageTemplate::kNotDefined, name);
const v8::PropertyCallbackInfo<void>& info) {
if (!info.ShouldThrowOnError()) return;
Handle<Name> name = v8::Utils::OpenHandle(*property);
Isolate* isolate = name->GetIsolate();
Handle<Object> exception =
isolate->factory()->NewTypeError(MessageTemplate::kNotDefined, name);
isolate->ScheduleThrow(*exception);
return;
}
context->set(slot, *v8::Utils::OpenHandle(*value));
}
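De-interleaved, the replacement setter reduces to (reconstructed from the added lines above):

static void ModuleSetExport(v8::Local<v8::Name> property,
                            v8::Local<v8::Value> value,
                            const v8::PropertyCallbackInfo<void>& info) {
  if (!info.ShouldThrowOnError()) return;
  Handle<Name> name = v8::Utils::OpenHandle(*property);
  Isolate* isolate = name->GetIsolate();
  Handle<Object> exception =
      isolate->factory()->NewTypeError(MessageTemplate::kNotDefined, name);
  isolate->ScheduleThrow(*exception);
}

In other words, writes to module exports through this path no longer update the context slot at all: they are silently ignored in sloppy mode and throw a TypeError (rather than the old ReferenceError) in strict mode.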
@ -1452,17 +1204,9 @@ Handle<AccessorInfo> Accessors::MakeModuleExport(
int index,
PropertyAttributes attributes) {
Isolate* isolate = name->GetIsolate();
Factory* factory = isolate->factory();
Handle<ExecutableAccessorInfo> info = factory->NewExecutableAccessorInfo();
info->set_property_attributes(attributes);
info->set_all_can_read(true);
info->set_all_can_write(true);
info->set_name(*name);
Handle<AccessorInfo> info = MakeAccessor(isolate, name, &ModuleGetExport,
&ModuleSetExport, attributes);
info->set_data(Smi::FromInt(index));
Handle<Object> getter = v8::FromCData(isolate, &ModuleGetExport);
Handle<Object> setter = v8::FromCData(isolate, &ModuleSetExport);
info->set_getter(*getter);
if (!(attributes & ReadOnly)) info->set_setter(*setter);
return info;
}

24
deps/v8/src/accessors.h

@ -15,7 +15,7 @@ namespace v8 {
namespace internal {
// Forward declarations.
class ExecutableAccessorInfo;
class AccessorInfo;
// The list of accessor descriptors. This is a second-order macro
// taking a macro to be applied to all accessor descriptor names.
@ -44,6 +44,12 @@ class ExecutableAccessorInfo;
V(ScriptIsEmbedderDebugScript) \
V(StringLength)
#define ACCESSOR_SETTER_LIST(V) \
V(ReconfigureToDataProperty) \
V(ObservedReconfigureToDataProperty) \
V(ArrayLengthSetter) \
V(FunctionPrototypeSetter)
// Accessors contains all predefined proxy accessors.
class Accessors : public AllStatic {
@ -53,16 +59,18 @@ class Accessors : public AllStatic {
static void name##Getter( \
v8::Local<v8::Name> name, \
const v8::PropertyCallbackInfo<v8::Value>& info); \
static void name##Setter( \
v8::Local<v8::Name> name, \
v8::Local<v8::Value> value, \
const v8::PropertyCallbackInfo<void>& info); \
static Handle<AccessorInfo> name##Info( \
Isolate* isolate, \
PropertyAttributes attributes);
ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION)
#undef ACCESSOR_INFO_DECLARATION
#define ACCESSOR_SETTER_DECLARATION(name) \
static void name(v8::Local<v8::Name> name, v8::Local<v8::Value> value, \
const v8::PropertyCallbackInfo<void>& info);
ACCESSOR_SETTER_LIST(ACCESSOR_SETTER_DECLARATION)
#undef ACCESSOR_SETTER_DECLARATION
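The setters shared across properties are now declared through a second-order macro, matching the existing ACCESSOR_INFO_LIST pattern. For readers unfamiliar with the X-macro idiom, a sketch of what the expansion yields (only two of the four names shown):

// ACCESSOR_SETTER_LIST(ACCESSOR_SETTER_DECLARATION) pastes the declaration
// once per listed name, producing e.g.:
static void ReconfigureToDataProperty(
    v8::Local<v8::Name> name, v8::Local<v8::Value> value,
    const v8::PropertyCallbackInfo<void>& info);
static void ArrayLengthSetter(
    v8::Local<v8::Name> name, v8::Local<v8::Value> value,
    const v8::PropertyCallbackInfo<void>& info);
// ...and likewise for the remaining ACCESSOR_SETTER_LIST entries.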
enum DescriptorId {
#define ACCESSOR_INFO_DECLARATION(name) \
k##name##Getter, \
@ -75,7 +83,7 @@ class Accessors : public AllStatic {
// Accessor functions called directly from the runtime system.
MUST_USE_RESULT static MaybeHandle<Object> FunctionSetPrototype(
Handle<JSFunction> object, Handle<Object> value);
static Handle<Object> FunctionGetArguments(Handle<JSFunction> object);
static Handle<JSObject> FunctionGetArguments(Handle<JSFunction> object);
// Accessor infos.
static Handle<AccessorInfo> MakeModuleExport(
@ -100,10 +108,6 @@ class Accessors : public AllStatic {
AccessorNameGetterCallback getter,
AccessorNameSetterCallback setter,
PropertyAttributes attributes);
static Handle<ExecutableAccessorInfo> CloneAccessor(
Isolate* isolate,
Handle<ExecutableAccessorInfo> accessor);
};
} // namespace internal

9
deps/v8/src/address-map.cc

@ -17,10 +17,10 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
Object* root = isolate->heap()->root(root_index);
if (!root->IsHeapObject()) continue;
// Omit root entries that can be written after initialization. They must
// not be referenced through the root list in the snapshot.
if (root->IsHeapObject() &&
isolate->heap()->RootCanBeTreatedAsConstant(root_index)) {
if (isolate->heap()->RootCanBeTreatedAsConstant(root_index)) {
HeapObject* heap_object = HeapObject::cast(root);
HashMap::Entry* entry = LookupEntry(map_, heap_object, false);
if (entry != NULL) {
@ -29,6 +29,11 @@ RootIndexMap::RootIndexMap(Isolate* isolate) {
} else {
SetValue(LookupEntry(map_, heap_object, true), i);
}
} else {
// Immortal immovable root objects are constant and allocated on the first
// page of old space. Non-constant roots cannot be immortal immovable. The
// root index map contains all immortal immovable root objects.
CHECK(!Heap::RootIsImmortalImmovable(root_index));
}
}
isolate->set_root_index_map(map_);
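Condensing the interleaved hunk, the rewritten constructor loop behaves like this (a sketch, assuming the helpers named in the hunk above):

for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
  Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
  Object* root = isolate->heap()->root(root_index);
  if (!root->IsHeapObject()) continue;
  if (isolate->heap()->RootCanBeTreatedAsConstant(root_index)) {
    // ...insert HeapObject::cast(root) into the index map...
  } else {
    // New invariant: a root that can be written after initialization must
    // not be one of the immortal immovable roots.
    CHECK(!Heap::RootIsImmortalImmovable(root_index));
  }
}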

5
deps/v8/src/api-experimental.cc

@ -122,5 +122,10 @@ void FastAccessorBuilder::CheckNotZeroOrJump(ValueId value_id,
FromApi(this)->CheckNotZeroOrJump(value_id, label_id);
}
FastAccessorBuilder::ValueId FastAccessorBuilder::Call(
v8::FunctionCallback callback, ValueId value_id) {
return FromApi(this)->Call(callback, value_id);
}
} // namespace experimental
} // namespace v8

270
deps/v8/src/api-natives.cc

@ -16,8 +16,8 @@ namespace internal {
namespace {
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> data);
Handle<ObjectTemplateInfo> data,
bool is_hidden_prototype);
MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
Handle<FunctionTemplateInfo> data,
@ -30,33 +30,37 @@ MaybeHandle<Object> Instantiate(Isolate* isolate, Handle<Object> data,
return InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(data), name);
} else if (data->IsObjectTemplateInfo()) {
return InstantiateObject(isolate, Handle<ObjectTemplateInfo>::cast(data));
return InstantiateObject(isolate, Handle<ObjectTemplateInfo>::cast(data),
false);
} else {
return data;
}
}
MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
Handle<JSObject> object,
Handle<Name> name,
Handle<Object> getter,
Handle<Object> setter,
PropertyAttributes attributes) {
if (!getter->IsUndefined()) {
MaybeHandle<Object> DefineAccessorProperty(
Isolate* isolate, Handle<JSObject> object, Handle<Name> name,
Handle<Object> getter, Handle<Object> setter, PropertyAttributes attributes,
bool force_instantiate) {
DCHECK(!getter->IsFunctionTemplateInfo() ||
!FunctionTemplateInfo::cast(*getter)->do_not_cache());
DCHECK(!setter->IsFunctionTemplateInfo() ||
!FunctionTemplateInfo::cast(*setter)->do_not_cache());
if (force_instantiate) {
if (getter->IsFunctionTemplateInfo()) {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, getter,
InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(getter)),
Object);
}
if (!setter->IsUndefined()) {
if (setter->IsFunctionTemplateInfo()) {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, setter,
InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(setter)),
Object);
}
}
RETURN_ON_EXCEPTION(isolate, JSObject::DefineAccessor(object, name, getter,
setter, attributes),
Object);
@ -148,17 +152,78 @@ Object* GetIntrinsic(Isolate* isolate, v8::Intrinsic intrinsic) {
return nullptr;
}
// Returns parent function template or null.
FunctionTemplateInfo* GetParent(FunctionTemplateInfo* data) {
Object* parent = data->parent_template();
return parent->IsUndefined() ? nullptr : FunctionTemplateInfo::cast(parent);
}
// Starting from given object template's constructor walk up the inheritance
// chain till a function template that has an instance template is found.
ObjectTemplateInfo* GetParent(ObjectTemplateInfo* data) {
Object* maybe_ctor = data->constructor();
if (maybe_ctor->IsUndefined()) return nullptr;
FunctionTemplateInfo* ctor = FunctionTemplateInfo::cast(maybe_ctor);
while (true) {
ctor = GetParent(ctor);
if (ctor == nullptr) return nullptr;
Object* maybe_obj = ctor->instance_template();
if (!maybe_obj->IsUndefined()) return ObjectTemplateInfo::cast(maybe_obj);
}
}
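The two GetParent overloads climb the template inheritance chain. The same traversal in a self-contained miniature (illustrative types only, not V8's):

struct ObjTmpl;
struct FnTmpl { FnTmpl* parent; ObjTmpl* instance; };
struct ObjTmpl { FnTmpl* constructor; };

// Starting from the object template's constructor, walk parent links until
// a function template with an instance template is found, as
// GetParent(ObjectTemplateInfo*) does above.
ObjTmpl* FindParentInstanceTemplate(ObjTmpl* t) {
  FnTmpl* ctor = t->constructor;
  if (ctor == nullptr) return nullptr;
  for (ctor = ctor->parent; ctor != nullptr; ctor = ctor->parent) {
    if (ctor->instance != nullptr) return ctor->instance;
  }
  return nullptr;
}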
template <typename TemplateInfoT>
MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
Handle<TemplateInfo> data) {
Handle<TemplateInfoT> data,
bool is_hidden_prototype) {
HandleScope scope(isolate);
// Disable access checks while instantiating the object.
AccessCheckDisableScope access_check_scope(isolate, obj);
// Walk the inheritance chain and copy all accessors to current object.
int max_number_of_properties = 0;
TemplateInfoT* info = *data;
while (info != nullptr) {
if (!info->property_accessors()->IsUndefined()) {
Object* props = info->property_accessors();
if (!props->IsUndefined()) {
Handle<Object> props_handle(props, isolate);
NeanderArray props_array(props_handle);
max_number_of_properties += props_array.length();
}
}
info = GetParent(info);
}
if (max_number_of_properties > 0) {
int valid_descriptors = 0;
// Use a temporary FixedArray to accumulate unique accessors.
Handle<FixedArray> array =
isolate->factory()->NewFixedArray(max_number_of_properties);
info = *data;
while (info != nullptr) {
// Accumulate accessors.
if (!info->property_accessors()->IsUndefined()) {
Handle<Object> props(info->property_accessors(), isolate);
valid_descriptors =
AccessorInfo::AppendUnique(props, array, valid_descriptors);
}
info = GetParent(info);
}
// Install accumulated accessors.
for (int i = 0; i < valid_descriptors; i++) {
Handle<AccessorInfo> accessor(AccessorInfo::cast(array->get(i)));
JSObject::SetAccessor(obj, accessor).Assert();
}
}
auto property_list = handle(data->property_list(), isolate);
if (property_list->IsUndefined()) return obj;
// TODO(dcarney): just use a FixedArray here.
NeanderArray properties(property_list);
if (properties.length() == 0) return obj;
HandleScope scope(isolate);
// Disable access checks while instantiating the object.
AccessCheckDisableScope access_check_scope(isolate, obj);
int i = 0;
for (int c = 0; c < data->number_of_properties(); c++) {
@ -171,16 +236,15 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
if (kind == kData) {
auto prop_data = handle(properties.get(i++), isolate);
RETURN_ON_EXCEPTION(isolate, DefineDataProperty(isolate, obj, name,
prop_data, attributes),
JSObject);
} else {
auto getter = handle(properties.get(i++), isolate);
auto setter = handle(properties.get(i++), isolate);
RETURN_ON_EXCEPTION(isolate,
DefineAccessorProperty(isolate, obj, name, getter,
setter, attributes),
RETURN_ON_EXCEPTION(
isolate, DefineAccessorProperty(isolate, obj, name, getter, setter,
attributes, is_hidden_prototype),
JSObject);
}
} else {
@ -202,14 +266,28 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
return obj;
}
void CacheTemplateInstantiation(Isolate* isolate, Handle<Smi> serial_number,
Handle<JSObject> object) {
auto cache = isolate->template_instantiations_cache();
auto new_cache = ObjectHashTable::Put(cache, serial_number, object);
isolate->native_context()->set_template_instantiations_cache(*new_cache);
}
void UncacheTemplateInstantiation(Isolate* isolate, Handle<Smi> serial_number) {
auto cache = isolate->template_instantiations_cache();
bool was_present = false;
auto new_cache = ObjectHashTable::Remove(cache, serial_number, &was_present);
DCHECK(was_present);
isolate->native_context()->set_template_instantiations_cache(*new_cache);
}
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> data) {
Handle<ObjectTemplateInfo> info,
bool is_hidden_prototype) {
// Enter a new scope. Recursion could otherwise create a lot of handles.
HandleScope scope(isolate);
// Fast path.
Handle<JSObject> result;
auto info = Handle<ObjectTemplateInfo>::cast(data);
auto constructor = handle(info->constructor(), isolate);
Handle<JSFunction> cons;
if (constructor->IsUndefined()) {
@ -219,29 +297,32 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
ASSIGN_RETURN_ON_EXCEPTION(
isolate, cons, InstantiateFunction(isolate, cons_templ), JSFunction);
}
auto serial_number = handle(Smi::cast(info->serial_number()), isolate);
if (serial_number->value()) {
// Probe cache.
auto cache = isolate->template_instantiations_cache();
Object* boilerplate = cache->Lookup(serial_number);
if (boilerplate->IsJSObject()) {
result = handle(JSObject::cast(boilerplate), isolate);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result, JSObject::DeepCopyApiBoilerplate(result), JSObject);
return scope.CloseAndEscape(result);
}
}
auto object = isolate->factory()->NewJSObject(cons);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result, ConfigureInstance(isolate, object, info), JSFunction);
isolate, result,
ConfigureInstance(isolate, object, info, is_hidden_prototype),
JSFunction);
// TODO(dcarney): is this necessary?
JSObject::MigrateSlowToFast(result, 0, "ApiNatives::InstantiateObject");
return scope.CloseAndEscape(result);
}
void CacheFunction(Isolate* isolate, Handle<Smi> serial_number,
Handle<JSFunction> function) {
auto cache = isolate->function_cache();
auto new_cache = ObjectHashTable::Put(cache, serial_number, function);
isolate->native_context()->set_function_cache(*new_cache);
}
void UncacheFunction(Isolate* isolate, Handle<Smi> serial_number) {
auto cache = isolate->function_cache();
bool was_present = false;
auto new_cache = ObjectHashTable::Remove(cache, serial_number, &was_present);
DCHECK(was_present);
isolate->native_context()->set_function_cache(*new_cache);
if (serial_number->value()) {
CacheTemplateInstantiation(isolate, serial_number, result);
ASSIGN_RETURN_ON_EXCEPTION(
isolate, result, JSObject::DeepCopyApiBoilerplate(result), JSObject);
}
return scope.CloseAndEscape(result);
}
@ -249,9 +330,9 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
Handle<FunctionTemplateInfo> data,
Handle<Name> name) {
auto serial_number = handle(Smi::cast(data->serial_number()), isolate);
if (serial_number->value()) {
// Probe cache.
if (!data->do_not_cache()) {
auto cache = isolate->function_cache();
auto cache = isolate->template_instantiations_cache();
Object* element = cache->Lookup(serial_number);
if (element->IsJSFunction()) {
return handle(JSFunction::cast(element), isolate);
@ -268,7 +349,8 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
ASSIGN_RETURN_ON_EXCEPTION(
isolate, prototype,
InstantiateObject(isolate,
Handle<ObjectTemplateInfo>::cast(prototype_templ)),
Handle<ObjectTemplateInfo>::cast(prototype_templ),
data->hidden_prototype()),
JSFunction);
}
auto parent = handle(data->parent_template(), isolate);
@ -296,15 +378,16 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
if (!name.is_null() && name->IsString()) {
function->shared()->set_name(*name);
}
if (!data->do_not_cache()) {
if (serial_number->value()) {
// Cache the function.
CacheFunction(isolate, serial_number, function);
CacheTemplateInstantiation(isolate, serial_number, function);
}
auto result = ConfigureInstance(isolate, function, data);
auto result =
ConfigureInstance(isolate, function, data, data->hidden_prototype());
if (result.is_null()) {
// Uncache on error.
if (!data->do_not_cache()) {
UncacheFunction(isolate, serial_number);
if (serial_number->value()) {
UncacheTemplateInstantiation(isolate, serial_number);
}
return MaybeHandle<JSFunction>();
}
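After this change, object boilerplates and functions share one template_instantiations_cache keyed by serial number, where serial number 0 means "do not cache" (see ObjectTemplateNew in api.cc below). The lifecycle, stitched together from the hunks above (a sketch, not contiguous source):

// 1. Probe: reuse a previous instantiation when one is cached.
Object* element =
    isolate->template_instantiations_cache()->Lookup(serial_number);
// 2. Publish after a successful build:
CacheTemplateInstantiation(isolate, serial_number, result);
// 3. Roll back if ConfigureInstance later fails:
UncacheTemplateInstantiation(isolate, serial_number);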
@ -364,23 +447,7 @@ MaybeHandle<JSObject> ApiNatives::InstantiateObject(
Handle<ObjectTemplateInfo> data) {
Isolate* isolate = data->GetIsolate();
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateObject(isolate, data);
}
MaybeHandle<FunctionTemplateInfo> ApiNatives::ConfigureInstance(
Isolate* isolate, Handle<FunctionTemplateInfo> desc,
Handle<JSObject> instance) {
// Configure the instance by adding the properties specified by the
// instance template.
if (desc->instance_template()->IsUndefined()) return desc;
InvokeScope invoke_scope(isolate);
Handle<ObjectTemplateInfo> instance_template(
ObjectTemplateInfo::cast(desc->instance_template()), isolate);
RETURN_ON_EXCEPTION(isolate, ::v8::internal::ConfigureInstance(
isolate, instance, instance_template),
FunctionTemplateInfo);
return desc;
return ::v8::internal::InstantiateObject(isolate, data, false);
}
@ -527,11 +594,6 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
map->set_is_undetectable();
}
// Mark as hidden for the __proto__ accessor if needed.
if (obj->hidden_prototype()) {
map->set_is_hidden_prototype();
}
// Mark as needs_access_check if needed.
if (obj->needs_access_check()) {
map->set_is_access_check_needed(true);
@ -548,73 +610,7 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
// Mark instance as callable in the map.
if (!obj->instance_call_handler()->IsUndefined()) {
map->set_is_callable();
map->set_is_constructor();
}
// Recursively copy parent instance templates' accessors,
// 'data' may be modified.
int max_number_of_additional_properties = 0;
int max_number_of_static_properties = 0;
FunctionTemplateInfo* info = *obj;
while (true) {
if (!info->instance_template()->IsUndefined()) {
Object* props = ObjectTemplateInfo::cast(info->instance_template())
->property_accessors();
if (!props->IsUndefined()) {
Handle<Object> props_handle(props, isolate);
NeanderArray props_array(props_handle);
max_number_of_additional_properties += props_array.length();
}
}
if (!info->property_accessors()->IsUndefined()) {
Object* props = info->property_accessors();
if (!props->IsUndefined()) {
Handle<Object> props_handle(props, isolate);
NeanderArray props_array(props_handle);
max_number_of_static_properties += props_array.length();
}
}
Object* parent = info->parent_template();
if (parent->IsUndefined()) break;
info = FunctionTemplateInfo::cast(parent);
}
Map::EnsureDescriptorSlack(map, max_number_of_additional_properties);
// Use a temporary FixedArray to accumulate static accessors
int valid_descriptors = 0;
Handle<FixedArray> array;
if (max_number_of_static_properties > 0) {
array = isolate->factory()->NewFixedArray(max_number_of_static_properties);
}
while (true) {
// Install instance descriptors
if (!obj->instance_template()->IsUndefined()) {
Handle<ObjectTemplateInfo> instance = Handle<ObjectTemplateInfo>(
ObjectTemplateInfo::cast(obj->instance_template()), isolate);
Handle<Object> props =
Handle<Object>(instance->property_accessors(), isolate);
if (!props->IsUndefined()) {
Map::AppendCallbackDescriptors(map, props);
}
}
// Accumulate static accessors
if (!obj->property_accessors()->IsUndefined()) {
Handle<Object> props = Handle<Object>(obj->property_accessors(), isolate);
valid_descriptors =
AccessorInfo::AppendUnique(props, array, valid_descriptors);
}
// Climb parent chain
Handle<Object> parent = Handle<Object>(obj->parent_template(), isolate);
if (parent->IsUndefined()) break;
obj = Handle<FunctionTemplateInfo>::cast(parent);
}
// Install accumulated static accessors
for (int i = 0; i < valid_descriptors; i++) {
Handle<AccessorInfo> accessor(AccessorInfo::cast(array->get(i)));
JSObject::SetAccessor(result, accessor).Assert();
map->set_is_constructor(true);
}
DCHECK(result->shared()->IsApiFunction());

4
deps/v8/src/api-natives.h

@ -25,10 +25,6 @@ class ApiNatives {
MUST_USE_RESULT static MaybeHandle<JSObject> InstantiateObject(
Handle<ObjectTemplateInfo> data);
MUST_USE_RESULT static MaybeHandle<FunctionTemplateInfo> ConfigureInstance(
Isolate* isolate, Handle<FunctionTemplateInfo> instance,
Handle<JSObject> data);
enum ApiInstanceType {
JavaScriptObjectType,
GlobalObjectType,

222
deps/v8/src/api.cc

@ -15,6 +15,7 @@
#include "include/v8-experimental.h"
#include "include/v8-profiler.h"
#include "include/v8-testing.h"
#include "src/accessors.h"
#include "src/api-experimental.h"
#include "src/api-natives.h"
#include "src/assert-scope.h"
@ -38,8 +39,8 @@
#include "src/global-handles.h"
#include "src/icu_util.h"
#include "src/isolate-inl.h"
#include "src/json-parser.h"
#include "src/messages.h"
#include "src/parsing/json-parser.h"
#include "src/parsing/parser.h"
#include "src/parsing/scanner-character-streams.h"
#include "src/pending-compilation-error-handler.h"
@ -58,6 +59,7 @@
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"
#include "src/startup-data-util.h"
#include "src/tracing/trace-event.h"
#include "src/unicode-inl.h"
#include "src/v8.h"
#include "src/v8threads.h"
@ -167,6 +169,7 @@ class CallDepthScope {
isolate_->IncrementJsCallsFromApiCounter();
isolate_->handle_scope_implementer()->IncrementCallDepth();
if (!context_.IsEmpty()) context_->Enter();
if (do_callback_) isolate_->FireBeforeCallEnteredCallback();
}
~CallDepthScope() {
if (!context_.IsEmpty()) context_->Exit();
@ -969,6 +972,9 @@ static void InitializeFunctionTemplate(
info->set_flag(0);
}
static Local<ObjectTemplate> ObjectTemplateNew(
i::Isolate* isolate, v8::Local<FunctionTemplate> constructor,
bool do_not_cache);
Local<ObjectTemplate> FunctionTemplate::PrototypeTemplate() {
i::Isolate* i_isolate = Utils::OpenHandle(this)->GetIsolate();
@ -976,8 +982,9 @@ Local<ObjectTemplate> FunctionTemplate::PrototypeTemplate() {
i::Handle<i::Object> result(Utils::OpenHandle(this)->prototype_template(),
i_isolate);
if (result->IsUndefined()) {
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(i_isolate);
result = Utils::OpenHandle(*ObjectTemplate::New(isolate));
// Do not cache prototype objects.
result = Utils::OpenHandle(
*ObjectTemplateNew(i_isolate, Local<FunctionTemplate>(), true));
Utils::OpenHandle(this)->set_prototype_template(*result);
}
return ToApiHandle<ObjectTemplate>(result);
@ -1119,21 +1126,23 @@ static i::Handle<i::AccessorInfo> SetAccessorInfoProperties(
return obj;
}
template <typename Getter, typename Setter>
static i::Handle<i::AccessorInfo> MakeAccessorInfo(
v8::Local<Name> name, Getter getter, Setter setter, v8::Local<Value> data,
v8::AccessControl settings, v8::PropertyAttribute attributes,
v8::Local<AccessorSignature> signature) {
v8::Local<AccessorSignature> signature, bool is_special_data_property) {
i::Isolate* isolate = Utils::OpenHandle(*name)->GetIsolate();
i::Handle<i::ExecutableAccessorInfo> obj =
isolate->factory()->NewExecutableAccessorInfo();
i::Handle<i::AccessorInfo> obj = isolate->factory()->NewAccessorInfo();
SET_FIELD_WRAPPED(obj, set_getter, getter);
if (is_special_data_property && setter == nullptr) {
setter = reinterpret_cast<Setter>(&i::Accessors::ReconfigureToDataProperty);
}
SET_FIELD_WRAPPED(obj, set_setter, setter);
if (data.IsEmpty()) {
data = v8::Undefined(reinterpret_cast<v8::Isolate*>(isolate));
}
obj->set_data(*Utils::OpenHandle(*data));
obj->set_is_special_data_property(is_special_data_property);
return SetAccessorInfoProperties(obj, name, settings, attributes, signature);
}
@ -1225,9 +1234,9 @@ Local<ObjectTemplate> ObjectTemplate::New() {
return New(i::Isolate::Current(), Local<FunctionTemplate>());
}
Local<ObjectTemplate> ObjectTemplate::New(
i::Isolate* isolate, v8::Local<FunctionTemplate> constructor) {
static Local<ObjectTemplate> ObjectTemplateNew(
i::Isolate* isolate, v8::Local<FunctionTemplate> constructor,
bool do_not_cache) {
// Changes to the environment cannot be captured in the snapshot. Expect no
// object templates when the isolate is created for serialization.
DCHECK(!isolate->serializer_enabled());
@ -1238,12 +1247,22 @@ Local<ObjectTemplate> ObjectTemplate::New(
i::Handle<i::ObjectTemplateInfo> obj =
i::Handle<i::ObjectTemplateInfo>::cast(struct_obj);
InitializeTemplate(obj, Consts::OBJECT_TEMPLATE);
int next_serial_number = 0;
if (!do_not_cache) {
next_serial_number = isolate->next_serial_number() + 1;
isolate->set_next_serial_number(next_serial_number);
}
obj->set_serial_number(i::Smi::FromInt(next_serial_number));
if (!constructor.IsEmpty())
obj->set_constructor(*Utils::OpenHandle(*constructor));
obj->set_internal_field_count(i::Smi::FromInt(0));
return Utils::ToLocal(obj);
}
Local<ObjectTemplate> ObjectTemplate::New(
i::Isolate* isolate, v8::Local<FunctionTemplate> constructor) {
return ObjectTemplateNew(isolate, constructor, false);
}
// Ensure that the object template has a constructor. If no
// constructor is available we create one.
@ -1264,39 +1283,20 @@ static i::Handle<i::FunctionTemplateInfo> EnsureConstructor(
}
static inline i::Handle<i::TemplateInfo> GetTemplateInfo(
i::Isolate* isolate,
Template* template_obj) {
return Utils::OpenHandle(template_obj);
}
// TODO(dcarney): remove this with ObjectTemplate::SetAccessor
static inline i::Handle<i::TemplateInfo> GetTemplateInfo(
i::Isolate* isolate,
ObjectTemplate* object_template) {
EnsureConstructor(isolate, object_template);
return Utils::OpenHandle(object_template);
}
template<typename Getter, typename Setter, typename Data, typename Template>
static bool TemplateSetAccessor(
Template* template_obj,
v8::Local<Name> name,
Getter getter,
Setter setter,
Data data,
template <typename Getter, typename Setter, typename Data, typename Template>
static bool TemplateSetAccessor(Template* template_obj, v8::Local<Name> name,
Getter getter, Setter setter, Data data,
AccessControl settings,
PropertyAttribute attribute,
v8::Local<AccessorSignature> signature) {
auto isolate = Utils::OpenHandle(template_obj)->GetIsolate();
v8::Local<AccessorSignature> signature,
bool is_special_data_property) {
auto info = Utils::OpenHandle(template_obj);
auto isolate = info->GetIsolate();
ENTER_V8(isolate);
i::HandleScope scope(isolate);
auto obj = MakeAccessorInfo(name, getter, setter, data, settings, attribute,
signature);
signature, is_special_data_property);
if (obj.is_null()) return false;
auto info = GetTemplateInfo(isolate, template_obj);
i::ApiNatives::AddNativeDataProperty(isolate, info, obj);
return true;
}
@ -1309,8 +1309,8 @@ void Template::SetNativeDataProperty(v8::Local<String> name,
PropertyAttribute attribute,
v8::Local<AccessorSignature> signature,
AccessControl settings) {
TemplateSetAccessor(
this, name, getter, setter, data, settings, attribute, signature);
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, true);
}
@ -1321,8 +1321,8 @@ void Template::SetNativeDataProperty(v8::Local<Name> name,
PropertyAttribute attribute,
v8::Local<AccessorSignature> signature,
AccessControl settings) {
TemplateSetAccessor(
this, name, getter, setter, data, settings, attribute, signature);
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, true);
}
@ -1344,8 +1344,8 @@ void ObjectTemplate::SetAccessor(v8::Local<String> name,
v8::Local<Value> data, AccessControl settings,
PropertyAttribute attribute,
v8::Local<AccessorSignature> signature) {
TemplateSetAccessor(
this, name, getter, setter, data, settings, attribute, signature);
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, i::FLAG_disable_old_api_accessors);
}
@ -1355,8 +1355,8 @@ void ObjectTemplate::SetAccessor(v8::Local<Name> name,
v8::Local<Value> data, AccessControl settings,
PropertyAttribute attribute,
v8::Local<AccessorSignature> signature) {
TemplateSetAccessor(
this, name, getter, setter, data, settings, attribute, signature);
TemplateSetAccessor(this, name, getter, setter, data, settings, attribute,
signature, i::FLAG_disable_old_api_accessors);
}
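The extra boolean threaded through TemplateSetAccessor lands in MakeAccessorInfo as is_special_data_property: always true for SetNativeDataProperty, and gated on --disable-old-api-accessors for the legacy SetAccessor. The embedder-facing call shape is unchanged; a minimal sketch (getter, property name, and value are hypothetical):

static void VersionGetter(v8::Local<v8::Name> name,
                          const v8::PropertyCallbackInfo<v8::Value>& info) {
  info.GetReturnValue().Set(
      v8::String::NewFromUtf8(info.GetIsolate(), "5.0.71.32"));
}

// With no setter supplied, MakeAccessorInfo now substitutes
// Accessors::ReconfigureToDataProperty for special data properties, so an
// assignment reconfigures the property to plain data instead of trapping.
v8::Local<v8::ObjectTemplate> tmpl = v8::ObjectTemplate::New(isolate);
tmpl->SetNativeDataProperty(
    v8::String::NewFromUtf8(isolate, "version"), VersionGetter);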
@ -1452,6 +1452,10 @@ void ObjectTemplate::SetAccessCheckCallback(AccessCheckCallback callback,
cons->set_needs_access_check(true);
}
void ObjectTemplate::SetAccessCheckCallback(
DeprecatedAccessCheckCallback callback, Local<Value> data) {
SetAccessCheckCallback(reinterpret_cast<AccessCheckCallback>(callback), data);
}
void ObjectTemplate::SetAccessCheckCallbacks(
NamedSecurityCallback named_callback,
@ -1602,32 +1606,7 @@ Local<Script> UnboundScript::BindToCurrentContext() {
function_info(i::SharedFunctionInfo::cast(*obj), obj->GetIsolate());
i::Isolate* isolate = obj->GetIsolate();
i::ScopeInfo* scope_info = function_info->scope_info();
i::Handle<i::JSReceiver> global(isolate->native_context()->global_object());
for (int i = 0; i < scope_info->StrongModeFreeVariableCount(); ++i) {
i::Handle<i::String> name_string(scope_info->StrongModeFreeVariableName(i));
i::ScriptContextTable::LookupResult result;
i::Handle<i::ScriptContextTable> script_context_table(
isolate->native_context()->script_context_table());
if (!i::ScriptContextTable::Lookup(script_context_table, name_string,
&result)) {
i::Handle<i::Name> name(scope_info->StrongModeFreeVariableName(i));
Maybe<bool> has = i::JSReceiver::HasProperty(global, name);
if (has.IsJust() && !has.FromJust()) {
i::PendingCompilationErrorHandler pending_error_handler_;
pending_error_handler_.ReportMessageAt(
scope_info->StrongModeFreeVariableStartPosition(i),
scope_info->StrongModeFreeVariableEndPosition(i),
i::MessageTemplate::kStrongUnboundGlobal, name_string,
i::kReferenceError);
i::Handle<i::Script> script(i::Script::cast(function_info->script()));
pending_error_handler_.ThrowPendingError(isolate, script);
isolate->ReportPendingMessages();
isolate->OptionalRescheduleException(true);
return Local<Script>();
}
}
}
i::Handle<i::JSFunction> function =
obj->GetIsolate()->factory()->NewFunctionFromSharedFunctionInfo(
function_info, isolate->native_context());
@ -1708,6 +1687,7 @@ MaybeLocal<Value> Script::Run(Local<Context> context) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, "v8::Script::Run()", Value)
i::AggregatingHistogramTimerScope timer(isolate->counters()->compile_lazy());
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT0("v8", "V8.Execute");
auto fun = i::Handle<i::JSFunction>::cast(Utils::OpenHandle(this));
i::Handle<i::Object> receiver(isolate->global_proxy(), isolate);
Local<Value> result;
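The TRACE_EVENT0 calls sprinkled through these hunks come from the src/tracing/trace-event.h include added above; the macro opens a scoped trace entry that closes with the enclosing block. The pattern, sketched with an example category/name pair:

void CompileAndRun() {
  TRACE_EVENT0("v8", "V8.Execute");  // scoped: spans from here to the '}'
  // ...compile/run work happens inside the traced scope...
}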
@ -1761,6 +1741,7 @@ MaybeLocal<UnboundScript> ScriptCompiler::CompileUnboundInternal(
i::Handle<i::SharedFunctionInfo> result;
{
i::HistogramTimerScope total(isolate->counters()->compile_script(), true);
TRACE_EVENT0("v8", "V8.CompileScript");
i::Handle<i::Object> name_obj;
i::Handle<i::Object> source_map_url;
int line_offset = 0;
@ -2930,11 +2911,11 @@ Local<String> Value::ToDetailString(Isolate* isolate) const {
MaybeLocal<Object> Value::ToObject(Local<Context> context) const {
auto obj = Utils::OpenHandle(this);
if (obj->IsJSObject()) return ToApiHandle<Object>(obj);
if (obj->IsJSReceiver()) return ToApiHandle<Object>(obj);
PREPARE_FOR_EXECUTION(context, "ToObject", Object);
Local<Object> result;
has_pending_exception =
!ToLocal<Object>(i::Execution::ToObject(isolate, obj), &result);
!ToLocal<Object>(i::Object::ToObject(isolate, obj), &result);
RETURN_ON_FAILED_EXECUTION(Object);
RETURN_ESCAPED(result);
}
@ -3308,16 +3289,14 @@ double Value::NumberValue() const {
Maybe<int64_t> Value::IntegerValue(Local<Context> context) const {
auto obj = Utils::OpenHandle(this);
i::Handle<i::Object> num;
if (obj->IsNumber()) {
num = obj;
} else {
return Just(NumberToInt64(*obj));
}
PREPARE_FOR_EXECUTION_PRIMITIVE(context, "IntegerValue", int64_t);
i::Handle<i::Object> num;
has_pending_exception = !i::Object::ToInteger(isolate, obj).ToHandle(&num);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(int64_t);
}
return Just(num->IsSmi() ? static_cast<int64_t>(i::Smi::cast(*num)->value())
: static_cast<int64_t>(num->Number()));
return Just(NumberToInt64(*num));
}
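De-interleaved, the new IntegerValue reads (reconstructed from the added lines of this hunk): numbers take the fast path, everything else goes through Object::ToInteger, and both paths funnel into NumberToInt64.

Maybe<int64_t> Value::IntegerValue(Local<Context> context) const {
  auto obj = Utils::OpenHandle(this);
  if (obj->IsNumber()) {
    return Just(NumberToInt64(*obj));
  }
  PREPARE_FOR_EXECUTION_PRIMITIVE(context, "IntegerValue", int64_t);
  i::Handle<i::Object> num;
  has_pending_exception = !i::Object::ToInteger(isolate, obj).ToHandle(&num);
  RETURN_ON_FAILED_EXECUTION_PRIMITIVE(int64_t);
  return Just(NumberToInt64(*num));
}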
@ -3559,7 +3538,8 @@ static i::MaybeHandle<i::Object> DefineObjectProperty(
isolate, js_object, key, &success, i::LookupIterator::OWN);
if (!success) return i::MaybeHandle<i::Object>();
return i::JSObject::DefineOwnPropertyIgnoreAttributes(&it, value, attrs);
return i::JSObject::DefineOwnPropertyIgnoreAttributes(
&it, value, attrs, i::JSObject::FORCE_FIELD);
}
@ -3600,8 +3580,27 @@ bool v8::Object::ForceSet(v8::Local<Value> key, v8::Local<Value> value,
Maybe<bool> v8::Object::SetPrivate(Local<Context> context, Local<Private> key,
Local<Value> value) {
return DefineOwnProperty(context, Local<Name>(reinterpret_cast<Name*>(*key)),
value, DontEnum);
PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::SetPrivate()", bool);
auto self = Utils::OpenHandle(this);
auto key_obj = Utils::OpenHandle(reinterpret_cast<Name*>(*key));
auto value_obj = Utils::OpenHandle(*value);
if (self->IsJSProxy()) {
i::PropertyDescriptor desc;
desc.set_writable(true);
desc.set_enumerable(false);
desc.set_configurable(true);
desc.set_value(value_obj);
return i::JSProxy::SetPrivateProperty(
isolate, i::Handle<i::JSProxy>::cast(self),
i::Handle<i::Symbol>::cast(key_obj), &desc, i::Object::DONT_THROW);
}
auto js_object = i::Handle<i::JSObject>::cast(self);
i::LookupIterator it(js_object, key_obj);
has_pending_exception = i::JSObject::DefineOwnPropertyIgnoreAttributes(
&it, value_obj, i::DONT_ENUM)
.is_null();
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return Just(true);
}
@ -3753,8 +3752,7 @@ MaybeLocal<Array> v8::Object::GetPropertyNames(Local<Context> context) {
auto self = Utils::OpenHandle(this);
i::Handle<i::FixedArray> value;
has_pending_exception =
!i::JSReceiver::GetKeys(self, i::JSReceiver::INCLUDE_PROTOS,
i::ENUMERABLE_STRINGS)
!i::JSReceiver::GetKeys(self, i::INCLUDE_PROTOS, i::ENUMERABLE_STRINGS)
.ToHandle(&value);
RETURN_ON_FAILED_EXECUTION(Array);
// Because we use caching to speed up enumeration it is important
@ -3776,8 +3774,8 @@ MaybeLocal<Array> v8::Object::GetOwnPropertyNames(Local<Context> context) {
PREPARE_FOR_EXECUTION(context, "v8::Object::GetOwnPropertyNames()", Array);
auto self = Utils::OpenHandle(this);
i::Handle<i::FixedArray> value;
has_pending_exception = !i::JSReceiver::GetKeys(self, i::JSReceiver::OWN_ONLY,
i::ENUMERABLE_STRINGS)
has_pending_exception =
!i::JSReceiver::GetKeys(self, i::OWN_ONLY, i::ENUMERABLE_STRINGS)
.ToHandle(&value);
RETURN_ON_FAILED_EXECUTION(Array);
// Because we use caching to speed up enumeration it is important
@ -3921,7 +3919,7 @@ static Maybe<bool> ObjectSetAccessor(Local<Context> context, Object* self,
i::Handle<i::JSObject>::cast(Utils::OpenHandle(self));
v8::Local<AccessorSignature> signature;
auto info = MakeAccessorInfo(name, getter, setter, data, settings, attributes,
signature);
signature, i::FLAG_disable_old_api_accessors);
if (info.is_null()) return Nothing<bool>();
bool fast = obj->HasFastProperties();
i::Handle<i::Object> result;
@ -4282,6 +4280,7 @@ MaybeLocal<Value> Object::CallAsFunction(Local<Context> context,
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, "v8::Object::CallAsFunction()",
Value);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT0("v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
auto recv_obj = Utils::OpenHandle(*recv);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@ -4308,6 +4307,7 @@ MaybeLocal<Value> Object::CallAsConstructor(Local<Context> context, int argc,
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context,
"v8::Object::CallAsConstructor()", Value);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT0("v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@ -4357,6 +4357,7 @@ MaybeLocal<Object> Function::NewInstance(Local<Context> context, int argc,
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, "v8::Function::NewInstance()",
Object);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT0("v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@ -4380,6 +4381,7 @@ MaybeLocal<v8::Value> Function::Call(Local<Context> context,
v8::Local<v8::Value> argv[]) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, "v8::Function::Call()", Value);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT0("v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@ -7378,6 +7380,20 @@ void Isolate::SetEventLogger(LogEventCallback that) {
}
void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
if (callback == NULL) return;
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->AddBeforeCallEnteredCallback(callback);
}
void Isolate::RemoveBeforeCallEnteredCallback(
BeforeCallEnteredCallback callback) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
isolate->RemoveBeforeCallEnteredCallback(callback);
}
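These wrap the new v8::Isolate API pair fired from CallDepthScope (hooked earlier in this file). A minimal embedder sketch (assumes an already-initialized isolate; names are examples):

static void OnBeforeCallEntered(v8::Isolate* isolate) {
  // Runs just before V8 enters a script invocation through the API.
}

void InstallHook(v8::Isolate* isolate) {
  isolate->AddBeforeCallEnteredCallback(OnBeforeCallEntered);
  // ...later, when the hook is no longer wanted:
  isolate->RemoveBeforeCallEnteredCallback(OnBeforeCallEntered);
}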
void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
if (callback == NULL) return;
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
@ -7391,6 +7407,19 @@ void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
}
void Isolate::AddCallCompletedCallback(
DeprecatedCallCompletedCallback callback) {
AddCallCompletedCallback(reinterpret_cast<CallCompletedCallback>(callback));
}
void Isolate::RemoveCallCompletedCallback(
DeprecatedCallCompletedCallback callback) {
RemoveCallCompletedCallback(
reinterpret_cast<CallCompletedCallback>(callback));
}
void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
if (callback == NULL) return;
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
@ -7483,6 +7512,7 @@ void Isolate::LowMemoryNotification() {
{
i::HistogramTimerScope idle_notification_scope(
isolate->counters()->gc_low_memory_notification());
TRACE_EVENT0("v8", "V8.GCLowMemoryNotification");
isolate->heap()->CollectAllAvailableGarbage("low memory notification");
}
}
@ -8057,6 +8087,9 @@ void CpuProfiler::SetSamplingInterval(int us) {
base::TimeDelta::FromMicroseconds(us));
}
void CpuProfiler::CollectSample() {
reinterpret_cast<i::CpuProfiler*>(this)->CollectSample();
}
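CollectSample lets an embedder force an out-of-band sample between timer ticks. A usage sketch (title handling and timing are the embedder's choice):

v8::CpuProfiler* profiler = isolate->GetCpuProfiler();
v8::Local<v8::String> title = v8::String::NewFromUtf8(isolate, "startup");
profiler->StartProfiling(title, /*record_samples=*/true);
profiler->CollectSample();  // take one sample right now
v8::CpuProfile* profile = profiler->StopProfiling(title);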
void CpuProfiler::StartProfiling(Local<String> title, bool record_samples) {
reinterpret_cast<i::CpuProfiler*>(this)->StartProfiling(
@ -8285,6 +8318,23 @@ SnapshotObjectId HeapProfiler::GetHeapStats(OutputStream* stream,
}
bool HeapProfiler::StartSamplingHeapProfiler(uint64_t sample_interval,
int stack_depth) {
return reinterpret_cast<i::HeapProfiler*>(this)
->StartSamplingHeapProfiler(sample_interval, stack_depth);
}
void HeapProfiler::StopSamplingHeapProfiler() {
reinterpret_cast<i::HeapProfiler*>(this)->StopSamplingHeapProfiler();
}
AllocationProfile* HeapProfiler::GetAllocationProfile() {
return reinterpret_cast<i::HeapProfiler*>(this)->GetAllocationProfile();
}
void HeapProfiler::DeleteAllHeapSnapshots() {
reinterpret_cast<i::HeapProfiler*>(this)->DeleteAllSnapshots();
}
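Driving the new sampling heap profiler end to end looks roughly like this (interval and depth are arbitrary example values):

v8::HeapProfiler* hp = isolate->GetHeapProfiler();
hp->StartSamplingHeapProfiler(64 * 1024 /* avg bytes between samples */,
                              16 /* max stack depth */);
// ...run the workload to be profiled...
v8::AllocationProfile* profile = hp->GetAllocationProfile();
// Traverse profile->GetRootNode() here; the caller owns the returned object.
delete profile;
hp->StopSamplingHeapProfiler();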

6
deps/v8/src/arguments.cc

@ -71,15 +71,13 @@ v8::Local<v8::Value> FunctionCallbackArguments::Call(FunctionCallback f) {
#define WRITE_CALL_2_VOID(Function, ReturnValue, Arg1, Arg2) \
void PropertyCallbackArguments::Call(Function f, \
Arg1 arg1, \
Arg2 arg2) { \
void PropertyCallbackArguments::Call(Function f, Arg1 arg1, Arg2 arg2) { \
Isolate* isolate = this->isolate(); \
VMState<EXTERNAL> state(isolate); \
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f)); \
PropertyCallbackInfo<ReturnValue> info(begin()); \
f(arg1, arg2, info); \
}
}
FOR_EACH_CALLBACK_TABLE_MAPPING_0(WRITE_CALL_0)

41
deps/v8/src/arguments.h

@ -152,17 +152,19 @@ class PropertyCallbackArguments
static const int kReturnValueDefaultValueIndex =
T::kReturnValueDefaultValueIndex;
static const int kIsolateIndex = T::kIsolateIndex;
static const int kShouldThrowOnErrorIndex = T::kShouldThrowOnErrorIndex;
PropertyCallbackArguments(Isolate* isolate,
Object* data,
Object* self,
JSObject* holder)
PropertyCallbackArguments(Isolate* isolate, Object* data, Object* self,
JSObject* holder, Object::ShouldThrow should_throw)
: Super(isolate) {
Object** values = this->begin();
values[T::kThisIndex] = self;
values[T::kHolderIndex] = holder;
values[T::kDataIndex] = data;
values[T::kIsolateIndex] = reinterpret_cast<Object*>(isolate);
values[T::kShouldThrowOnErrorIndex] =
Smi::FromInt(should_throw == Object::THROW_ON_ERROR ? 1 : 0);
// Here the hole is set as the default value.
// It cannot escape into JS as it is removed in Call below.
values[T::kReturnValueDefaultValueIndex] =
@ -218,13 +220,10 @@ class FunctionCallbackArguments
static const int kCalleeIndex = T::kCalleeIndex;
static const int kContextSaveIndex = T::kContextSaveIndex;
FunctionCallbackArguments(internal::Isolate* isolate,
internal::Object* data,
internal::JSFunction* callee,
internal::Object* holder,
internal::Object** argv,
int argc,
bool is_construct_call)
FunctionCallbackArguments(internal::Isolate* isolate, internal::Object* data,
internal::HeapObject* callee,
internal::Object* holder, internal::Object** argv,
int argc, bool is_construct_call)
: Super(isolate),
argv_(argv),
argc_(argc),
@ -240,7 +239,8 @@ class FunctionCallbackArguments
values[T::kReturnValueDefaultValueIndex] =
isolate->heap()->the_hole_value();
values[T::kReturnValueIndex] = isolate->heap()->the_hole_value();
DCHECK(values[T::kCalleeIndex]->IsJSFunction());
DCHECK(values[T::kCalleeIndex]->IsJSFunction() ||
values[T::kCalleeIndex]->IsFunctionTemplateInfo());
DCHECK(values[T::kHolderIndex]->IsHeapObject());
DCHECK(values[T::kIsolateIndex]->IsSmi());
}
@ -271,20 +271,23 @@ double ClobberDoubleRegisters(double x1, double x2, double x3, double x4);
#define CLOBBER_DOUBLE_REGISTERS()
#endif
#define RUNTIME_FUNCTION_RETURNS_TYPE(Type, Name) \
static INLINE(Type __RT_impl_##Name(Arguments args, Isolate* isolate)); \
Type Name(int args_length, Object** args_object, Isolate* isolate) { \
static INLINE(Type __RT_impl_##Name(Arguments args, Isolate* isolate)); \
Type Name(int args_length, Object** args_object, Isolate* isolate) { \
CLOBBER_DOUBLE_REGISTERS(); \
RuntimeCallStats* stats = isolate->counters()->runtime_call_stats(); \
RuntimeCallTimerScope timer(isolate, &stats->Name); \
Arguments args(args_length, args_object); \
return __RT_impl_##Name(args, isolate); \
} \
static Type __RT_impl_##Name(Arguments args, Isolate* isolate)
Type value = __RT_impl_##Name(args, isolate); \
return value; \
} \
static Type __RT_impl_##Name(Arguments args, Isolate* isolate)
#define RUNTIME_FUNCTION(Name) RUNTIME_FUNCTION_RETURNS_TYPE(Object*, Name)
#define RUNTIME_FUNCTION_RETURN_PAIR(Name) \
RUNTIME_FUNCTION_RETURNS_TYPE(ObjectPair, Name)
#define RUNTIME_FUNCTION_RETURN_TRIPLE(Name) \
RUNTIME_FUNCTION_RETURNS_TYPE(ObjectTriple, Name)
} // namespace internal
} // namespace v8
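For reference, mechanically expanding the revised macro for a hypothetical Runtime_Example (the name is invented for illustration) gives roughly:

static INLINE(Object* __RT_impl_Runtime_Example(Arguments args, Isolate* isolate));
Object* Runtime_Example(int args_length, Object** args_object, Isolate* isolate) {
  CLOBBER_DOUBLE_REGISTERS();
  RuntimeCallStats* stats = isolate->counters()->runtime_call_stats();
  RuntimeCallTimerScope timer(isolate, &stats->Runtime_Example);
  Arguments args(args_length, args_object);
  Object* value = __RT_impl_Runtime_Example(args, isolate);
  return value;  // the result is now captured in a named local, per the
                 // revised macro
}
static Object* __RT_impl_Runtime_Example(Arguments args, Isolate* isolate)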

27
deps/v8/src/arm/assembler-arm-inl.h

@@ -138,8 +138,8 @@ void RelocInfo::set_target_object(Object* target,
if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
host() != NULL &&
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWrite(
host(), &Memory::Object_at(pc_), HeapObject::cast(target));
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
}
}
@@ -197,10 +197,8 @@ void RelocInfo::set_target_cell(Cell* cell,
Address address = cell->address() + Cell::kValueOffset;
Memory::Address_at(pc_) = address;
if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
// TODO(1550) We are passing NULL as a slot because a cell can never be on
// an evacuation candidate.
host()->GetHeap()->incremental_marking()->RecordWrite(
host(), NULL, cell);
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
cell);
}
}
@@ -263,23 +261,6 @@ void RelocInfo::WipeOut() {
}
bool RelocInfo::IsPatchedReturnSequence() {
Instr current_instr = Assembler::instr_at(pc_);
Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
// A patched return sequence is:
// ldr ip, [pc, #0]
// blx ip
return Assembler::IsLdrPcImmediateOffset(current_instr) &&
Assembler::IsBlxReg(next_instr);
}
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
Instr current_instr = Assembler::instr_at(pc_);
return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {

47
deps/v8/src/arm/assembler-arm.cc

@@ -82,7 +82,7 @@ static unsigned CpuFeaturesImpliedByCompiler() {
void CpuFeatures::ProbeImpl(bool cross_compile) {
supported_ |= CpuFeaturesImpliedByCompiler();
cache_line_size_ = 64;
dcache_line_size_ = 64;
// Only use statically determined features for cross compile (snapshot).
if (cross_compile) return;
@@ -137,7 +137,7 @@ void CpuFeatures::ProbeImpl(bool cross_compile) {
if (cpu.implementer() == base::CPU::ARM &&
(cpu.part() == base::CPU::ARM_CORTEX_A5 ||
cpu.part() == base::CPU::ARM_CORTEX_A9)) {
cache_line_size_ = 32;
dcache_line_size_ = 32;
}
if (FLAG_enable_32dregs && cpu.has_vfp3_d32()) supported_ |= 1u << VFP32DREGS;
@@ -1947,6 +1947,16 @@ void Assembler::uxtah(Register dst, Register src1, Register src2, int rotate,
}
void Assembler::rbit(Register dst, Register src, Condition cond) {
// Instruction details available in ARM DDI 0406C.b, A8.8.144.
// cond(31-28) | 011011111111(27-16) | Rd(15-12) | 11110011(11-4) | Rm(3-0)
DCHECK(IsEnabled(ARMv7));
DCHECK(!dst.is(pc));
DCHECK(!src.is(pc));
emit(cond | 0x6FF * B16 | dst.code() * B12 | 0xF3 * B4 | src.code());
}
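rbit reverses the bit order of a 32-bit register; the simulator change further down models it with base::bits::ReverseBits. A standalone sketch of the same computation via the classic swap-halves reduction:

#include <cstdint>
#include <cstdio>

uint32_t ReverseBits32(uint32_t v) {
  v = ((v >> 1) & 0x55555555u) | ((v & 0x55555555u) << 1);  // swap odd/even bits
  v = ((v >> 2) & 0x33333333u) | ((v & 0x33333333u) << 2);  // swap bit pairs
  v = ((v >> 4) & 0x0F0F0F0Fu) | ((v & 0x0F0F0F0Fu) << 4);  // swap nibbles
  v = ((v >> 8) & 0x00FF00FFu) | ((v & 0x00FF00FFu) << 8);  // swap bytes
  return (v >> 16) | (v << 16);                             // swap half-words
}

int main() {
  std::printf("%08x\n", ReverseBits32(0x00000001u));  // prints 80000000
  return 0;
}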
// Status register access instructions.
void Assembler::mrs(Register dst, SRegister s, Condition cond) {
DCHECK(!dst.is(pc));
@@ -2135,6 +2145,21 @@ void Assembler::svc(uint32_t imm24, Condition cond) {
}
void Assembler::dmb(BarrierOption option) {
emit(kSpecialCondition | 0x57ff*B12 | 5*B4 | option);
}
void Assembler::dsb(BarrierOption option) {
emit(kSpecialCondition | 0x57ff*B12 | 4*B4 | option);
}
void Assembler::isb(BarrierOption option) {
emit(kSpecialCondition | 0x57ff*B12 | 6*B4 | option);
}
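As a sanity check on these encodings (kSpecialCondition is ARM's 0b1111 condition field): per ARM DDI 0406C the three barriers share an 0xF57FF0xx pattern, differing only in bits 7:4, with the BarrierOption from constants-arm.h in bits 3:0. A standalone recomputation, with the constants restated locally:

#include <cstdint>
#include <cstdio>

constexpr uint32_t kSpecialCondition = 0xFu << 28;
constexpr uint32_t B12 = 1u << 12;
constexpr uint32_t B4 = 1u << 4;
constexpr uint32_t ISH = 0xB;  // inner-shareable, loads and stores

int main() {
  // Mirrors emit(kSpecialCondition | 0x57ff*B12 | 5*B4 | option) for dmb.
  uint32_t dmb_ish = kSpecialCondition | 0x57FFu * B12 | 5u * B4 | ISH;
  std::printf("dmb ish = %08X\n", dmb_ish);  // F57FF05B
  return 0;
}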
// Coprocessor instructions.
void Assembler::cdp(Coprocessor coproc,
int opcode_1,
@@ -2923,6 +2948,24 @@ void Assembler::vcvt_f64_u32(const DwVfpRegister dst,
}
void Assembler::vcvt_f32_u32(const SwVfpRegister dst, const SwVfpRegister src,
VFPConversionMode mode, const Condition cond) {
emit(EncodeVCVT(F32, dst.code(), U32, src.code(), mode, cond));
}
void Assembler::vcvt_s32_f32(const SwVfpRegister dst, const SwVfpRegister src,
VFPConversionMode mode, const Condition cond) {
emit(EncodeVCVT(S32, dst.code(), F32, src.code(), mode, cond));
}
void Assembler::vcvt_u32_f32(const SwVfpRegister dst, const SwVfpRegister src,
VFPConversionMode mode, const Condition cond) {
emit(EncodeVCVT(U32, dst.code(), F32, src.code(), mode, cond));
}
void Assembler::vcvt_s32_f64(const SwVfpRegister dst,
const DwVfpRegister src,
VFPConversionMode mode,

23
deps/v8/src/arm/assembler-arm.h

@@ -285,6 +285,7 @@ struct QwNeonRegister {
typedef QwNeonRegister QuadRegister;
typedef QwNeonRegister Simd128Register;
// Support for the VFP registers s0 to s31 (d0 to d15).
// Note that "s(N):s(N+1)" is the same as "d(N/2)".
@@ -950,6 +951,9 @@ class Assembler : public AssemblerBase {
void uxtah(Register dst, Register src1, Register src2, int rotate = 0,
Condition cond = al);
// Reverse the bits in a register.
void rbit(Register dst, Register src, Condition cond = al);
// Status register access instructions
void mrs(Register dst, SRegister s, Condition cond = al);
@@ -986,6 +990,11 @@ class Assembler : public AssemblerBase {
void bkpt(uint32_t imm16); // v5 and above
void svc(uint32_t imm24, Condition cond = al);
// Synchronization instructions
void dmb(BarrierOption option);
void dsb(BarrierOption option);
void isb(BarrierOption option);
// Coprocessor instructions
void cdp(Coprocessor coproc, int opcode_1,
@@ -1125,6 +1134,18 @@ class Assembler : public AssemblerBase {
const SwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
const Condition cond = al);
void vcvt_f32_u32(const SwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
const Condition cond = al);
void vcvt_s32_f32(const SwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
const Condition cond = al);
void vcvt_u32_f32(const SwVfpRegister dst,
const SwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
const Condition cond = al);
void vcvt_s32_f64(const SwVfpRegister dst,
const DwVfpRegister src,
VFPConversionMode mode = kDefaultRoundToZero,
@@ -1336,7 +1357,7 @@ class Assembler : public AssemblerBase {
// Record a deoptimization reason that can be used by a log or cpu profiler.
// Use --trace-deopt to enable.
void RecordDeoptReason(const int reason, const SourcePosition position);
void RecordDeoptReason(const int reason, int raw_position);
// Record the emission of a constant pool.
//

639
deps/v8/src/arm/builtins-arm.cc

@@ -136,6 +136,108 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
}
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- lr : return address
// -- sp[(argc - n) * 8] : arg[n] (zero-based)
// -- sp[(argc + 1) * 8] : receiver
// -----------------------------------
Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
Heap::RootListIndex const root_index =
(kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
: Heap::kMinusInfinityValueRootIndex;
DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
// Load the accumulator with the default return value (either -Infinity or
// +Infinity), with the tagged value in r1 and the double value in d1.
__ LoadRoot(r1, root_index);
__ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
// Remember how many slots to drop (including the receiver).
__ add(r4, r0, Operand(1));
Label done_loop, loop;
__ bind(&loop);
{
// Check if all parameters are done.
__ sub(r0, r0, Operand(1), SetCC);
__ b(lt, &done_loop);
// Load the next parameter tagged value into r2.
__ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
// Load the double value of the parameter into d2, maybe converting the
// parameter to a number first using the ToNumberStub if necessary.
Label convert, convert_smi, convert_number, done_convert;
__ bind(&convert);
__ JumpIfSmi(r2, &convert_smi);
__ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
__ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
{
// Parameter is not a Number, use the ToNumberStub to convert it.
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r0);
__ SmiTag(r4);
__ Push(r0, r1, r4);
__ mov(r0, r2);
ToNumberStub stub(masm->isolate());
__ CallStub(&stub);
__ mov(r2, r0);
__ Pop(r0, r1, r4);
{
// Restore the double accumulator value (d1).
Label done_restore;
__ SmiToDouble(d1, r1);
__ JumpIfSmi(r1, &done_restore);
__ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
__ bind(&done_restore);
}
__ SmiUntag(r4);
__ SmiUntag(r0);
}
__ b(&convert);
__ bind(&convert_number);
__ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
__ b(&done_convert);
__ bind(&convert_smi);
__ SmiToDouble(d2, r2);
__ bind(&done_convert);
// Perform the actual comparison with the accumulator value on the left hand
// side (d1) and the next parameter value on the right hand side (d2).
Label compare_nan, compare_swap;
__ VFPCompareAndSetFlags(d1, d2);
__ b(cc_done, &loop);
__ b(cc_swap, &compare_swap);
__ b(vs, &compare_nan);
// Left and right hand side are equal, check for -0 vs. +0.
__ VmovHigh(ip, reg);
__ cmp(ip, Operand(0x80000000));
__ b(ne, &loop);
// Result is on the right hand side.
__ bind(&compare_swap);
__ vmov(d1, d2);
__ mov(r1, r2);
__ b(&loop);
// At least one side is NaN, which means that the result will be NaN too.
__ bind(&compare_nan);
__ LoadRoot(r1, Heap::kNanValueRootIndex);
__ vldr(d1, FieldMemOperand(r1, HeapNumber::kValueOffset));
__ b(&loop);
}
__ bind(&done_loop);
__ mov(r0, r1);
__ Drop(r4);
__ Ret();
}
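The comparison logic above encodes the Math.max/Math.min corner cases: a NaN on either side poisons the result, and when the operands compare equal the sign bit is inspected (the cmp against 0x80000000) so that max prefers +0 over -0 while min prefers -0. A minimal scalar sketch of the kMax case:

#include <cmath>
#include <cstdio>

// Sketch of the semantics only; the builtin additionally converts each
// argument with ToNumberStub before comparing.
double MathMax2(double acc, double next) {
  if (std::isnan(acc) || std::isnan(next)) return std::nan("");
  if (acc > next) return acc;
  if (next > acc) return next;
  // Equal operands: prefer +0 over -0 by checking the sign bit (for kMin the
  // preference flips).
  return std::signbit(acc) ? next : acc;
}

int main() {
  std::printf("signbit(max(-0,+0)) = %d\n", std::signbit(MathMax2(-0.0, 0.0)));
  std::printf("isnan(max(1,NaN))   = %d\n", std::isnan(MathMax2(1.0, NAN)));
  return 0;
}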
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -227,8 +329,9 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r2, r1, r3); // first argument, constructor, new target
__ CallRuntime(Runtime::kNewObject);
__ Push(r2); // first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Pop(r2);
}
__ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
@@ -352,8 +455,9 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r2, r1, r3); // first argument, constructor, new target
__ CallRuntime(Runtime::kNewObject);
__ Push(r2); // first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Pop(r2);
}
__ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
@@ -361,14 +465,25 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
}
static void CallRuntimePassFunction(
MacroAssembler* masm, Runtime::FunctionId function_id) {
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
// -- r0 : argument count (preserved for callee)
// -- r1 : target function (preserved for callee)
// -- r3 : new target (preserved for callee)
// -----------------------------------
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Push the number of arguments to the callee.
__ SmiTag(r0);
__ push(r0);
// Push a copy of the target function and the new target.
__ push(r1);
__ push(r3);
@@ -376,26 +491,19 @@ static void CallRuntimePassFunction(
__ Push(r1);
__ CallRuntime(function_id, 1);
__ mov(r2, r0);
// Restore target function and new target.
__ pop(r3);
__ pop(r1);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ pop(r0);
__ SmiUntag(r0, r0);
}
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
__ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r0);
}
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
@@ -407,8 +515,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
@@ -417,7 +524,8 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_implicit_receiver) {
bool create_implicit_receiver,
bool check_derived_construct) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
@@ -435,155 +543,22 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Preserve the incoming parameters on the stack.
__ AssertUndefinedOrAllocationSite(r2, r4);
__ push(r2);
__ SmiTag(r0);
__ push(r0);
__ Push(r2, r0);
if (create_implicit_receiver) {
// Try to allocate the object without transitioning into C code. If any of
// the preconditions is not met, the code bails out to the runtime call.
Label rt_call, allocated;
if (FLAG_inline_new) {
// Verify that the new target is a JSFunction.
__ CompareObjectType(r3, r5, r4, JS_FUNCTION_TYPE);
__ b(ne, &rt_call);
// Load the initial map and verify that it is in fact a map.
// r3: new target
__ ldr(r2,
FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(r2, &rt_call);
__ CompareObjectType(r2, r5, r4, MAP_TYPE);
__ b(ne, &rt_call);
// Fall back to the runtime if the expected base constructor and the
// actual base constructor differ.
__ ldr(r5, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
__ cmp(r1, r5);
__ b(ne, &rt_call);
// Check that the constructor is not constructing a JSFunction (see
// comments in Runtime_NewObject in runtime.cc), in which case the
// initial map's instance type would be JS_FUNCTION_TYPE.
// r1: constructor function
// r2: initial map
// r3: new target
__ CompareInstanceType(r2, r5, JS_FUNCTION_TYPE);
__ b(eq, &rt_call);
// Now allocate the JSObject on the heap.
// r1: constructor function
// r2: initial map
// r3: new target
__ ldrb(r9, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ Allocate(r9, r4, r9, r6, &rt_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
// r1: constructor function
// r2: initial map
// r3: new target
// r4: JSObject (not HeapObject tagged - the actual address).
// r9: start of next object
__ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
__ mov(r5, r4);
STATIC_ASSERT(0 * kPointerSize == JSObject::kMapOffset);
__ str(r2, MemOperand(r5, kPointerSize, PostIndex));
STATIC_ASSERT(1 * kPointerSize == JSObject::kPropertiesOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
STATIC_ASSERT(2 * kPointerSize == JSObject::kElementsOffset);
__ str(r6, MemOperand(r5, kPointerSize, PostIndex));
STATIC_ASSERT(3 * kPointerSize == JSObject::kHeaderSize);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
__ add(r4, r4, Operand(kHeapObjectTag));
// Fill all the in-object properties with the appropriate filler.
// r4: JSObject (tagged)
// r5: First in-object property of JSObject (not tagged)
__ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
if (!is_api_function) {
Label no_inobject_slack_tracking;
// Check if slack tracking is enabled.
MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
// Check if slack tracking is enabled.
__ ldr(r0, bit_field3);
__ DecodeField<Map::ConstructionCounter>(ip, r0);
// ip: slack tracking counter
__ cmp(ip, Operand(Map::kSlackTrackingCounterEnd));
__ b(lt, &no_inobject_slack_tracking);
__ push(ip); // Save allocation count value.
// Decrease generous allocation count.
__ sub(r0, r0, Operand(1 << Map::ConstructionCounter::kShift));
__ str(r0, bit_field3);
// Allocate object with a slack.
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceAttributesOffset));
__ Ubfx(r0, r0, Map::kUnusedPropertyFieldsByte * kBitsPerByte,
kBitsPerByte);
__ sub(r0, r9, Operand(r0, LSL, kPointerSizeLog2));
// r0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
__ cmp(r5, r0);
__ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
}
__ InitializeFieldsWithFiller(r5, r0, r6);
// To allow truncation fill the remaining fields with one pointer
// filler map.
__ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(r5, r9, r6);
__ pop(r0); // Restore allocation count value before decreasing.
__ cmp(r0, Operand(Map::kSlackTrackingCounterEnd));
__ b(ne, &allocated);
// Push the constructor, new_target and the object to the stack,
// and then the initial map as an argument to the runtime call.
__ Push(r1, r3, r4, r2);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(r1, r3, r4);
// Continue with JSObject being successfully allocated
// r1: constructor function
// r3: new target
// r4: JSObject
__ jmp(&allocated);
__ bind(&no_inobject_slack_tracking);
}
__ InitializeFieldsWithFiller(r5, r9, r6);
// Continue with JSObject being successfully allocated
// r1: constructor function
// r3: new target
// r4: JSObject
__ jmp(&allocated);
}
// Allocate the new receiver object using the runtime call.
// r1: constructor function
// r3: new target
__ bind(&rt_call);
// Push the constructor and new_target twice, second pair as arguments
// to the runtime call.
// Allocate the new receiver object.
__ Push(r1, r3);
__ Push(r1, r3); // constructor function, new target
__ CallRuntime(Runtime::kNewObject);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ mov(r4, r0);
__ Pop(r1, r3);
// Receiver for constructor call allocated.
// r1: constructor function
// r3: new target
// r4: JSObject
__ bind(&allocated);
// ----------- S t a t e -------------
// -- r1: constructor function
// -- r3: new target
// -- r4: newly allocated object
// -----------------------------------
// Retrieve smi-tagged arguments count from the stack.
__ ldr(r0, MemOperand(sp));
@@ -685,6 +660,19 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Leave construct frame.
}
// ES6 9.2.2. Step 13+
// Check that the result is not a Smi; a Smi would indicate that the
// constructor result from a derived class is neither undefined nor an Object.
if (check_derived_construct) {
Label dont_throw;
__ JumpIfNotSmi(r0, &dont_throw);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
}
__ bind(&dont_throw);
}
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
__ add(sp, sp, Operand(kPointerSize));
if (create_implicit_receiver) {
@@ -695,17 +683,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, true);
Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, true);
Generate_JSConstructStubHelper(masm, true, false, false);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false);
Generate_JSConstructStubHelper(masm, false, false, false);
}
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false, true);
}
@@ -854,10 +848,8 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
// o sp: stack pointer
// o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
@@ -865,17 +857,19 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ PushFixedFrame(r1);
__ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
__ push(r3);
// Push zero for bytecode array offset.
__ mov(r0, Operand(0));
__ push(r0);
// Get the bytecode array from the function object and load the pointer to the
// first entry into kInterpreterBytecodeRegister.
__ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
Register debug_info = kInterpreterBytecodeArrayRegister;
DCHECK(!debug_info.is(r0));
__ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
__ cmp(debug_info, Operand(DebugInfo::uninitialized()));
// Load original bytecode array or the debug copy.
__ ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
__ ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset));
FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex), ne);
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@@ -886,6 +880,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
// Push new.target, bytecode array and zero for bytecode array offset.
__ mov(r0, Operand(0));
__ Push(r3, kInterpreterBytecodeArrayRegister, r0);
// Allocate the local and temporary register file on the stack.
{
// Load frame size from the BytecodeArray object.
@@ -917,23 +915,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// TODO(rmcilroy): List of things not currently dealt with here but done in
// fullcodegen's prologue:
// - Support profiler (specifically profiling_counter).
// - Call ProfileEntryHookStub when isolate has a function_entry_hook.
// - Allow simulator stop operations if FLAG_stop_at is set.
// - Code aging of the BytecodeArray object.
// Perform stack guard check.
{
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
__ push(kInterpreterBytecodeArrayRegister);
__ CallRuntime(Runtime::kStackGuard);
__ pop(kInterpreterBytecodeArrayRegister);
__ bind(&ok);
}
// Load accumulator, register file, bytecode offset, dispatch table into
// registers.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
@@ -941,10 +925,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
__ mov(kInterpreterBytecodeOffsetRegister,
Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
__ LoadRoot(kInterpreterDispatchTableRegister,
Heap::kInterpreterTableRootIndex);
__ add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
// Dispatch to the first bytecode handler for the function.
__ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
@@ -955,6 +938,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// and header removal.
__ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Call(ip);
// The first bytecode handler was called above; execution never returns here.
__ Abort(kUnexpectedReturnFromBytecodeHandler);
}
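The dispatch sequence above — load a byte at the current bytecode offset, index the dispatch table with it, and call the handler — is the core of the interpreter. A standalone sketch of that shape (handler set and opcode numbers invented for the example):

#include <cstdint>
#include <cstdio>

using Handler = void (*)();
void HandleLdaZero() { std::printf("LdaZero\n"); }
void HandleReturn() { std::printf("Return\n"); }

int main() {
  // The ldrb/ldr pair above performs this lookup in two instructions.
  Handler dispatch_table[256] = {};
  dispatch_table[0x00] = HandleLdaZero;
  dispatch_table[0x01] = HandleReturn;
  const uint8_t bytecode[] = {0x00, 0x01};
  for (uint8_t b : bytecode) dispatch_table[b]();
  return 0;
}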
@@ -992,7 +978,8 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r2 : the address of the first argument to be pushed. Subsequent
@@ -1010,7 +997,9 @@ void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
Generate_InterpreterPushArgs(masm, r2, r3, r4);
// Call the target.
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
tail_call_mode),
RelocInfo::CODE_TARGET);
}
@@ -1039,47 +1028,24 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
}
static void Generate_InterpreterNotifyDeoptimizedHelper(
MacroAssembler* masm, Deoptimizer::BailoutType type) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(kInterpreterAccumulatorRegister); // Save accumulator register.
// Pass the deoptimization type to the runtime system.
__ mov(r1, Operand(Smi::FromInt(static_cast<int>(type))));
__ push(r1);
__ CallRuntime(Runtime::kNotifyDeoptimized);
__ pop(kInterpreterAccumulatorRegister); // Restore accumulator register.
// Tear down internal frame.
}
// Drop state (we don't use this for interpreter deopts).
__ Drop(1);
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
// Initialize register file register and dispatch table register.
__ add(kInterpreterRegisterFileRegister, fp,
Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
__ LoadRoot(kInterpreterDispatchTableRegister,
Heap::kInterpreterTableRootIndex);
__ add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
// Get the context from the frame.
// TODO(rmcilroy): Update interpreter frame to expect current context at the
// context slot instead of the function context.
__ ldr(kContextRegister,
MemOperand(kInterpreterRegisterFileRegister,
InterpreterFrameConstants::kContextFromRegisterPointer));
// Get the bytecode array pointer from the frame.
__ ldr(r1,
__ ldr(
kInterpreterBytecodeArrayRegister,
MemOperand(kInterpreterRegisterFileRegister,
InterpreterFrameConstants::kFunctionFromRegisterPointer));
__ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(r1, SharedFunctionInfo::kFunctionDataOffset));
InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@@ -1107,6 +1073,29 @@ static void Generate_InterpreterNotifyDeoptimizedHelper(
}
static void Generate_InterpreterNotifyDeoptimizedHelper(
MacroAssembler* masm, Deoptimizer::BailoutType type) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the deoptimization type to the runtime system.
__ mov(r1, Operand(Smi::FromInt(static_cast<int>(type))));
__ push(r1);
__ CallRuntime(Runtime::kNotifyDeoptimized);
// Tear down internal frame.
}
// Drop state (we don't use these for interpreter deopts) and pop the
// accumulator value into the accumulator register.
__ Drop(1);
__ Pop(kInterpreterAccumulatorRegister);
// Enter the bytecode dispatch.
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
@@ -1121,22 +1110,30 @@ void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// Set the address of the interpreter entry trampoline as a return address.
// This simulates the initial call to bytecode handlers in interpreter entry
// trampoline. The return will never actually be taken, but our stack walker
// uses this address to determine whether a frame is interpreted.
__ Move(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileLazy);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm,
Runtime::kCompileOptimized_NotConcurrent);
}
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
@@ -1342,14 +1339,11 @@ static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
// Load the next prototype.
__ bind(&next_prototype);
__ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
// End if the prototype is null or not hidden.
__ CompareRoot(receiver, Heap::kNullValueRootIndex);
__ b(eq, receiver_check_failed);
__ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
__ tst(ip, Operand(Map::IsHiddenPrototype::kMask));
__ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
__ b(eq, receiver_check_failed);
__ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
__ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Iterate.
__ b(&prototype_loop_start);
@@ -1829,9 +1823,7 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
// Try to create the list from an arguments object.
__ bind(&create_arguments);
__ ldr(r2,
FieldMemOperand(r0, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize));
__ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
__ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
__ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
__ cmp(r2, ip);
@@ -1906,10 +1898,136 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
namespace {
// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// | ...
// | g()'s arg M
// | ...
// | g()'s arg 1
// | g()'s receiver arg
// | g()'s caller pc
// ------- g()'s frame: -------
// | g()'s caller fp <- fp
// | g()'s context
// | function pointer: g
// | -------------------------
// | ...
// | ...
// | f()'s arg N
// | ...
// | f()'s arg 1
// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
Register scratch1, Register scratch2,
Register scratch3) {
DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
Comment cmnt(masm, "[ PrepareForTailCall");
// Prepare for tail call only if the debugger is not active.
Label done;
ExternalReference debug_is_active =
ExternalReference::debug_is_active_address(masm->isolate());
__ mov(scratch1, Operand(debug_is_active));
__ ldrb(scratch1, MemOperand(scratch1));
__ cmp(scratch1, Operand(0));
__ b(ne, &done);
// Drop possible interpreter handler/stub frame.
{
Label no_interpreter_frame;
__ ldr(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
__ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
__ b(ne, &no_interpreter_frame);
__ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ bind(&no_interpreter_frame);
}
// Check if next frame is an arguments adaptor frame.
Label no_arguments_adaptor, formal_parameter_count_loaded;
__ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ldr(scratch3,
MemOperand(scratch2, StandardFrameConstants::kContextOffset));
__ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(ne, &no_arguments_adaptor);
// Drop arguments adaptor frame and load arguments count.
__ mov(fp, scratch2);
__ ldr(scratch1,
MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(scratch1);
__ b(&formal_parameter_count_loaded);
__ bind(&no_arguments_adaptor);
// Load caller's formal parameter count
__ ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(scratch1,
FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(scratch1,
FieldMemOperand(scratch1,
SharedFunctionInfo::kFormalParameterCountOffset));
__ SmiUntag(scratch1);
__ bind(&formal_parameter_count_loaded);
// Calculate the end of the destination area where we will put the arguments
// after we drop the current frame. We add kPointerSize to count the receiver
// argument, which is not included in the formal parameter count.
Register dst_reg = scratch2;
__ add(dst_reg, fp, Operand(scratch1, LSL, kPointerSizeLog2));
__ add(dst_reg, dst_reg,
Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
Register src_reg = scratch1;
__ add(src_reg, sp, Operand(args_reg, LSL, kPointerSizeLog2));
// Count receiver argument as well (not included in args_reg).
__ add(src_reg, src_reg, Operand(kPointerSize));
if (FLAG_debug_code) {
__ cmp(src_reg, dst_reg);
__ Check(lo, kStackAccessBelowStackPointer);
}
// Restore caller's frame pointer and return address now as they will be
// overwritten by the copying loop.
__ ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
__ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
// Now copy the callee arguments to the caller frame, going backwards to avoid
// corrupting the callee arguments (the source and destination areas could overlap).
// Both src_reg and dst_reg are pointing to the word after the one to copy,
// so they must be pre-decremented in the loop.
Register tmp_reg = scratch3;
Label loop, entry;
__ b(&entry);
__ bind(&loop);
__ ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
__ str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
__ bind(&entry);
__ cmp(sp, src_reg);
__ b(ne, &loop);
// Leave current frame.
__ mov(sp, dst_reg);
__ bind(&done);
}
} // namespace
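The copy loop above moves the prepared arguments up into the dropped frame's slots. Because the destination lies at higher addresses than the source, walking from the top down with pre-decrement never overwrites a word that has not yet been read, even though the two areas overlap. A standalone sketch:

#include <cstdio>

// Both pointers start one word past the last word to move, mirroring the
// pre-decremented src_reg/dst_reg above.
void CopyBackwards(int* src_end, int* dst_end, int* src_begin) {
  int* src = src_end;
  int* dst = dst_end;
  while (src != src_begin) {
    *--dst = *--src;
  }
}

int main() {
  int stack[8] = {10, 11, 12, 13, 0, 0, 0, 0};
  // Move the four "arguments" in slots 0..3 into the overlapping slots 2..5.
  CopyBackwards(stack + 4, stack + 6, stack + 0);
  for (int i = 2; i < 6; i++) std::printf("%d ", stack[i]);  // 10 11 12 13
  std::printf("\n");
  return 0;
}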
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r1 : the function to call (checked to be a JSFunction)
@@ -1995,6 +2113,10 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
// -- cp : the function context.
// -----------------------------------
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, r0, r3, r4, r5);
}
__ ldr(r2,
FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
__ SmiUntag(r2);
@@ -2093,13 +2215,18 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// static
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r1 : the function to call (checked to be a JSBoundFunction)
// -----------------------------------
__ AssertBoundFunction(r1);
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, r0, r3, r4, r5);
}
// Patch the receiver to [[BoundThis]].
__ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
__ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
@@ -2117,7 +2244,8 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r1 : the target to call (can be any Object).
@@ -2127,14 +2255,25 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
__ JumpIfSmi(r1, &non_callable);
__ bind(&non_smi);
__ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallFunction(mode),
__ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
RelocInfo::CODE_TARGET, eq);
__ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
__ Jump(masm->isolate()->builtins()->CallBoundFunction(),
__ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
RelocInfo::CODE_TARGET, eq);
// Check if target has a [[Call]] internal method.
__ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r4, Operand(1 << Map::kIsCallable));
__ b(eq, &non_callable);
__ cmp(r5, Operand(JS_PROXY_TYPE));
__ b(ne, &non_function);
// 0. Prepare for tail call if necessary.
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, r0, r3, r4, r5);
}
// 1. Runtime fallback for Proxy [[Call]].
__ Push(r1);
// Increase the arguments size to include the pushed function and the
@@ -2147,16 +2286,12 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// 2. Call to something else, which might have a [[Call]] internal method (if
// not we raise an exception).
__ bind(&non_function);
// Check if target has a [[Call]] internal method.
__ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r4, Operand(1 << Map::kIsCallable));
__ b(eq, &non_callable);
// Overwrite the original receiver with the (original) target.
__ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
// Let the "call_as_function_delegate" take care of the rest.
__ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
__ Jump(masm->isolate()->builtins()->CallFunction(
ConvertReceiverMode::kNotNullOrUndefined),
ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable.

2460
deps/v8/src/arm/code-stubs-arm.cc

File diff suppressed because it is too large

10
deps/v8/src/arm/codegen-arm.cc

@@ -108,23 +108,23 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
__ b(lt, &size_less_than_8);
__ cmp(chars, Operand(32));
__ b(lt, &less_32);
if (CpuFeatures::cache_line_size() == 32) {
if (CpuFeatures::dcache_line_size() == 32) {
__ pld(MemOperand(src, 32));
}
__ cmp(chars, Operand(64));
__ b(lt, &less_64);
__ pld(MemOperand(src, 64));
if (CpuFeatures::cache_line_size() == 32) {
if (CpuFeatures::dcache_line_size() == 32) {
__ pld(MemOperand(src, 96));
}
__ cmp(chars, Operand(128));
__ b(lt, &less_128);
__ pld(MemOperand(src, 128));
if (CpuFeatures::cache_line_size() == 32) {
if (CpuFeatures::dcache_line_size() == 32) {
__ pld(MemOperand(src, 160));
}
__ pld(MemOperand(src, 192));
if (CpuFeatures::cache_line_size() == 32) {
if (CpuFeatures::dcache_line_size() == 32) {
__ pld(MemOperand(src, 224));
}
__ cmp(chars, Operand(256));
@@ -134,7 +134,7 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
__ bind(&loop);
__ pld(MemOperand(src, 256));
__ vld1(Neon8, NeonListOperand(d0, 4), NeonMemOperand(src, PostIndex));
if (CpuFeatures::cache_line_size() == 32) {
if (CpuFeatures::dcache_line_size() == 32) {
__ pld(MemOperand(src, 256));
}
__ vld1(Neon8, NeonListOperand(d4, 4), NeonMemOperand(src, PostIndex));
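The pld distance in this copy loop is tuned to the data-cache line size probed at startup: on 32-byte-line cores every line of the upcoming block gets touched. A rough standalone analogue using the GCC/Clang __builtin_prefetch intrinsic (block size and distances here are illustrative, not the exact schedule above):

#include <cstddef>
#include <cstring>

void CopyWithPrefetch(unsigned char* dst, const unsigned char* src, size_t n) {
  constexpr size_t kLine = 32;  // assume a 32-byte data-cache line
  size_t i = 0;
  for (; i + 64 <= n; i += 64) {
    __builtin_prefetch(src + i + 128);          // next block, first line
    __builtin_prefetch(src + i + 128 + kLine);  // next block, second line
    std::memcpy(dst + i, src + i, 64);
  }
  std::memcpy(dst + i, src + i, n - i);  // tail
}

int main() {
  unsigned char src[256], dst[256] = {};
  for (size_t i = 0; i < sizeof src; i++) src[i] = static_cast<unsigned char>(i);
  CopyWithPrefetch(dst, src, sizeof src);
  return dst[255] == src[255] ? 0 : 1;
}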

16
deps/v8/src/arm/constants-arm.h

@@ -219,6 +219,22 @@ enum {
};
enum BarrierOption {
OSHLD = 0x1,
OSHST = 0x2,
OSH = 0x3,
NSHLD = 0x5,
NSHST = 0x6,
NSH = 0x7,
ISHLD = 0x9,
ISHST = 0xa,
ISH = 0xb,
LD = 0xd,
ST = 0xe,
SY = 0xf,
};
// -----------------------------------------------------------------------------
// Addressing modes and instruction variants.

24
deps/v8/src/arm/deoptimizer-arm.cc

@@ -85,27 +85,6 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
}
void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
// Set the register values. The values are not important as there are no
// callee saved registers in JavaScript frames, so all registers are
// spilled. Registers fp and sp are set to the correct values though.
for (int i = 0; i < Register::kNumRegisters; i++) {
input_->SetRegister(i, i * 4);
}
input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp()));
input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
input_->SetDoubleRegister(i, 0.0);
}
// Fill the frame content from the actual data on the frame.
for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
input_->SetFrameSlot(i, Memory::uint32_at(tos + i));
}
}
void Deoptimizer::SetPlatformCompiledStubRegisters(
FrameDescription* output_frame, CodeStubDescriptor* descriptor) {
ApiFunction function(descriptor->deoptimization_handler());
@@ -124,8 +103,7 @@ void Deoptimizer::CopyDoubleRegisters(FrameDescription* output_frame) {
}
}
bool Deoptimizer::HasAlignmentPadding(JSFunction* function) {
bool Deoptimizer::HasAlignmentPadding(SharedFunctionInfo* shared) {
// There is no dynamic alignment padding on ARM in the input frame.
return false;
}

30
deps/v8/src/arm/disasm-arm.cc

@@ -1187,9 +1187,15 @@ void Decoder::DecodeType3(Instruction* instr) {
}
}
}
} else {
// PU == 0b01, BW == 0b11, Bits(9, 6) != 0b0001
if ((instr->Bits(20, 16) == 0x1f) &&
(instr->Bits(11, 4) == 0xf3)) {
Format(instr, "rbit'cond 'rd, 'rm");
} else {
UNREACHABLE();
}
}
break;
}
}
@@ -1689,6 +1695,12 @@ void Decoder::DecodeType6CoprocessorIns(Instruction* instr) {
}
static const char* const barrier_option_names[] = {
"invalid", "oshld", "oshst", "osh", "invalid", "nshld", "nshst", "nsh",
"invalid", "ishld", "ishst", "ish", "invalid", "ld", "st", "sy",
};
void Decoder::DecodeSpecialCondition(Instruction* instr) {
switch (instr->SpecialValue()) {
case 5:
@@ -1765,6 +1777,24 @@ void Decoder::DecodeSpecialCondition(Instruction* instr) {
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"pld [r%d, #+%d]", Rn, offset);
}
} else if (instr->SpecialValue() == 0xA && instr->Bits(22, 20) == 7) {
int option = instr->Bits(3, 0);
switch (instr->Bits(7, 4)) {
case 4:
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"dsb %s", barrier_option_names[option]);
break;
case 5:
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"dmb %s", barrier_option_names[option]);
break;
case 6:
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"isb %s", barrier_option_names[option]);
break;
default:
Unknown(instr);
}
} else {
Unknown(instr);
}

61
deps/v8/src/arm/interface-descriptors-arm.cc

@@ -56,20 +56,6 @@ const Register StringCompareDescriptor::LeftRegister() { return r1; }
const Register StringCompareDescriptor::RightRegister() { return r0; }
const Register ArgumentsAccessReadDescriptor::index() { return r1; }
const Register ArgumentsAccessReadDescriptor::parameter_count() { return r0; }
const Register ArgumentsAccessNewDescriptor::function() { return r1; }
const Register ArgumentsAccessNewDescriptor::parameter_count() { return r2; }
const Register ArgumentsAccessNewDescriptor::parameter_pointer() { return r3; }
const Register RestParamAccessDescriptor::parameter_count() { return r2; }
const Register RestParamAccessDescriptor::parameter_pointer() { return r3; }
const Register RestParamAccessDescriptor::rest_parameter_index() { return r4; }
const Register ApiGetterDescriptor::function_address() { return r2; }
@@ -98,6 +84,32 @@ void FastNewContextDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1, r3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ToNumberDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
@@ -114,6 +126,10 @@ const Register ToLengthDescriptor::ReceiverRegister() { return r0; }
const Register ToStringDescriptor::ReceiverRegister() { return r0; }
// static
const Register ToNameDescriptor::ReceiverRegister() { return r0; }
// static
const Register ToObjectDescriptor::ReceiverRegister() { return r0; }
@@ -167,13 +183,6 @@ void CreateWeakCellDescriptor::InitializePlatformSpecific(
}
void StoreArrayLiteralElementDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r3, r0};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallFunctionDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r1};
@@ -432,6 +441,14 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
&default_descriptor);
}
void InterpreterDispatchDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
kInterpreterAccumulatorRegister, kInterpreterRegisterFileRegister,
kInterpreterBytecodeOffsetRegister, kInterpreterBytecodeArrayRegister,
kInterpreterDispatchTableRegister};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
@@ -443,7 +460,6 @@ void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
@@ -455,7 +471,6 @@ void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterCEntryDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

109
deps/v8/src/arm/macro-assembler-arm.cc

@@ -185,6 +185,9 @@ void MacroAssembler::Drop(int count, Condition cond) {
}
}
void MacroAssembler::Drop(Register count, Condition cond) {
add(sp, sp, Operand(count, LSL, kPointerSizeLog2), LeaveCC, cond);
}
void MacroAssembler::Ret(int drop, Condition cond) {
Drop(drop, cond);
@@ -449,9 +452,9 @@ void MacroAssembler::InNewSpace(Register object,
Condition cond,
Label* branch) {
DCHECK(cond == eq || cond == ne);
and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
b(cond, branch);
const int mask =
(1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
CheckPageFlag(object, scratch, mask, cond, branch);
}
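Where the old code masked the object's address and compared it against new_space_start, the replacement reads per-page flag bits: CheckPageFlag clears the low kPageSizeBits of the address to reach the page header, loads the flags word, and tests the mask. A standalone sketch with a made-up page size and flag layout:

#include <cstdint>
#include <cstdio>
#include <cstdlib>

constexpr std::size_t kPageSize = 4096;  // demo value; V8 pages are larger

struct Page {
  std::uintptr_t flags;  // assume the flags word sits at the page start
};

bool PageFlagSet(const void* object, std::uintptr_t mask) {
  auto* page = reinterpret_cast<const Page*>(
      reinterpret_cast<std::uintptr_t>(object) & ~(kPageSize - 1));
  return (page->flags & mask) != 0;
}

int main() {
  void* raw = std::aligned_alloc(kPageSize, kPageSize);
  auto* page = static_cast<Page*>(raw);
  page->flags = 1u << 3;  // pretend bit 3 is IN_TO_SPACE
  const void* object = static_cast<char*>(raw) + 128;  // "object" on the page
  std::printf("in new space: %d\n", PageFlagSet(object, 1u << 3));
  std::free(raw);
  return 0;
}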
@@ -648,6 +651,69 @@ void MacroAssembler::RecordWrite(
}
}
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
Register code_entry,
Register scratch) {
const int offset = JSFunction::kCodeEntryOffset;
// Since a code entry (value) is always in old space, we don't need to update
// the remembered set. If incremental marking is off, there is nothing for us
// to do.
if (!FLAG_incremental_marking) return;
DCHECK(js_function.is(r1));
DCHECK(code_entry.is(r4));
DCHECK(scratch.is(r5));
AssertNotSmi(js_function);
if (emit_debug_code()) {
add(scratch, js_function, Operand(offset - kHeapObjectTag));
ldr(ip, MemOperand(scratch));
cmp(ip, code_entry);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
Label done;
CheckPageFlag(code_entry, scratch,
MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
CheckPageFlag(js_function, scratch,
MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);
const Register dst = scratch;
add(dst, js_function, Operand(offset - kHeapObjectTag));
push(code_entry);
// Save caller-saved registers, which includes js_function.
DCHECK((kCallerSaved & js_function.bit()) != 0);
DCHECK_EQ(kCallerSaved & code_entry.bit(), 0);
stm(db_w, sp, (kCallerSaved | lr.bit()));
int argument_count = 3;
PrepareCallCFunction(argument_count, code_entry);
mov(r0, js_function);
mov(r1, dst);
mov(r2, Operand(ExternalReference::isolate_address(isolate())));
{
AllowExternalCallThatCantCauseGC scope(this);
CallCFunction(
ExternalReference::incremental_marking_record_write_code_entry_function(
isolate()),
argument_count);
}
// Restore caller-saved registers (including js_function and code_entry).
ldm(ia_w, sp, (kCallerSaved | lr.bit()));
pop(code_entry);
bind(&done);
}
void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
Register address,
@@ -1330,7 +1396,7 @@ void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
}
Push(fun);
Push(fun);
CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
Pop(fun);
if (new_target.is_valid()) {
Pop(new_target);
@@ -2506,18 +2572,6 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
}
void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
const CallWrapper& call_wrapper) {
// You can't call a builtin without a valid frame.
DCHECK(flag == JUMP_FUNCTION || has_frame());
// Fake a parameter count to avoid emitting code to do the check.
ParameterCount expected(0);
LoadNativeContextSlot(native_context_index, r1);
InvokeFunctionCode(r1, no_reg, expected, expected, flag, call_wrapper);
}
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
Register scratch1, Register scratch2) {
if (FLAG_native_code_counters && counter->Enabled()) {
@@ -2613,9 +2667,9 @@ void MacroAssembler::Abort(BailoutReason reason) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(this, StackFrame::NONE);
CallRuntime(Runtime::kAbort, 1);
CallRuntime(Runtime::kAbort);
} else {
CallRuntime(Runtime::kAbort, 1);
CallRuntime(Runtime::kAbort);
}
// will not return here
if (is_const_pool_blocked()) {
@@ -2822,6 +2876,20 @@ void MacroAssembler::AssertBoundFunction(Register object) {
}
void MacroAssembler::AssertReceiver(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
tst(object, Operand(kSmiTagMask));
Check(ne, kOperandIsASmiAndNotAReceiver);
push(object);
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
CompareObjectType(object, object, object, FIRST_JS_RECEIVER_TYPE);
pop(object);
Check(hs, kOperandIsNotAReceiver);
}
}
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
Register scratch) {
if (emit_debug_code()) {
@@ -3259,6 +3327,7 @@ void MacroAssembler::CheckPageFlag(
int mask,
Condition cc,
Label* condition_met) {
DCHECK(cc == eq || cc == ne);
Bfc(scratch, object, 0, kPageSizeBits);
ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
tst(scratch, Operand(mask));
@@ -3396,7 +3465,8 @@ void MacroAssembler::LoadAccessor(Register dst, Register holder,
}
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
Register null_value = r5;
Register empty_fixed_array_value = r6;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Label next, start;
@@ -3410,6 +3480,7 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
b(eq, call_runtime);
LoadRoot(null_value, Heap::kNullValueRootIndex);
jmp(&start);
bind(&next);

24
deps/v8/src/arm/macro-assembler-arm.h

@@ -16,6 +16,7 @@ namespace internal {
// Give alias names to registers for calling conventions.
const Register kReturnRegister0 = {Register::kCode_r0};
const Register kReturnRegister1 = {Register::kCode_r1};
const Register kReturnRegister2 = {Register::kCode_r2};
const Register kJSFunctionRegister = {Register::kCode_r1};
const Register kContextRegister = {Register::kCode_r7};
const Register kInterpreterAccumulatorRegister = {Register::kCode_r0};
@@ -127,6 +128,7 @@ class MacroAssembler: public Assembler {
// Emit code to discard a non-negative number of pointer-sized elements
// from the stack, clobbering only the sp register.
void Drop(int count, Condition cond = al);
void Drop(Register count, Condition cond = al);
void Ret(int drop, Condition cond = al);
@@ -218,7 +220,7 @@ class MacroAssembler: public Assembler {
void JumpIfNotInNewSpace(Register object,
Register scratch,
Label* branch) {
InNewSpace(object, scratch, ne, branch);
InNewSpace(object, scratch, eq, branch);
}
// Check if object is in new space. Jumps if the object is in new space.
@@ -226,7 +228,7 @@ class MacroAssembler: public Assembler {
void JumpIfInNewSpace(Register object,
Register scratch,
Label* branch) {
InNewSpace(object, scratch, eq, branch);
InNewSpace(object, scratch, ne, branch);
}
// Check if an object has a given incremental marking color.
@@ -288,6 +290,11 @@ class MacroAssembler: public Assembler {
pointers_to_here_check_for_value);
}
// Notify the garbage collector that we wrote a code entry into a
// JSFunction. Only scratch is clobbered by the operation.
void RecordWriteCodeEntryField(Register js_function, Register code_entry,
Register scratch);
void RecordWriteForMap(
Register object,
Register map,
@@ -315,7 +322,6 @@ class MacroAssembler: public Assembler {
// Push two registers. Pushes leftmost register first (to highest address).
void Push(Register src1, Register src2, Condition cond = al) {
DCHECK(!src1.is(src2));
if (src1.code() > src2.code()) {
stm(db_w, sp, src1.bit() | src2.bit(), cond);
} else {
@@ -326,7 +332,6 @@ class MacroAssembler: public Assembler {
// Push three registers. Pushes leftmost register first (to highest address).
void Push(Register src1, Register src2, Register src3, Condition cond = al) {
DCHECK(!AreAliased(src1, src2, src3));
if (src1.code() > src2.code()) {
if (src2.code() > src3.code()) {
stm(db_w, sp, src1.bit() | src2.bit() | src3.bit(), cond);
@@ -346,7 +351,6 @@ class MacroAssembler: public Assembler {
Register src3,
Register src4,
Condition cond = al) {
DCHECK(!AreAliased(src1, src2, src3, src4));
if (src1.code() > src2.code()) {
if (src2.code() > src3.code()) {
if (src3.code() > src4.code()) {
@@ -371,7 +375,6 @@ class MacroAssembler: public Assembler {
// Push five registers. Pushes leftmost register first (to highest address).
void Push(Register src1, Register src2, Register src3, Register src4,
Register src5, Condition cond = al) {
DCHECK(!AreAliased(src1, src2, src3, src4, src5));
if (src1.code() > src2.code()) {
if (src2.code() > src3.code()) {
if (src3.code() > src4.code()) {
@@ -1143,10 +1146,6 @@ class MacroAssembler: public Assembler {
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& builtin);
// Invoke specified builtin JavaScript function.
void InvokeBuiltin(int native_context_index, InvokeFlag flag,
const CallWrapper& call_wrapper = NullCallWrapper());
Handle<Object> CodeObject() {
DCHECK(!code_object_.is_null());
return code_object_;
@@ -1298,6 +1297,9 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertBoundFunction(Register object);
// Abort execution if argument is not a JSReceiver, enabled via --debug-code.
void AssertReceiver(Register object);
// Abort execution if argument is not undefined or an AllocationSite, enabled
// via --debug-code.
void AssertUndefinedOrAllocationSite(Register object, Register scratch);
@@ -1407,7 +1409,7 @@ class MacroAssembler: public Assembler {
// Expects object in r0 and returns map with validated enum cache
// in r0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);
void CheckEnumCache(Label* call_runtime);
// AllocationMemento support. Arrays may have an associated
// AllocationMemento object that can be checked for in order to pretransition

48
deps/v8/src/arm/simulator-arm.cc

@ -14,6 +14,7 @@
#include "src/base/bits.h"
#include "src/codegen.h"
#include "src/disasm.h"
#include "src/runtime/runtime-utils.h"
#if defined(USE_SIMULATOR)
@ -391,7 +392,8 @@ void ArmDebugger::Debug() {
HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
int value = *cur;
Heap* current_heap = sim_->isolate_->heap();
if (((value & 1) == 0) || current_heap->Contains(obj)) {
if (((value & 1) == 0) ||
current_heap->ContainsSlow(obj->address())) {
PrintF(" (");
if ((value & 1) == 0) {
PrintF("smi %d", value / 2);
@ -1717,6 +1719,10 @@ typedef int64_t (*SimulatorRuntimeCall)(int32_t arg0,
int32_t arg4,
int32_t arg5);
typedef ObjectTriple (*SimulatorRuntimeTripleCall)(int32_t arg0, int32_t arg1,
int32_t arg2, int32_t arg3,
int32_t arg4);
// These prototypes handle the four types of FP calls.
typedef int64_t (*SimulatorRuntimeCompareCall)(double darg0, double darg1);
typedef double (*SimulatorRuntimeFPFPCall)(double darg0, double darg1);
@ -1900,9 +1906,36 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
reinterpret_cast<SimulatorRuntimeProfilingGetterCall>(
external);
target(arg0, arg1, Redirection::ReverseRedirection(arg2));
} else if (redirection->type() ==
ExternalReference::BUILTIN_CALL_TRIPLE) {
// builtin call returning ObjectTriple.
SimulatorRuntimeTripleCall target =
reinterpret_cast<SimulatorRuntimeTripleCall>(external);
if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
PrintF(
"Call to host triple returning runtime function %p "
"args %08x, %08x, %08x, %08x, %08x",
FUNCTION_ADDR(target), arg1, arg2, arg3, arg4, arg5);
if (!stack_aligned) {
PrintF(" with unaligned stack %08x\n", get_register(sp));
}
PrintF("\n");
}
CHECK(stack_aligned);
// arg0 is a hidden argument pointing to the return location, so don't
// pass it to the target function.
ObjectTriple result = target(arg1, arg2, arg3, arg4, arg5);
if (::v8::internal::FLAG_trace_sim) {
PrintF("Returned { %p, %p, %p }\n", result.x, result.y, result.z);
}
// Return is passed back in address pointed to by hidden first argument.
ObjectTriple* sim_result = reinterpret_cast<ObjectTriple*>(arg0);
*sim_result = result;
set_register(r0, arg0);
} else {
// builtin call.
DCHECK(redirection->type() == ExternalReference::BUILTIN_CALL);
DCHECK(redirection->type() == ExternalReference::BUILTIN_CALL ||
redirection->type() == ExternalReference::BUILTIN_CALL_PAIR);
SimulatorRuntimeCall target =
reinterpret_cast<SimulatorRuntimeCall>(external);
if (::v8::internal::FLAG_trace_sim || !stack_aligned) {
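The BUILTIN_CALL_TRIPLE branch above emulates the AAPCS rule for returning a struct too large for registers: the caller passes a hidden pointer to the result slot as the first argument (r0 on 32-bit ARM), the visible arguments shift up by one register, and the callee writes the result through that pointer; that is why the simulator calls target(arg1, ..., arg5) and stores through arg0. A sketch of the convention from the C++ side, assuming the ObjectTriple layout used in this diff:

#include <cstdint>

// A 12-byte struct is returned in memory on 32-bit ARM: the hidden
// destination pointer travels in r0 and a..c arrive in r1..r3. The host
// compiler does this plumbing automatically.
struct ObjectTriple {
  void* x;
  void* y;
  void* z;
};

ObjectTriple MakeTriple(uintptr_t a, uintptr_t b, uintptr_t c) {
  return ObjectTriple{reinterpret_cast<void*>(a),
                      reinterpret_cast<void*>(b),
                      reinterpret_cast<void*>(c)};
}

On arm64 the indirect-result pointer travels in x8 rather than in the first argument register, which is why the arm64 simulator later in this diff reads the destination from xreg(8) instead of shifting the arguments.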
@ -2886,9 +2919,17 @@ void Simulator::DecodeType3(Instruction* instr) {
set_register(rd, rn_val + (rm_val & 0xFFFF));
}
}
} else {
// PU == 0b01, BW == 0b11, Bits(9, 6) != 0b0001
if ((instr->Bits(20, 16) == 0x1f) &&
(instr->Bits(11, 4) == 0xf3)) {
// Rbit.
uint32_t rm_val = get_register(instr->RmValue());
set_register(rd, base::bits::ReverseBits(rm_val));
} else {
UNIMPLEMENTED();
}
}
break;
}
}
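The simulator recognizes rbit purely by fixed encoding fields once the outer Type-3 decode has narrowed the space: bits [20:16] must be 0b11111 (0x1f) and bits [11:4] must be 0b11110011 (0xf3). The same match written against a raw instruction word:

#include <cstdint>

// Field extraction as the simulator's Bits(hi, lo) accessor does it.
inline uint32_t Bits(uint32_t instr, int hi, int lo) {
  return (instr >> lo) & ((1u << (hi - lo + 1)) - 1u);
}

// Mirrors the check in the hunk above; both constants come straight from it.
// Note the match is only meaningful inside the decode path that reached it.
inline bool LooksLikeRbit(uint32_t instr) {
  return Bits(instr, 20, 16) == 0x1F && Bits(instr, 11, 4) == 0xF3;
}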
@ -3871,6 +3912,9 @@ void Simulator::DecodeSpecialCondition(Instruction* instr) {
case 0xB:
if ((instr->Bits(22, 20) == 5) && (instr->Bits(15, 12) == 0xf)) {
// pld: ignore instruction.
} else if (instr->SpecialValue() == 0xA && instr->Bits(22, 20) == 7) {
// dsb, dmb, isb: ignore instruction for now.
// TODO(binji): implement
} else {
UNIMPLEMENTED();
}

22
deps/v8/src/arm64/assembler-arm64-inl.h

@ -731,8 +731,8 @@ void RelocInfo::set_target_object(Object* target,
if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
host() != NULL &&
target->IsHeapObject()) {
host()->GetHeap()->incremental_marking()->RecordWrite(
host(), &Memory::Object_at(pc_), HeapObject::cast(target));
host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
host(), this, HeapObject::cast(target));
}
}
@ -853,24 +853,6 @@ void RelocInfo::WipeOut() {
}
bool RelocInfo::IsPatchedReturnSequence() {
// The sequence must be:
// ldr ip0, [pc, #offset]
// blr ip0
// See arm64/debug-arm64.cc DebugCodegen::PatchDebugBreakSlot
Instruction* i1 = reinterpret_cast<Instruction*>(pc_);
Instruction* i2 = i1->following();
return i1->IsLdrLiteralX() && (i1->Rt() == kIp0Code) &&
i2->IsBranchAndLinkToRegister() && (i2->Rn() == kIp0Code);
}
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
Instruction* current_instr = reinterpret_cast<Instruction*>(pc_);
return !current_instr->IsNop(Assembler::DEBUG_BREAK_NOP);
}
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {

4
deps/v8/src/arm64/assembler-arm64.h

@ -369,6 +369,8 @@ bool AreSameSizeAndType(const CPURegister& reg1,
typedef FPRegister DoubleRegister;
// TODO(arm64) Define SIMD registers.
typedef FPRegister Simd128Register;
// -----------------------------------------------------------------------------
// Lists of registers.
@ -925,7 +927,7 @@ class Assembler : public AssemblerBase {
// Record a deoptimization reason that can be used by a log or cpu profiler.
// Use --trace-deopt to enable.
void RecordDeoptReason(const int reason, const SourcePosition position);
void RecordDeoptReason(const int reason, int raw_position);
int buffer_space() const;

624
deps/v8/src/arm64/builtins-arm64.cc

@ -137,6 +137,97 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
}
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- lr : return address
// -- sp[(argc - n) * 8] : arg[n] (zero-based)
// -- sp[(argc + 1) * 8] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_MathMaxMin");
Heap::RootListIndex const root_index =
(kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
: Heap::kMinusInfinityValueRootIndex;
// Load the accumulator with the default return value (either -Infinity or
// +Infinity), with the tagged value in x1 and the double value in d1.
__ LoadRoot(x1, root_index);
__ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));
// Remember how many slots to drop (including the receiver).
__ Add(x4, x0, 1);
Label done_loop, loop;
__ Bind(&loop);
{
// Check if all parameters done.
__ Subs(x0, x0, 1);
__ B(lt, &done_loop);
// Load the next parameter tagged value into x2.
__ Peek(x2, Operand(x0, LSL, kPointerSizeLog2));
// Load the double value of the parameter into d2, converting the
// parameter to a number first using the ToNumberStub if necessary.
Label convert_smi, convert_number, done_convert;
__ JumpIfSmi(x2, &convert_smi);
__ JumpIfHeapNumber(x2, &convert_number);
{
// Parameter is not a Number, use the ToNumberStub to convert it.
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0);
__ SmiTag(x4);
__ Push(x0, x1, x4);
__ Mov(x0, x2);
ToNumberStub stub(masm->isolate());
__ CallStub(&stub);
__ Mov(x2, x0);
__ Pop(x4, x1, x0);
{
// Restore the double accumulator value (d1).
Label done_restore;
__ SmiUntagToDouble(d1, x1, kSpeculativeUntag);
__ JumpIfSmi(x1, &done_restore);
__ Ldr(d1, FieldMemOperand(x1, HeapNumber::kValueOffset));
__ Bind(&done_restore);
}
__ SmiUntag(x4);
__ SmiUntag(x0);
}
__ AssertNumber(x2);
__ JumpIfSmi(x2, &convert_smi);
__ Bind(&convert_number);
__ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
__ B(&done_convert);
__ Bind(&convert_smi);
__ SmiUntagToDouble(d2, x2);
__ Bind(&done_convert);
// We can use a single fmin/fmax for the operation itself, but we then need
// to work out which HeapNumber (or smi) the result came from.
__ Fmov(x11, d1);
if (kind == MathMaxMinKind::kMin) {
__ Fmin(d1, d1, d2);
} else {
DCHECK(kind == MathMaxMinKind::kMax);
__ Fmax(d1, d1, d2);
}
__ Fmov(x10, d1);
__ Cmp(x10, x11);
__ Csel(x1, x1, x2, eq);
__ B(&loop);
}
__ Bind(&done_loop);
__ Mov(x0, x1);
__ Drop(x4);
__ Ret();
}
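The Fmov/Cmp/Csel tail of the loop is the subtle part: after the fmin/fmax, the code compares the result's raw bit pattern against the accumulator's to decide which tagged value to keep. A bitwise compare distinguishes -0.0 from +0.0 and still works when the result is a NaN, where a floating-point equality test would always fail. The selection step modelled in C++, with the min/max itself left abstract:

#include <cstdint>
#include <cstring>

// Bit-exact double equality: stands in for the Fmov to a core register plus
// the integer Cmp.
inline uint64_t Bits(double v) {
  uint64_t b;
  std::memcpy(&b, &v, sizeof b);
  return b;
}

// acc is the running accumulator (d1), result the Fmin/Fmax output. The boxed
// value whose payload is bit-identical to the result is the one to keep; this
// is the Csel in the code above.
template <typename Boxed>
Boxed SelectBox(double result, double acc, Boxed acc_box, Boxed arg_box) {
  return Bits(result) == Bits(acc) ? acc_box : arg_box;
}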
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
@ -229,8 +320,9 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x2, x1, x3); // first argument, constructor, new target
__ CallRuntime(Runtime::kNewObject);
__ Push(x2); // first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Pop(x2);
}
__ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
@ -356,48 +448,49 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x2, x1, x3); // first argument, constructor, new target
__ CallRuntime(Runtime::kNewObject);
__ Push(x2); // first argument
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Pop(x2);
}
__ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
__ Ret();
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
__ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
__ Br(x2);
}
static void CallRuntimePassFunction(MacroAssembler* masm,
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
// -- x1 : target function (preserved for callee)
// -- x3 : new target (preserved for callee)
// -----------------------------------
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the target function and the new target.
// Push another copy as a parameter to the runtime call.
__ Push(x1, x3, x1);
__ SmiTag(x0);
__ Push(x0, x1, x3, x1);
__ CallRuntime(function_id, 1);
__ Move(x2, x0);
// Restore target function and new target.
__ Pop(x3, x1);
}
__ Pop(x3, x1, x0);
__ SmiUntag(x0);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
__ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
__ Br(x2);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
__ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
__ Br(x0);
}
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However, not
@ -408,8 +501,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
__ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
__ B(hs, &ok);
CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
__ Bind(&ok);
GenerateTailCallToSharedCode(masm);
@ -418,7 +510,8 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_implicit_receiver) {
bool create_implicit_receiver,
bool check_derived_construct) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
@ -448,148 +541,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ Push(allocation_site, argc);
if (create_implicit_receiver) {
// sp[0]: new.target
// sp[1]: Constructor function.
// sp[2]: number of arguments (smi-tagged)
// sp[3]: allocation site
// Try to allocate the object without transitioning into C code. If any of
// the preconditions is not met, the code bails out to the runtime call.
Label rt_call, allocated;
if (FLAG_inline_new) {
// Verify that the new target is a JSFunction.
__ JumpIfNotObjectType(new_target, x10, x11, JS_FUNCTION_TYPE,
&rt_call);
// Load the initial map and verify that it is in fact a map.
Register init_map = x2;
__ Ldr(init_map,
FieldMemOperand(new_target,
JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(init_map, &rt_call);
__ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);
// Fall back to runtime if the expected base constructor and base
// constructor differ.
__ Ldr(x10,
FieldMemOperand(init_map, Map::kConstructorOrBackPointerOffset));
__ Cmp(constructor, x10);
__ B(ne, &rt_call);
// Check that the constructor is not constructing a JSFunction (see
// comments in Runtime_NewObject in runtime.cc), in which case the
// initial map's instance type would be JS_FUNCTION_TYPE.
__ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
__ B(eq, &rt_call);
// Now allocate the JSObject on the heap.
Register obj_size = x10;
Register new_obj = x4;
Register next_obj = obj_size; // May overlap.
__ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
__ Allocate(obj_size, new_obj, next_obj, x11, &rt_call, SIZE_IN_WORDS);
// Allocated the JSObject, now initialize the fields. Map is set to
// initial map and properties and elements are set to empty fixed array.
// NB. the object pointer is not tagged, so MemOperand is used.
Register write_address = x5;
Register empty = x7;
__ Mov(write_address, new_obj);
__ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
STATIC_ASSERT(0 * kPointerSize == JSObject::kMapOffset);
__ Str(init_map, MemOperand(write_address, kPointerSize, PostIndex));
STATIC_ASSERT(1 * kPointerSize == JSObject::kPropertiesOffset);
STATIC_ASSERT(2 * kPointerSize == JSObject::kElementsOffset);
__ Stp(empty, empty,
MemOperand(write_address, 2 * kPointerSize, PostIndex));
STATIC_ASSERT(3 * kPointerSize == JSObject::kHeaderSize);
// Add the object tag to make the JSObject real, so that we can continue
// and jump into the continuation code at any time from now on.
__ Add(new_obj, new_obj, kHeapObjectTag);
// Fill all of the in-object properties with the appropriate filler.
Register filler = x7;
__ LoadRoot(filler, Heap::kUndefinedValueRootIndex);
if (!is_api_function) {
Label no_inobject_slack_tracking;
Register construction_count = x14;
MemOperand bit_field3 =
FieldMemOperand(init_map, Map::kBitField3Offset);
// Check if slack tracking is enabled.
__ Ldr(x11, bit_field3);
__ DecodeField<Map::ConstructionCounter>(construction_count, x11);
__ Cmp(construction_count, Operand(Map::kSlackTrackingCounterEnd));
__ B(lt, &no_inobject_slack_tracking);
// Decrease generous allocation count.
__ Subs(x11, x11, Operand(1 << Map::ConstructionCounter::kShift));
__ Str(x11, bit_field3);
// Allocate object with a slack.
Register unused_props = x11;
__ Ldr(unused_props,
FieldMemOperand(init_map, Map::kInstanceAttributesOffset));
__ Ubfx(unused_props, unused_props,
Map::kUnusedPropertyFieldsByte * kBitsPerByte, kBitsPerByte);
Register end_of_pre_allocated = x11;
__ Sub(end_of_pre_allocated, next_obj,
Operand(unused_props, LSL, kPointerSizeLog2));
unused_props = NoReg;
if (FLAG_debug_code) {
__ Cmp(write_address, end_of_pre_allocated);
__ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
}
// Fill the pre-allocated fields with undef.
__ InitializeFieldsWithFiller(write_address, end_of_pre_allocated,
filler);
// Fill the remaining fields with one pointer filler map.
__ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(write_address, next_obj, filler);
__ Cmp(construction_count, Operand(Map::kSlackTrackingCounterEnd));
__ B(ne, &allocated);
// Push the constructor, new_target and the object to the stack,
// and then the initial map as an argument to the runtime call.
__ Push(constructor, new_target, new_obj, init_map);
__ CallRuntime(Runtime::kFinalizeInstanceSize);
__ Pop(new_obj, new_target, constructor);
// Continue with JSObject being successfully allocated.
__ B(&allocated);
__ bind(&no_inobject_slack_tracking);
}
__ InitializeFieldsWithFiller(write_address, next_obj, filler);
// Continue with JSObject being successfully allocated.
__ B(&allocated);
}
// Allocate the new receiver object using the runtime call.
// x1: constructor function
// x3: new target
__ Bind(&rt_call);
// Push the constructor and new_target twice, second pair as arguments
// to the runtime call.
__ Push(constructor, new_target, constructor, new_target);
__ CallRuntime(Runtime::kNewObject);
// Allocate the new receiver object.
__ Push(constructor, new_target);
FastNewObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Mov(x4, x0);
__ Pop(new_target, constructor);
// Receiver for constructor call allocated.
// x1: constructor function
// x3: new target
// x4: JSObject
__ Bind(&allocated);
// ----------- S t a t e -------------
// -- x1: constructor function
// -- x3: new target
// -- x4: newly allocated object
// -----------------------------------
// Reload the number of arguments from the stack.
// Set it up in x0 for the function call below.
@ -697,6 +660,19 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Leave construct frame.
}
// ES6 9.2.2. Step 13+
// Check that the result is not a Smi, indicating that the constructor result
// from a derived class is neither undefined nor an Object.
if (check_derived_construct) {
Label dont_throw;
__ JumpIfNotSmi(x0, &dont_throw);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
}
__ Bind(&dont_throw);
}
__ DropBySMI(x1);
__ Drop(1);
if (create_implicit_receiver) {
@ -707,17 +683,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, true);
Generate_JSConstructStubHelper(masm, false, true, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, true);
Generate_JSConstructStubHelper(masm, true, false, false);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false);
Generate_JSConstructStubHelper(masm, false, false, false);
}
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false, true);
}
@ -877,10 +859,8 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
// - jssp: stack pointer.
// - lr: return address.
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
@ -888,17 +868,19 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ Push(lr, fp, cp, x1);
__ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
__ Push(x3);
// Push zero for bytecode array offset.
__ Mov(x0, Operand(0));
__ Push(x0);
// Get the bytecode array from the function object and load it into
// kInterpreterBytecodeArrayRegister.
__ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
Register debug_info = kInterpreterBytecodeArrayRegister;
Label load_debug_bytecode_array, bytecode_array_loaded;
DCHECK(!debug_info.is(x0));
__ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
__ Cmp(debug_info, Operand(DebugInfo::uninitialized()));
__ B(ne, &load_debug_bytecode_array);
__ Ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
__ Bind(&bytecode_array_loaded);
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@ -909,6 +891,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
// Push new.target, bytecode array and zero for bytecode array offset.
__ Mov(x0, Operand(0));
__ Push(x3, kInterpreterBytecodeArrayRegister, x0);
// Allocate the local and temporary register file on the stack.
{
// Load frame size from the BytecodeArray object.
@ -938,22 +924,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// TODO(rmcilroy): List of things not currently dealt with here but done in
// fullcodegen's prologue:
// - Support profiler (specifically profiling_counter).
// - Call ProfileEntryHookStub when isolate has a function_entry_hook.
// - Allow simulator stop operations if FLAG_stop_at is set.
// - Code aging of the BytecodeArray object.
// Perform stack guard check.
{
Label ok;
__ CompareRoot(jssp, Heap::kStackLimitRootIndex);
__ B(hs, &ok);
__ Push(kInterpreterBytecodeArrayRegister);
__ CallRuntime(Runtime::kStackGuard);
__ Pop(kInterpreterBytecodeArrayRegister);
__ Bind(&ok);
}
// Load accumulator, register file, bytecode offset, dispatch table into
// registers.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
@ -961,10 +934,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
__ Mov(kInterpreterBytecodeOffsetRegister,
Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
__ LoadRoot(kInterpreterDispatchTableRegister,
Heap::kInterpreterTableRootIndex);
__ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
// Dispatch to the first bytecode handler for the function.
__ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
@ -975,6 +947,15 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// and header removal.
__ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Call(ip0);
// Even though the first bytecode handler was called, we will never return.
__ Abort(kUnexpectedReturnFromBytecodeHandler);
// Load debug copy of the bytecode array.
__ Bind(&load_debug_bytecode_array);
__ Ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
__ B(&bytecode_array_loaded);
}
@ -998,47 +979,24 @@ void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
}
static void Generate_InterpreterNotifyDeoptimizedHelper(
MacroAssembler* masm, Deoptimizer::BailoutType type) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(kInterpreterAccumulatorRegister); // Save accumulator register.
// Pass the deoptimization type to the runtime system.
__ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
__ Push(x1);
__ CallRuntime(Runtime::kNotifyDeoptimized);
__ Pop(kInterpreterAccumulatorRegister); // Restore accumulator register.
// Tear down internal frame.
}
// Drop state (we don't use this for interpreter deopts).
__ Drop(1);
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
// Initialize register file register and dispatch table register.
__ Add(kInterpreterRegisterFileRegister, fp,
Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
__ LoadRoot(kInterpreterDispatchTableRegister,
Heap::kInterpreterTableRootIndex);
__ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
// Get the context from the frame.
// TODO(rmcilroy): Update interpreter frame to expect current context at the
// context slot instead of the function context.
__ Ldr(kContextRegister,
MemOperand(kInterpreterRegisterFileRegister,
InterpreterFrameConstants::kContextFromRegisterPointer));
// Get the bytecode array pointer from the frame.
__ Ldr(x1,
__ Ldr(
kInterpreterBytecodeArrayRegister,
MemOperand(kInterpreterRegisterFileRegister,
InterpreterFrameConstants::kFunctionFromRegisterPointer));
__ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset));
InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
@ -1066,6 +1024,29 @@ static void Generate_InterpreterNotifyDeoptimizedHelper(
}
static void Generate_InterpreterNotifyDeoptimizedHelper(
MacroAssembler* masm, Deoptimizer::BailoutType type) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the deoptimization type to the runtime system.
__ Mov(x1, Operand(Smi::FromInt(static_cast<int>(type))));
__ Push(x1);
__ CallRuntime(Runtime::kNotifyDeoptimized);
// Tear down internal frame.
}
// Drop state (we don't use these for interpreter deopts) and pop the
// accumulator value into the accumulator register.
__ Drop(1);
__ Pop(kInterpreterAccumulatorRegister);
// Enter the bytecode dispatch.
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
@ -1080,22 +1061,30 @@ void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// Set the address of the interpreter entry trampoline as a return address.
// This simulates the initial call to bytecode handlers in the interpreter
// entry trampoline. The return will never actually be taken, but our stack
// walker uses this address to determine whether a frame is interpreted.
__ LoadObject(lr, masm->isolate()->builtins()->InterpreterEntryTrampoline());
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileLazy);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm,
Runtime::kCompileOptimized_NotConcurrent);
}
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
GenerateTailCallToReturnedCode(masm);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
@ -1321,14 +1310,11 @@ static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
// Load the next prototype.
__ Bind(&next_prototype);
__ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
// End if the prototype is null or not hidden.
__ CompareRoot(receiver, Heap::kNullValueRootIndex);
__ B(eq, receiver_check_failed);
__ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
__ Tst(x16, Operand(Map::IsHiddenPrototype::kMask));
__ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
__ B(eq, receiver_check_failed);
__ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
__ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Iterate.
__ B(&prototype_loop_start);
@ -1868,10 +1854,8 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
// Try to create the list from an arguments object.
__ Bind(&create_arguments);
__ Ldrsw(len, UntagSmiFieldMemOperand(
arguments_list,
JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize));
__ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
JSArgumentsObject::kLengthOffset));
__ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
__ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
__ CompareAndBranch(len, x11, ne, &create_runtime);
@ -1953,10 +1937,136 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
namespace {
// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// | ...
// | g()'s arg M
// | ...
// | g()'s arg 1
// | g()'s receiver arg
// | g()'s caller pc
// ------- g()'s frame: -------
// | g()'s caller fp <- fp
// | g()'s context
// | function pointer: g
// | -------------------------
// | ...
// | ...
// | f()'s arg N
// | ...
// | f()'s arg 1
// | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
Register scratch1, Register scratch2,
Register scratch3) {
DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
Comment cmnt(masm, "[ PrepareForTailCall");
// Prepare for tail call only if the debugger is not active.
Label done;
ExternalReference debug_is_active =
ExternalReference::debug_is_active_address(masm->isolate());
__ Mov(scratch1, Operand(debug_is_active));
__ Ldrb(scratch1, MemOperand(scratch1));
__ Cmp(scratch1, Operand(0));
__ B(ne, &done);
// Drop possible interpreter handler/stub frame.
{
Label no_interpreter_frame;
__ Ldr(scratch3, MemOperand(fp, StandardFrameConstants::kMarkerOffset));
__ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
__ B(ne, &no_interpreter_frame);
__ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ bind(&no_interpreter_frame);
}
// Check if next frame is an arguments adaptor frame.
Label no_arguments_adaptor, formal_parameter_count_loaded;
__ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ Ldr(scratch3,
MemOperand(scratch2, StandardFrameConstants::kContextOffset));
__ Cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ B(ne, &no_arguments_adaptor);
// Drop arguments adaptor frame and load arguments count.
__ mov(fp, scratch2);
__ Ldr(scratch1,
MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(scratch1);
__ B(&formal_parameter_count_loaded);
__ bind(&no_arguments_adaptor);
// Load the caller's formal parameter count.
__ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ Ldr(scratch1,
FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(scratch1,
FieldMemOperand(scratch1,
SharedFunctionInfo::kFormalParameterCountOffset));
__ bind(&formal_parameter_count_loaded);
// Calculate the end of the destination area where we will put the arguments
// after we drop the current frame. We add kPointerSize to count the receiver
// argument, which is not included in the formal parameter count.
Register dst_reg = scratch2;
__ add(dst_reg, fp, Operand(scratch1, LSL, kPointerSizeLog2));
__ add(dst_reg, dst_reg,
Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));
Register src_reg = scratch1;
__ add(src_reg, jssp, Operand(args_reg, LSL, kPointerSizeLog2));
// Count receiver argument as well (not included in args_reg).
__ add(src_reg, src_reg, Operand(kPointerSize));
if (FLAG_debug_code) {
__ Cmp(src_reg, dst_reg);
__ Check(lo, kStackAccessBelowStackPointer);
}
// Restore caller's frame pointer and return address now as they will be
// overwritten by the copying loop.
__ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
__ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
// Now copy callee arguments to the caller frame going backwards to avoid
// corrupting the callee arguments (source and destination areas could overlap).
// Both src_reg and dst_reg are pointing to the word after the one to copy,
// so they must be pre-decremented in the loop.
Register tmp_reg = scratch3;
Label loop, entry;
__ B(&entry);
__ bind(&loop);
__ Ldr(tmp_reg, MemOperand(src_reg, -kPointerSize, PreIndex));
__ Str(tmp_reg, MemOperand(dst_reg, -kPointerSize, PreIndex));
__ bind(&entry);
__ Cmp(jssp, src_reg);
__ B(ne, &loop);
// Leave current frame.
__ Mov(jssp, dst_reg);
__ SetStackPointer(jssp);
__ AssertStackConsistency();
__ bind(&done);
}
} // namespace
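One detail worth calling out in PrepareForTailCall: the copy loop runs backwards, pre-decrementing both pointers from one past the end, because the destination slots in the caller's frame can overlap the source slots holding the freshly pushed arguments. Copying from high addresses down is what makes the overlap safe, the same guarantee memmove gives. A compact model of that loop:

#include <cstddef>
#include <cstdint>

// dst and src each point one word past their areas, with dst at a higher
// address than src; copying from the top down never overwrites a source word
// before it has been read, even when the areas overlap.
void CopyArgsDownward(uintptr_t* dst, uintptr_t* src, size_t count) {
  while (count-- > 0) {
    *--dst = *--src;  // the Ldr/Str pair with PreIndex addressing
  }
}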
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
ASM_LOCATION("Builtins::Generate_CallFunction");
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
@ -2044,6 +2154,10 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
// -- cp : the function context.
// -----------------------------------
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, x0, x3, x4, x5);
}
__ Ldrsw(
x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
ParameterCount actual(x0);
@ -2140,13 +2254,18 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// static
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSBoundFunction)
// -----------------------------------
__ AssertBoundFunction(x1);
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, x0, x3, x4, x5);
}
// Patch the receiver to [[BoundThis]].
__ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
__ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
@ -2165,7 +2284,8 @@ void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the target to call (can be any Object).
@ -2175,14 +2295,24 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
__ JumpIfSmi(x1, &non_callable);
__ Bind(&non_smi);
__ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallFunction(mode),
__ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
RelocInfo::CODE_TARGET, eq);
__ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallBoundFunction(),
__ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
RelocInfo::CODE_TARGET, eq);
// Check if target has a [[Call]] internal method.
__ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
__ Cmp(x5, JS_PROXY_TYPE);
__ B(ne, &non_function);
// 0. Prepare for tail call if necessary.
if (tail_call_mode == TailCallMode::kAllow) {
PrepareForTailCall(masm, x0, x3, x4, x5);
}
// 1. Runtime fallback for Proxy [[Call]].
__ Push(x1);
// Increase the arguments size to include the pushed function and the
@ -2195,15 +2325,12 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// 2. Call to something else, which might have a [[Call]] internal method (if
// not we raise an exception).
__ Bind(&non_function);
// Check if target has a [[Call]] internal method.
__ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
// Overwrite the original receiver with the (original) target.
__ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
// Let the "call_as_function_delegate" take care of the rest.
__ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
__ Jump(masm->isolate()->builtins()->CallFunction(
ConvertReceiverMode::kNotNullOrUndefined),
ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable.
@ -2341,7 +2468,8 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x2 : the address of the first argument to be pushed. Subsequent
@ -2369,7 +2497,9 @@ void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
__ B(gt, &loop_header);
// Call the target.
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
tail_call_mode),
RelocInfo::CODE_TARGET);
}

5046
deps/v8/src/arm64/code-stubs-arm64.cc

File diff suppressed because it is too large

5
deps/v8/src/arm64/cpu-arm64.cc

@ -19,8 +19,8 @@ class CacheLineSizes {
cache_type_register_ = 0;
#else
// Copy the content of the cache type register to a core register.
__asm__ __volatile__ ("mrs %[ctr], ctr_el0" // NOLINT
: [ctr] "=r" (cache_type_register_));
__asm__ __volatile__("mrs %[ctr], ctr_el0" // NOLINT
: [ctr] "=r"(cache_type_register_));
#endif
}
@ -37,7 +37,6 @@ class CacheLineSizes {
uint32_t cache_type_register_;
};
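CacheLineSizes reads CTR_EL0 once and derives both line sizes from it. Per the ARMv8 reference (an assumption here, not something this diff shows), IminLine sits in bits [3:0] and DminLine in bits [19:16], each holding log2 of the line length in 4-byte words, so the byte size comes out as 4 << field:

#include <cstdint>

// Decode CTR_EL0 fields; shift 0 selects IminLine, shift 16 selects DminLine.
inline uint32_t ExtractCacheLineSize(uint32_t ctr, int shift) {
  return 4u << ((ctr >> shift) & 0xFu);
}

// Example: a CTR_EL0 value of 0x84448004 decodes to a 64-byte icache line
// (IminLine = 4) and a 64-byte dcache line (DminLine = 4).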
void CpuFeatures::FlushICache(void* address, size_t length) {
#ifdef V8_HOST_ARCH_ARM64
// The code below assumes user space cache operations are allowed. The goal

25
deps/v8/src/arm64/deoptimizer-arm64.cc

@ -65,30 +65,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) {
}
void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
// Set the register values. The values are not important as there are no
// callee saved registers in JavaScript frames, so all registers are
// spilled. Registers fp and sp are set to the correct values though.
for (int i = 0; i < Register::NumRegisters(); i++) {
input_->SetRegister(i, 0);
}
// TODO(all): Do we also need to set a value to csp?
input_->SetRegister(jssp.code(), reinterpret_cast<intptr_t>(frame->sp()));
input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
input_->SetDoubleRegister(i, 0.0);
}
// Fill the frame content from the actual data on the frame.
for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
input_->SetFrameSlot(i, Memory::uint64_at(tos + i));
}
}
bool Deoptimizer::HasAlignmentPadding(JSFunction* function) {
bool Deoptimizer::HasAlignmentPadding(SharedFunctionInfo* shared) {
// There is no dynamic alignment padding on ARM64 in the input frame.
return false;
}

64
deps/v8/src/arm64/interface-descriptors-arm64.cc

@ -56,20 +56,6 @@ const Register StringCompareDescriptor::LeftRegister() { return x1; }
const Register StringCompareDescriptor::RightRegister() { return x0; }
const Register ArgumentsAccessReadDescriptor::index() { return x1; }
const Register ArgumentsAccessReadDescriptor::parameter_count() { return x0; }
const Register ArgumentsAccessNewDescriptor::function() { return x1; }
const Register ArgumentsAccessNewDescriptor::parameter_count() { return x2; }
const Register ArgumentsAccessNewDescriptor::parameter_pointer() { return x3; }
const Register RestParamAccessDescriptor::parameter_count() { return x2; }
const Register RestParamAccessDescriptor::parameter_pointer() { return x3; }
const Register RestParamAccessDescriptor::rest_parameter_index() { return x4; }
const Register ApiGetterDescriptor::function_address() { return x2; }
@ -98,6 +84,35 @@ void FastNewContextDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewObjectDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {x1, x3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewRestParameterDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1: function
Register registers[] = {x1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewSloppyArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1: function
Register registers[] = {x1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewStrictArgumentsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1: function
Register registers[] = {x1};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ToNumberDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
@ -115,6 +130,10 @@ const Register ToLengthDescriptor::ReceiverRegister() { return x0; }
const Register ToStringDescriptor::ReceiverRegister() { return x0; }
// static
const Register ToNameDescriptor::ReceiverRegister() { return x0; }
// static
const Register ToObjectDescriptor::ReceiverRegister() { return x0; }
@ -185,13 +204,6 @@ void CreateWeakCellDescriptor::InitializePlatformSpecific(
}
void StoreArrayLiteralElementDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {x3, x0};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallFunctionDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1 function the function to call
@ -465,6 +477,14 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
&default_descriptor);
}
void InterpreterDispatchDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
kInterpreterAccumulatorRegister, kInterpreterRegisterFileRegister,
kInterpreterBytecodeOffsetRegister, kInterpreterBytecodeArrayRegister,
kInterpreterDispatchTableRegister};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
@ -476,7 +496,6 @@ void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
@ -488,7 +507,6 @@ void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterCEntryDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

133
deps/v8/src/arm64/macro-assembler-arm64.cc

@ -1488,18 +1488,15 @@ void MacroAssembler::LoadAccessor(Register dst, Register holder,
}
void MacroAssembler::CheckEnumCache(Register object,
Register null_value,
Register scratch0,
Register scratch1,
Register scratch2,
Register scratch3,
void MacroAssembler::CheckEnumCache(Register object, Register scratch0,
Register scratch1, Register scratch2,
Register scratch3, Register scratch4,
Label* call_runtime) {
DCHECK(!AreAliased(object, null_value, scratch0, scratch1, scratch2,
scratch3));
DCHECK(!AreAliased(object, scratch0, scratch1, scratch2, scratch3, scratch4));
Register empty_fixed_array_value = scratch0;
Register current_object = scratch1;
Register null_value = scratch4;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Label next, start;
@ -1516,6 +1513,7 @@ void MacroAssembler::CheckEnumCache(Register object,
Cmp(enum_length, kInvalidEnumCacheSentinel);
B(eq, call_runtime);
LoadRoot(null_value, Heap::kNullValueRootIndex);
B(&start);
Bind(&next);
@ -1576,10 +1574,9 @@ void MacroAssembler::InNewSpace(Register object,
Label* branch) {
DCHECK(cond == eq || cond == ne);
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
And(temp, object, ExternalReference::new_space_mask(isolate()));
Cmp(temp, ExternalReference::new_space_start(isolate()));
B(cond, branch);
const int mask =
(1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
CheckPageFlag(object, temps.AcquireSameSizeAs(object), mask, cond, branch);
}
@ -1641,6 +1638,20 @@ void MacroAssembler::AssertBoundFunction(Register object) {
}
void MacroAssembler::AssertReceiver(Register object) {
if (emit_debug_code()) {
AssertNotSmi(object, kOperandIsASmiAndNotAReceiver);
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
CompareObjectType(object, temp, temp, FIRST_JS_RECEIVER_TYPE);
Check(hs, kOperandIsNotAReceiver);
}
}
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
Register scratch) {
if (emit_debug_code()) {
@ -1679,6 +1690,15 @@ void MacroAssembler::AssertPositiveOrZero(Register value) {
}
}
void MacroAssembler::AssertNumber(Register value) {
if (emit_debug_code()) {
Label done;
JumpIfSmi(value, &done);
JumpIfHeapNumber(value, &done);
Abort(kOperandIsNotANumber);
Bind(&done);
}
}
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
@ -1727,19 +1747,6 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
}
void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
const CallWrapper& call_wrapper) {
ASM_LOCATION("MacroAssembler::InvokeBuiltin");
// You can't call a builtin without a valid frame.
DCHECK(flag == JUMP_FUNCTION || has_frame());
// Fake a parameter count to avoid emitting code to do the check.
ParameterCount expected(0);
LoadNativeContextSlot(native_context_index, x1);
InvokeFunctionCode(x1, no_reg, expected, expected, flag, call_wrapper);
}
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
const Runtime::Function* function = Runtime::FunctionForId(fid);
DCHECK_EQ(1, function->result_size);
@ -2423,7 +2430,7 @@ void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
}
Push(fun);
Push(fun);
CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
Pop(fun);
if (new_target.is_valid()) {
Pop(new_target);
@ -3824,6 +3831,65 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Ldr(result, FieldMemOperand(scratch2, kValueOffset));
}
void MacroAssembler::RecordWriteCodeEntryField(Register js_function,
Register code_entry,
Register scratch) {
const int offset = JSFunction::kCodeEntryOffset;
// Since a code entry (value) is always in old space, we don't need to update
// the remembered set. If incremental marking is off, there is nothing for us
// to do.
if (!FLAG_incremental_marking) return;
DCHECK(js_function.is(x1));
DCHECK(code_entry.is(x7));
DCHECK(scratch.is(x5));
AssertNotSmi(js_function);
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
Add(scratch, js_function, offset - kHeapObjectTag);
Ldr(temp, MemOperand(scratch));
Cmp(temp, code_entry);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
Label done;
CheckPageFlagClear(code_entry, scratch,
MemoryChunk::kPointersToHereAreInterestingMask, &done);
CheckPageFlagClear(js_function, scratch,
MemoryChunk::kPointersFromHereAreInterestingMask, &done);
const Register dst = scratch;
Add(dst, js_function, offset - kHeapObjectTag);
// Save caller-saved registers. Both input registers (x1 and x7) are caller
// saved, so there is no need to push them.
PushCPURegList(kCallerSaved);
int argument_count = 3;
Mov(x0, js_function);
Mov(x1, dst);
Mov(x2, ExternalReference::isolate_address(isolate()));
{
AllowExternalCallThatCantCauseGC scope(this);
CallCFunction(
ExternalReference::incremental_marking_record_write_code_entry_function(
isolate()),
argument_count);
}
// Restore caller-saved registers.
PopCPURegList(kCallerSaved);
Bind(&done);
}
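The two CheckPageFlagClear tests above form the whole fast path: the out-of-line C call is needed only when the code object's page is flagged as interesting for incoming pointers and the function's page for outgoing ones; if either flag is clear, the store needs no incremental-marking bookkeeping. The filter reduces to:

#include <cstdint>

// Placeholder bit positions; the real masks are the MemoryChunk constants
// named in the code above.
constexpr uint32_t kPointersToHereAreInteresting = 1u << 5;
constexpr uint32_t kPointersFromHereAreInteresting = 1u << 6;

// True only when both pages are interesting, i.e. when the slow-path
// RecordWriteOfCodeEntryFromCode call actually has work to do.
inline bool NeedsCodeEntryBarrier(uint32_t value_page_flags,
                                  uint32_t holder_page_flags) {
  return (value_page_flags & kPointersToHereAreInteresting) != 0 &&
         (holder_page_flags & kPointersFromHereAreInteresting) != 0;
}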
void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
Register address,
@ -3938,6 +4004,17 @@ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
}
}
void MacroAssembler::CheckPageFlag(const Register& object,
const Register& scratch, int mask,
Condition cc, Label* condition_met) {
And(scratch, object, ~Page::kPageAlignmentMask);
Ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
if (cc == eq) {
TestAndBranchIfAnySet(scratch, mask, condition_met);
} else {
TestAndBranchIfAllClear(scratch, mask, condition_met);
}
}
void MacroAssembler::CheckPageFlagSet(const Register& object,
const Register& scratch,
@ -4409,9 +4486,9 @@ void MacroAssembler::Abort(BailoutReason reason) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(this, StackFrame::NONE);
CallRuntime(Runtime::kAbort, 1);
CallRuntime(Runtime::kAbort);
} else {
CallRuntime(Runtime::kAbort, 1);
CallRuntime(Runtime::kAbort);
}
} else {
// Load the string to pass to Printf.

28
deps/v8/src/arm64/macro-assembler-arm64.h

@ -34,9 +34,9 @@ namespace v8 {
namespace internal {
// Give alias names to registers for calling conventions.
// TODO(titzer): arm64 is a pain for aliasing; get rid of these macros
#define kReturnRegister0 x0
#define kReturnRegister1 x1
#define kReturnRegister2 x2
#define kJSFunctionRegister x1
#define kContextRegister cp
#define kInterpreterAccumulatorRegister x0
@ -970,6 +970,9 @@ class MacroAssembler : public Assembler {
// enabled via --debug-code.
void AssertBoundFunction(Register object);
// Abort execution if argument is not a JSReceiver, enabled via --debug-code.
void AssertReceiver(Register object);
// Abort execution if argument is not undefined or an AllocationSite, enabled
// via --debug-code.
void AssertUndefinedOrAllocationSite(Register object, Register scratch);
@ -981,6 +984,9 @@ class MacroAssembler : public Assembler {
// --debug-code.
void AssertPositiveOrZero(Register value);
// Abort execution if argument is not a number (heap number or smi).
void AssertNumber(Register value);
void JumpIfHeapNumber(Register object, Label* on_heap_number,
SmiCheckType smi_check_type = DONT_DO_SMI_CHECK);
void JumpIfNotHeapNumber(Register object, Label* on_not_heap_number,
@ -1138,10 +1144,6 @@ class MacroAssembler : public Assembler {
int num_arguments);
// Invoke specified builtin JavaScript function.
void InvokeBuiltin(int native_context_index, InvokeFlag flag,
const CallWrapper& call_wrapper = NullCallWrapper());
void Jump(Register target);
void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
@ -1586,12 +1588,8 @@ class MacroAssembler : public Assembler {
void LeaveFrame(StackFrame::Type type);
// Returns map with validated enum cache in object register.
void CheckEnumCache(Register object,
Register null_value,
Register scratch0,
Register scratch1,
Register scratch2,
Register scratch3,
void CheckEnumCache(Register object, Register scratch0, Register scratch1,
Register scratch2, Register scratch3, Register scratch4,
Label* call_runtime);
// AllocationMemento support. Arrays may have an associated
@ -1730,6 +1728,9 @@ class MacroAssembler : public Assembler {
Peek(src, SafepointRegisterStackIndex(dst.code()) * kPointerSize);
}
void CheckPageFlag(const Register& object, const Register& scratch, int mask,
Condition cc, Label* condition_met);
void CheckPageFlagSet(const Register& object,
const Register& scratch,
int mask,
@ -1793,6 +1794,11 @@ class MacroAssembler : public Assembler {
pointers_to_here_check_for_value);
}
// Notify the garbage collector that we wrote a code entry into a
// JSFunction. Only scratch is clobbered by the operation.
void RecordWriteCodeEntryField(Register js_function, Register code_entry,
Register scratch);
void RecordWriteForMap(
Register object,
Register map,

61
deps/v8/src/arm64/simulator-arm64.cc

@ -15,6 +15,7 @@
#include "src/disasm.h"
#include "src/macro-assembler.h"
#include "src/ostreams.h"
#include "src/runtime/runtime-utils.h"
namespace v8 {
namespace internal {
@ -533,12 +534,6 @@ void Simulator::TearDown(HashMap* i_cache, Redirection* first) {
// uses the ObjectPair structure.
// The simulator assumes all runtime calls return two 64-bit values. If they
// don't, register x1 is clobbered. This is fine because x1 is caller-saved.
struct ObjectPair {
int64_t res0;
int64_t res1;
};
typedef ObjectPair (*SimulatorRuntimeCall)(int64_t arg0,
int64_t arg1,
int64_t arg2,
@ -548,6 +543,11 @@ typedef ObjectPair (*SimulatorRuntimeCall)(int64_t arg0,
int64_t arg6,
int64_t arg7);
typedef ObjectTriple (*SimulatorRuntimeTripleCall)(int64_t arg0, int64_t arg1,
int64_t arg2, int64_t arg3,
int64_t arg4, int64_t arg5,
int64_t arg6, int64_t arg7);
typedef int64_t (*SimulatorRuntimeCompareCall)(double arg1, double arg2);
typedef double (*SimulatorRuntimeFPFPCall)(double arg1, double arg2);
typedef double (*SimulatorRuntimeFPCall)(double arg1);
@ -589,8 +589,10 @@ void Simulator::DoRuntimeCall(Instruction* instr) {
UNREACHABLE();
break;
case ExternalReference::BUILTIN_CALL: {
// Object* f(v8::internal::Arguments).
case ExternalReference::BUILTIN_CALL:
case ExternalReference::BUILTIN_CALL_PAIR: {
// Object* f(v8::internal::Arguments) or
// ObjectPair f(v8::internal::Arguments).
TraceSim("Type: BUILTIN_CALL\n");
SimulatorRuntimeCall target =
reinterpret_cast<SimulatorRuntimeCall>(external);
@ -607,13 +609,41 @@ void Simulator::DoRuntimeCall(Instruction* instr) {
xreg(4), xreg(5), xreg(6), xreg(7));
ObjectPair result = target(xreg(0), xreg(1), xreg(2), xreg(3),
xreg(4), xreg(5), xreg(6), xreg(7));
TraceSim("Returned: {0x%" PRIx64 ", 0x%" PRIx64 "}\n",
result.res0, result.res1);
TraceSim("Returned: {%p, %p}\n", result.x, result.y);
#ifdef DEBUG
CorruptAllCallerSavedCPURegisters();
#endif
set_xreg(0, reinterpret_cast<int64_t>(result.x));
set_xreg(1, reinterpret_cast<int64_t>(result.y));
break;
}
case ExternalReference::BUILTIN_CALL_TRIPLE: {
// ObjectTriple f(v8::internal::Arguments).
TraceSim("Type: BUILTIN_CALL TRIPLE\n");
SimulatorRuntimeTripleCall target =
reinterpret_cast<SimulatorRuntimeTripleCall>(external);
// We don't know how many arguments are being passed, but we can
// pass 8 without touching the stack. They will be ignored by the
// host function if they aren't used.
TraceSim(
"Arguments: "
"0x%016" PRIx64 ", 0x%016" PRIx64 ", "
"0x%016" PRIx64 ", 0x%016" PRIx64 ", "
"0x%016" PRIx64 ", 0x%016" PRIx64 ", "
"0x%016" PRIx64 ", 0x%016" PRIx64,
xreg(0), xreg(1), xreg(2), xreg(3), xreg(4), xreg(5), xreg(6),
xreg(7));
// Return location passed in x8.
ObjectTriple* sim_result = reinterpret_cast<ObjectTriple*>(xreg(8));
ObjectTriple result = target(xreg(0), xreg(1), xreg(2), xreg(3), xreg(4),
xreg(5), xreg(6), xreg(7));
TraceSim("Returned: {%p, %p, %p}\n", result.x, result.y, result.z);
#ifdef DEBUG
CorruptAllCallerSavedCPURegisters();
#endif
set_xreg(0, result.res0);
set_xreg(1, result.res1);
*sim_result = result;
break;
}
@ -1966,10 +1996,10 @@ void Simulator::VisitDataProcessing1Source(Instruction* instr) {
switch (instr->Mask(DataProcessing1SourceMask)) {
case RBIT_w:
set_wreg(dst, ReverseBits(wreg(src)));
set_wreg(dst, base::bits::ReverseBits(wreg(src)));
break;
case RBIT_x:
set_xreg(dst, ReverseBits(xreg(src)));
set_xreg(dst, base::bits::ReverseBits(xreg(src)));
break;
case REV16_w:
set_wreg(dst, ReverseBytes(wreg(src), 1));
@ -3510,7 +3540,8 @@ void Simulator::Debug() {
HeapObject* obj = reinterpret_cast<HeapObject*>(*cur);
int64_t value = *cur;
Heap* current_heap = isolate_->heap();
if (((value & 1) == 0) || current_heap->Contains(obj)) {
if (((value & 1) == 0) ||
current_heap->ContainsSlow(obj->address())) {
PrintF(" (");
if ((value & kSmiTagMask) == 0) {
STATIC_ASSERT(kSmiValueSize == 32);

13
deps/v8/src/arm64/utils-arm64.h

@ -54,19 +54,6 @@ uint64_t LargestPowerOf2Divisor(uint64_t value);
int MaskToBit(uint64_t mask);
template <typename T>
T ReverseBits(T value) {
DCHECK((sizeof(value) == 1) || (sizeof(value) == 2) || (sizeof(value) == 4) ||
(sizeof(value) == 8));
T result = 0;
for (unsigned i = 0; i < (sizeof(value) * 8); i++) {
result = (result << 1) | (value & 1);
value >>= 1;
}
return result;
}
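The hand-rolled ReverseBits above is dropped in favour of base::bits::ReverseBits, and the simulator hunks earlier in this diff switch their call sites accordingly. The removed version is the straightforward O(width) shift loop; for contrast, a branch-free 32-bit reversal of the kind such helpers often use, shown purely as an illustration and not as a claim about what base::bits implements:

#include <cstdint>

// Swap progressively larger groups: single bits, pairs, nibbles, bytes, then
// the two halfwords. Five steps instead of 32 loop iterations.
inline uint32_t ReverseBits32(uint32_t v) {
  v = ((v >> 1) & 0x55555555u) | ((v & 0x55555555u) << 1);
  v = ((v >> 2) & 0x33333333u) | ((v & 0x33333333u) << 2);
  v = ((v >> 4) & 0x0F0F0F0Fu) | ((v & 0x0F0F0F0Fu) << 4);
  v = ((v >> 8) & 0x00FF00FFu) | ((v & 0x00FF00FFu) << 8);
  return (v >> 16) | (v << 16);
}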
template <typename T>
T ReverseBytes(T value, int block_bytes_log2) {
DCHECK((sizeof(value) == 4) || (sizeof(value) == 8));

132
deps/v8/src/assembler.cc

@ -34,6 +34,7 @@
#include "src/assembler.h"
#include <math.h>
#include <cmath>
#include "src/api.h"
#include "src/base/cpu.h"
@ -50,8 +51,8 @@
#include "src/execution.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/interpreter/interpreter.h"
#include "src/ostreams.h"
#include "src/parsing/token.h"
#include "src/profiler/cpu-profiler.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
@ -265,8 +266,8 @@ CpuFeatureScope::~CpuFeatureScope() {
bool CpuFeatures::initialized_ = false;
unsigned CpuFeatures::supported_ = 0;
unsigned CpuFeatures::cache_line_size_ = 0;
unsigned CpuFeatures::icache_line_size_ = 0;
unsigned CpuFeatures::dcache_line_size_ = 0;
// -----------------------------------------------------------------------------
// Implementation of Label
@ -770,6 +771,9 @@ RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask)
// -----------------------------------------------------------------------------
// Implementation of RelocInfo
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
return DebugCodegen::DebugBreakSlotIsPatched(pc_);
}
#ifdef DEBUG
bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
@ -943,6 +947,20 @@ void RelocInfo::Verify(Isolate* isolate) {
// Implementation of ExternalReference
static ExternalReference::Type BuiltinCallTypeForResultSize(int result_size) {
switch (result_size) {
case 1:
return ExternalReference::BUILTIN_CALL;
case 2:
return ExternalReference::BUILTIN_CALL_PAIR;
case 3:
return ExternalReference::BUILTIN_CALL_TRIPLE;
}
UNREACHABLE();
return ExternalReference::BUILTIN_CALL;
}
void ExternalReference::SetUp() {
double_constants.min_int = kMinInt;
double_constants.one_half = 0.5;
@ -1025,18 +1043,23 @@ ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
ExternalReference::ExternalReference(Runtime::FunctionId id, Isolate* isolate)
: address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
: ExternalReference(Runtime::FunctionForId(id), isolate) {}
ExternalReference::ExternalReference(const Runtime::Function* f,
Isolate* isolate)
: address_(Redirect(isolate, f->entry)) {}
: address_(Redirect(isolate, f->entry,
BuiltinCallTypeForResultSize(f->result_size))) {}
ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
return ExternalReference(isolate);
}
ExternalReference ExternalReference::interpreter_dispatch_table_address(
Isolate* isolate) {
return ExternalReference(isolate->interpreter()->dispatch_table_address());
}
ExternalReference::ExternalReference(StatsCounter* counter)
: address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
@ -1057,9 +1080,16 @@ ExternalReference ExternalReference::
FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
}
ExternalReference
ExternalReference::incremental_marking_record_write_code_entry_function(
Isolate* isolate) {
return ExternalReference(Redirect(
isolate,
FUNCTION_ADDR(IncrementalMarking::RecordWriteOfCodeEntryFromCode)));
}
ExternalReference ExternalReference::
store_buffer_overflow_function(Isolate* isolate) {
ExternalReference ExternalReference::store_buffer_overflow_function(
Isolate* isolate) {
return ExternalReference(Redirect(
isolate,
FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
@ -1117,6 +1147,67 @@ ExternalReference ExternalReference::compute_output_frames_function(
Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
}
static void f32_trunc_wrapper(float* param) { *param = truncf(*param); }
ExternalReference ExternalReference::f32_trunc_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f32_trunc_wrapper)));
}
static void f32_floor_wrapper(float* param) { *param = floorf(*param); }
ExternalReference ExternalReference::f32_floor_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f32_floor_wrapper)));
}
static void f32_ceil_wrapper(float* param) { *param = ceilf(*param); }
ExternalReference ExternalReference::f32_ceil_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f32_ceil_wrapper)));
}
static void f32_nearest_int_wrapper(float* param) {
*param = nearbyintf(*param);
}
ExternalReference ExternalReference::f32_nearest_int_wrapper_function(
Isolate* isolate) {
return ExternalReference(
Redirect(isolate, FUNCTION_ADDR(f32_nearest_int_wrapper)));
}
static void f64_trunc_wrapper(double* param) { *param = trunc(*param); }
ExternalReference ExternalReference::f64_trunc_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_trunc_wrapper)));
}
static void f64_floor_wrapper(double* param) { *param = floor(*param); }
ExternalReference ExternalReference::f64_floor_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_floor_wrapper)));
}
static void f64_ceil_wrapper(double* param) { *param = ceil(*param); }
ExternalReference ExternalReference::f64_ceil_wrapper_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(f64_ceil_wrapper)));
}
static void f64_nearest_int_wrapper(double* param) {
*param = nearbyint(*param);
}
ExternalReference ExternalReference::f64_nearest_int_wrapper_function(
Isolate* isolate) {
return ExternalReference(
Redirect(isolate, FUNCTION_ADDR(f64_nearest_int_wrapper)));
}
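Each wrapper above adapts a math routine to an in-place, single-pointer signature, so the redirected call needs only one argument register regardless of float width. A hedged sketch of the pattern in isolation:

    #include <cmath>
    #include <cstdio>

    // Sketch of the in-place wrapper pattern used above: the caller passes a
    // pointer to the operand and reads the result back from the same slot
    // (illustration only; the real entry points are reached via Redirect()).
    static void f64_trunc_in_place(double* param) { *param = std::trunc(*param); }

    int main() {
      double v = 2.75;
      f64_trunc_in_place(&v);
      std::printf("%f\n", v);  // prints 2.000000
      return 0;
    }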
ExternalReference ExternalReference::log_enter_external_function(
Isolate* isolate) {
@ -1182,12 +1273,6 @@ ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
}
ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
return ExternalReference(reinterpret_cast<Address>(
isolate->heap()->NewSpaceMask()));
}
ExternalReference ExternalReference::new_space_allocation_top_address(
Isolate* isolate) {
return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
@ -1521,23 +1606,6 @@ ExternalReference ExternalReference::power_double_int_function(
}
bool EvalComparison(Token::Value op, double op1, double op2) {
DCHECK(Token::IsCompareOp(op));
switch (op) {
case Token::EQ:
case Token::EQ_STRICT: return (op1 == op2);
case Token::NE: return (op1 != op2);
case Token::LT: return (op1 < op2);
case Token::GT: return (op1 > op2);
case Token::LTE: return (op1 <= op2);
case Token::GTE: return (op1 >= op2);
default:
UNREACHABLE();
return false;
}
}
ExternalReference ExternalReference::mod_two_doubles_operation(
Isolate* isolate) {
return ExternalReference(Redirect(isolate,
@ -1837,11 +1905,9 @@ int ConstantPoolBuilder::Emit(Assembler* assm) {
// Platform specific but identical code for all the platforms.
void Assembler::RecordDeoptReason(const int reason,
const SourcePosition position) {
void Assembler::RecordDeoptReason(const int reason, int raw_position) {
if (FLAG_trace_deopt || isolate()->cpu_profiler()->is_profiling()) {
EnsureSpace ensure_space(this);
int raw_position = position.IsUnknown() ? 0 : position.raw();
RecordRelocInfo(RelocInfo::POSITION, raw_position);
RecordRelocInfo(RelocInfo::DEOPT_REASON, reason);
}

46
deps/v8/src/assembler.h

@ -38,7 +38,6 @@
#include "src/allocation.h"
#include "src/builtins.h"
#include "src/isolate.h"
#include "src/parsing/token.h"
#include "src/runtime/runtime.h"
namespace v8 {
@ -49,7 +48,6 @@ class ApiFunction;
namespace internal {
// Forward declarations.
class SourcePosition;
class StatsCounter;
// -----------------------------------------------------------------------------
@ -225,9 +223,14 @@ class CpuFeatures : public AllStatic {
static inline bool SupportsCrankshaft();
static inline unsigned cache_line_size() {
DCHECK(cache_line_size_ != 0);
return cache_line_size_;
static inline unsigned icache_line_size() {
DCHECK(icache_line_size_ != 0);
return icache_line_size_;
}
static inline unsigned dcache_line_size() {
DCHECK(dcache_line_size_ != 0);
return dcache_line_size_;
}
static void PrintTarget();
@ -243,7 +246,8 @@ class CpuFeatures : public AllStatic {
static void ProbeImpl(bool cross_compile);
static unsigned supported_;
static unsigned cache_line_size_;
static unsigned icache_line_size_;
static unsigned dcache_line_size_;
static bool initialized_;
DISALLOW_COPY_AND_ASSIGN(CpuFeatures);
};
@ -614,13 +618,9 @@ class RelocInfo {
template<typename StaticVisitor> inline void Visit(Heap* heap);
inline void Visit(Isolate* isolate, ObjectVisitor* v);
// Check whether this return sequence has been patched
// with a call to the debugger.
INLINE(bool IsPatchedReturnSequence());
// Check whether this debug break slot has been patched with a call to the
// debugger.
INLINE(bool IsPatchedDebugBreakSlotSequence());
bool IsPatchedDebugBreakSlotSequence();
#ifdef DEBUG
// Check whether the given code contains relocation information that
@ -819,6 +819,14 @@ class ExternalReference BASE_EMBEDDED {
// Object* f(v8::internal::Arguments).
BUILTIN_CALL, // default
// Builtin call returning object pair.
// ObjectPair f(v8::internal::Arguments).
BUILTIN_CALL_PAIR,
// Builtin call returning object triple.
// ObjectTriple f(v8::internal::Arguments).
BUILTIN_CALL_TRIPLE,
// Builtin that takes float arguments and returns an int.
// int f(double, double).
BUILTIN_COMPARE_CALL,
@ -885,8 +893,12 @@ class ExternalReference BASE_EMBEDDED {
// pattern. This means that they have to be added to the
// ExternalReferenceTable in serialize.cc manually.
static ExternalReference interpreter_dispatch_table_address(Isolate* isolate);
static ExternalReference incremental_marking_record_write_function(
Isolate* isolate);
static ExternalReference incremental_marking_record_write_code_entry_function(
Isolate* isolate);
static ExternalReference store_buffer_overflow_function(
Isolate* isolate);
static ExternalReference delete_handle_scope_extensions(Isolate* isolate);
@ -901,6 +913,15 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate* isolate);
static ExternalReference f32_trunc_wrapper_function(Isolate* isolate);
static ExternalReference f32_floor_wrapper_function(Isolate* isolate);
static ExternalReference f32_ceil_wrapper_function(Isolate* isolate);
static ExternalReference f32_nearest_int_wrapper_function(Isolate* isolate);
static ExternalReference f64_trunc_wrapper_function(Isolate* isolate);
static ExternalReference f64_floor_wrapper_function(Isolate* isolate);
static ExternalReference f64_ceil_wrapper_function(Isolate* isolate);
static ExternalReference f64_nearest_int_wrapper_function(Isolate* isolate);
// Log support.
static ExternalReference log_enter_external_function(Isolate* isolate);
static ExternalReference log_leave_external_function(Isolate* isolate);
@ -933,7 +954,6 @@ class ExternalReference BASE_EMBEDDED {
// Static variable Heap::NewSpaceStart()
static ExternalReference new_space_start(Isolate* isolate);
static ExternalReference new_space_mask(Isolate* isolate);
// Write barrier.
static ExternalReference store_buffer_top(Isolate* isolate);
@ -1120,8 +1140,6 @@ inline int NumberOfBitsSet(uint32_t x) {
return num_bits_set;
}
bool EvalComparison(Token::Value op, double op1, double op2);
// Computes pow(x, y) with the special cases in the spec for Math.pow.
double power_helper(Isolate* isolate, double x, double y);
double power_double_int(double x, int y);

1
deps/v8/src/ast/OWNERS

@ -5,3 +5,4 @@ bmeurer@chromium.org
littledan@chromium.org
mstarzinger@chromium.org
rossberg@chromium.org

6
deps/v8/src/ast/ast-expression-rewriter.cc

@ -398,10 +398,10 @@ void AstExpressionRewriter::VisitDoExpression(DoExpression* node) {
}
void AstExpressionRewriter::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void AstExpressionRewriter::VisitRewritableExpression(
RewritableExpression* node) {
REWRITE_THIS(node);
AST_REWRITE_PROPERTY(Expression, node, expression);
AST_REWRITE(Expression, node->expression(), node->Rewrite(replacement));
}

2
deps/v8/src/ast/ast-expression-rewriter.h

@ -8,9 +8,7 @@
#include "src/allocation.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/effects.h"
#include "src/type-info.h"
#include "src/types.h"
#include "src/zone.h"
namespace v8 {

5
deps/v8/src/ast/ast-expression-visitor.cc

@ -208,6 +208,7 @@ void AstExpressionVisitor::VisitNativeFunctionLiteral(
void AstExpressionVisitor::VisitDoExpression(DoExpression* expr) {
VisitExpression(expr);
RECURSE(VisitBlock(expr->block()));
RECURSE(VisitVariableProxy(expr->result()));
}
@ -399,8 +400,8 @@ void AstExpressionVisitor::VisitSuperCallReference(SuperCallReference* expr) {
}
void AstExpressionVisitor::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* expr) {
void AstExpressionVisitor::VisitRewritableExpression(
RewritableExpression* expr) {
VisitExpression(expr);
RECURSE(Visit(expr->expression()));
}

2
deps/v8/src/ast/ast-expression-visitor.h

@ -8,9 +8,7 @@
#include "src/allocation.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/effects.h"
#include "src/type-info.h"
#include "src/types.h"
#include "src/zone.h"
namespace v8 {

9
deps/v8/src/ast/ast-literal-reindexer.cc

@ -44,7 +44,8 @@ void AstLiteralReindexer::VisitNativeFunctionLiteral(
void AstLiteralReindexer::VisitDoExpression(DoExpression* node) {
// TODO(caitp): literals in do expressions need re-indexing too.
Visit(node->block());
Visit(node->result());
}
@ -76,8 +77,8 @@ void AstLiteralReindexer::VisitSuperCallReference(SuperCallReference* node) {
}
void AstLiteralReindexer::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void AstLiteralReindexer::VisitRewritableExpression(
RewritableExpression* node) {
Visit(node->expression());
}
@ -187,6 +188,8 @@ void AstLiteralReindexer::VisitCompareOperation(CompareOperation* node) {
void AstLiteralReindexer::VisitSpread(Spread* node) {
// This is reachable because ParserBase::ParseArrowFunctionLiteral calls
// ReindexLiterals before calling RewriteDestructuringAssignments.
Visit(node->expression());
}

17
deps/v8/src/ast/ast-numbering.cc

@ -306,7 +306,6 @@ void AstNumberingVisitor::VisitWhileStatement(WhileStatement* node) {
void AstNumberingVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
IncrementNodeCount();
DisableOptimization(kTryCatchStatement);
node->set_base_id(ReserveIdRange(TryCatchStatement::num_ids()));
Visit(node->try_block());
Visit(node->catch_block());
}
@ -315,7 +314,6 @@ void AstNumberingVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
void AstNumberingVisitor::VisitTryFinallyStatement(TryFinallyStatement* node) {
IncrementNodeCount();
DisableOptimization(kTryFinallyStatement);
node->set_base_id(ReserveIdRange(TryFinallyStatement::num_ids()));
Visit(node->try_block());
Visit(node->finally_block());
}
@ -372,11 +370,7 @@ void AstNumberingVisitor::VisitCompareOperation(CompareOperation* node) {
}
void AstNumberingVisitor::VisitSpread(Spread* node) {
IncrementNodeCount();
DisableCrankshaft(kSpread);
Visit(node->expression());
}
void AstNumberingVisitor::VisitSpread(Spread* node) { UNREACHABLE(); }
void AstNumberingVisitor::VisitEmptyParentheses(EmptyParentheses* node) {
@ -510,6 +504,9 @@ void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
void AstNumberingVisitor::VisitCall(Call* node) {
IncrementNodeCount();
if (node->tail_call_mode() == TailCallMode::kAllow) {
DisableOptimization(kTailCall);
}
ReserveFeedbackSlots(node);
node->set_base_id(ReserveIdRange(Call::num_ids()));
Visit(node->expression());
@ -557,10 +554,10 @@ void AstNumberingVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
}
void AstNumberingVisitor::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void AstNumberingVisitor::VisitRewritableExpression(
RewritableExpression* node) {
IncrementNodeCount();
node->set_base_id(ReserveIdRange(RewritableAssignmentExpression::num_ids()));
node->set_base_id(ReserveIdRange(RewritableExpression::num_ids()));
Visit(node->expression());
}

2
deps/v8/src/ast/ast-value-factory.cc

@ -172,6 +172,8 @@ void AstValue::Internalize(Isolate* isolate) {
if (symbol_name_[0] == 'i') {
DCHECK_EQ(0, strcmp(symbol_name_, "iterator_symbol"));
value_ = isolate->factory()->iterator_symbol();
} else if (strcmp(symbol_name_, "hasInstance_symbol") == 0) {
value_ = isolate->factory()->has_instance_symbol();
} else {
DCHECK_EQ(0, strcmp(symbol_name_, "home_object_symbol"));
value_ = isolate->factory()->home_object_symbol();

3
deps/v8/src/ast/ast-value-factory.h

@ -255,6 +255,7 @@ class AstValue : public ZoneObject {
F(dot_catch, ".catch") \
F(empty, "") \
F(eval, "eval") \
F(function, "function") \
F(get_space, "get ") \
F(let, "let") \
F(native, "native") \
@ -263,9 +264,11 @@ class AstValue : public ZoneObject {
F(proto, "__proto__") \
F(prototype, "prototype") \
F(rest_parameter, ".rest_parameter") \
F(return, "return") \
F(set_space, "set ") \
F(this, "this") \
F(this_function, ".this_function") \
F(throw, "throw") \
F(undefined, "undefined") \
F(use_asm, "use asm") \
F(use_strong, "use strong") \

43
deps/v8/src/ast/ast.cc

@ -5,6 +5,8 @@
#include "src/ast/ast.h"
#include <cmath> // For isfinite.
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
#include "src/builtins.h"
#include "src/code-stubs.h"
@ -32,6 +34,25 @@ AST_NODE_LIST(DECL_ACCEPT)
// ----------------------------------------------------------------------------
// Implementation of other node functionality.
#ifdef DEBUG
void AstNode::Print() { Print(Isolate::Current()); }
void AstNode::Print(Isolate* isolate) {
AstPrinter::PrintOut(isolate, this);
}
void AstNode::PrettyPrint() { PrettyPrint(Isolate::Current()); }
void AstNode::PrettyPrint(Isolate* isolate) {
PrettyPrinter::PrintOut(isolate, this);
}
#endif // DEBUG
bool Expression::IsSmiLiteral() const {
return IsLiteral() && AsLiteral()->value()->IsSmi();
@ -254,14 +275,21 @@ ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
}
}
bool ObjectLiteralProperty::NeedsSetFunctionName() const {
return is_computed_name_ &&
(value_->IsAnonymousFunctionDefinition() ||
(value_->IsFunctionLiteral() &&
IsConciseMethod(value_->AsFunctionLiteral()->kind())));
}
void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) {
// The logic computing the number of slots needed for vector store
// ICs must mirror FullCodeGenerator::VisitClassLiteral.
prototype_slot_ = spec->AddLoadICSlot();
if (NeedsProxySlot()) {
slot_ = spec->AddStoreICSlot();
proxy_slot_ = spec->AddStoreICSlot();
}
for (int i = 0; i < properties()->length(); i++) {
@ -476,10 +504,11 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
DCHECK_LT(first_spread_index_, 0);
if (!constant_elements_.is_null()) return;
int constants_length =
first_spread_index_ >= 0 ? first_spread_index_ : values()->length();
int constants_length = values()->length();
// Allocate a fixed array to hold all the object literals.
Handle<JSArray> array = isolate->factory()->NewJSArray(
@ -487,7 +516,7 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
Strength::WEAK, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
// Fill in the literals.
bool is_simple = (first_spread_index_ < 0);
bool is_simple = true;
int depth_acc = 1;
bool is_holey = false;
int array_index = 0;
@ -553,7 +582,7 @@ void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
int array_index = 0;
for (; array_index < values()->length(); array_index++) {
Expression* subexpr = values()->at(array_index);
if (subexpr->IsSpread()) break;
DCHECK(!subexpr->IsSpread());
if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
// We'll reuse the same literal slot for all of the non-constant
@ -797,14 +826,12 @@ void AstVisitor::VisitExpressions(ZoneList<Expression*>* expressions) {
}
}
CaseClause::CaseClause(Zone* zone, Expression* label,
ZoneList<Statement*>* statements, int pos)
: Expression(zone, pos),
label_(label),
statements_(statements),
compare_type_(Type::None(zone)) {}
compare_type_(Type::None()) {}
uint32_t Literal::Hash() {
return raw_value()->IsString()

358
deps/v8/src/ast/ast.h

@ -91,7 +91,7 @@ namespace internal {
V(CaseClause) \
V(EmptyParentheses) \
V(DoExpression) \
V(RewritableAssignmentExpression)
V(RewritableExpression)
#define AST_NODE_LIST(V) \
DECLARATION_NODE_LIST(V) \
@ -196,15 +196,18 @@ class AstNode: public ZoneObject {
virtual NodeType node_type() const = 0;
int position() const { return position_; }
#ifdef DEBUG
void PrettyPrint(Isolate* isolate);
void PrettyPrint();
void Print(Isolate* isolate);
void Print();
#endif // DEBUG
// Type testing & conversion functions overridden by concrete subclasses.
#define DECLARE_NODE_FUNCTIONS(type) \
bool Is##type() const { return node_type() == AstNode::k##type; } \
type* As##type() { \
return Is##type() ? reinterpret_cast<type*>(this) : NULL; \
} \
const type* As##type() const { \
return Is##type() ? reinterpret_cast<const type*>(this) : NULL; \
}
V8_INLINE bool Is##type() const; \
V8_INLINE type* As##type(); \
V8_INLINE const type* As##type() const;
AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
#undef DECLARE_NODE_FUNCTIONS
@ -237,7 +240,6 @@ class Statement : public AstNode {
bool IsEmpty() { return AsEmptyStatement() != NULL; }
virtual bool IsJump() const { return false; }
virtual void MarkTail() {}
};
@ -317,6 +319,10 @@ class Expression : public AstNode {
// names because [] for string objects is handled only by keyed ICs.
virtual bool IsPropertyName() const { return false; }
// True iff the expression is a class or function expression without
// a syntactic name.
virtual bool IsAnonymousFunctionDefinition() const { return false; }
// True iff the expression is a literal represented as a smi.
bool IsSmiLiteral() const;
@ -365,14 +371,6 @@ class Expression : public AstNode {
BailoutId id() const { return BailoutId(local_id(0)); }
TypeFeedbackId test_id() const { return TypeFeedbackId(local_id(1)); }
// Parenthesized expressions in the form `( Expression )`.
void set_is_parenthesized() {
bit_field_ = ParenthesizedField::update(bit_field_, true);
}
bool is_parenthesized() const {
return ParenthesizedField::decode(bit_field_);
}
protected:
Expression(Zone* zone, int pos)
: AstNode(pos),
@ -395,8 +393,6 @@ class Expression : public AstNode {
int base_id_;
Bounds bounds_;
class ToBooleanTypesField : public BitField16<uint16_t, 0, 9> {};
class ParenthesizedField
: public BitField16<bool, ToBooleanTypesField::kNext, 1> {};
uint16_t bit_field_;
// Ends with 16-bit field; deriving classes in turn begin with
// 16-bit fields for optimum packing efficiency.
@ -471,10 +467,6 @@ class Block final : public BreakableStatement {
&& labels() == NULL; // Good enough as an approximation...
}
void MarkTail() override {
if (!statements_.is_empty()) statements_.last()->MarkTail();
}
Scope* scope() const { return scope_; }
void set_scope(Scope* scope) { scope_ = scope; }
@ -505,8 +497,6 @@ class DoExpression final : public Expression {
VariableProxy* result() { return result_; }
void set_result(VariableProxy* v) { result_ = v; }
void MarkTail() override { block_->MarkTail(); }
protected:
DoExpression(Zone* zone, Block* block, VariableProxy* result, int pos)
: Expression(zone, pos), block_(block), result_(result) {
@ -555,24 +545,10 @@ class VariableDeclaration final : public Declaration {
return mode() == VAR ? kCreatedInitialized : kNeedsInitialization;
}
bool is_class_declaration() const { return is_class_declaration_; }
// VariableDeclarations can be grouped into consecutive declaration
// groups. Each VariableDeclaration is associated with the start position of
// the group it belongs to. The positions are used for strong mode scope
// checks for classes and functions.
int declaration_group_start() const { return declaration_group_start_; }
protected:
VariableDeclaration(Zone* zone, VariableProxy* proxy, VariableMode mode,
Scope* scope, int pos, bool is_class_declaration = false,
int declaration_group_start = -1)
: Declaration(zone, proxy, mode, scope, pos),
is_class_declaration_(is_class_declaration),
declaration_group_start_(declaration_group_start) {}
bool is_class_declaration_;
int declaration_group_start_;
Scope* scope, int pos)
: Declaration(zone, proxy, mode, scope, pos) {}
};
@ -820,6 +796,10 @@ class ForEachStatement : public IterationStatement {
FeedbackVectorSlotCache* cache) override;
FeedbackVectorSlot EachFeedbackSlot() const { return each_slot_; }
static const char* VisitModeString(VisitMode mode) {
return mode == ITERATE ? "for-of" : "for-in";
}
protected:
ForEachStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos)
: IterationStatement(zone, labels, pos), each_(NULL), subject_(NULL) {}
@ -857,9 +837,9 @@ class ForInStatement final : public ForEachStatement {
static int num_ids() { return parent_num_ids() + 6; }
BailoutId BodyId() const { return BailoutId(local_id(0)); }
BailoutId PrepareId() const { return BailoutId(local_id(1)); }
BailoutId EnumId() const { return BailoutId(local_id(2)); }
BailoutId ToObjectId() const { return BailoutId(local_id(3)); }
BailoutId EnumId() const { return BailoutId(local_id(1)); }
BailoutId ToObjectId() const { return BailoutId(local_id(2)); }
BailoutId PrepareId() const { return BailoutId(local_id(3)); }
BailoutId FilterId() const { return BailoutId(local_id(4)); }
BailoutId AssignmentId() const { return BailoutId(local_id(5)); }
BailoutId ContinueId() const override { return EntryId(); }
@ -885,11 +865,13 @@ class ForOfStatement final : public ForEachStatement {
void Initialize(Expression* each,
Expression* subject,
Statement* body,
Variable* iterator,
Expression* assign_iterator,
Expression* next_result,
Expression* result_done,
Expression* assign_each) {
ForEachStatement::Initialize(each, subject, body);
iterator_ = iterator;
assign_iterator_ = assign_iterator;
next_result_ = next_result;
result_done_ = result_done;
@ -900,6 +882,10 @@ class ForOfStatement final : public ForEachStatement {
return subject();
}
Variable* iterator() const {
return iterator_;
}
// iterator = subject[Symbol.iterator]()
Expression* assign_iterator() const {
return assign_iterator_;
@ -934,6 +920,7 @@ class ForOfStatement final : public ForEachStatement {
protected:
ForOfStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos)
: ForEachStatement(zone, labels, pos),
iterator_(NULL),
assign_iterator_(NULL),
next_result_(NULL),
result_done_(NULL),
@ -943,6 +930,7 @@ class ForOfStatement final : public ForEachStatement {
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
Variable* iterator_;
Expression* assign_iterator_;
Expression* next_result_;
Expression* result_done_;
@ -957,7 +945,6 @@ class ExpressionStatement final : public Statement {
void set_expression(Expression* e) { expression_ = e; }
Expression* expression() const { return expression_; }
bool IsJump() const override { return expression_->IsThrow(); }
void MarkTail() override { expression_->MarkTail(); }
protected:
ExpressionStatement(Zone* zone, Expression* expression, int pos)
@ -1039,8 +1026,6 @@ class WithStatement final : public Statement {
BailoutId ToObjectId() const { return BailoutId(local_id(0)); }
BailoutId EntryId() const { return BailoutId(local_id(1)); }
void MarkTail() override { statement_->MarkTail(); }
protected:
WithStatement(Zone* zone, Scope* scope, Expression* expression,
Statement* statement, int pos)
@ -1083,10 +1068,6 @@ class CaseClause final : public Expression {
BailoutId EntryId() const { return BailoutId(local_id(0)); }
TypeFeedbackId CompareId() { return TypeFeedbackId(local_id(1)); }
void MarkTail() override {
if (!statements_->is_empty()) statements_->last()->MarkTail();
}
Type* compare_type() { return compare_type_; }
void set_compare_type(Type* type) { compare_type_ = type; }
@ -1119,10 +1100,6 @@ class SwitchStatement final : public BreakableStatement {
void set_tag(Expression* t) { tag_ = t; }
void MarkTail() override {
if (!cases_->is_empty()) cases_->last()->MarkTail();
}
protected:
SwitchStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos)
: BreakableStatement(zone, labels, TARGET_FOR_ANONYMOUS, pos),
@ -1160,11 +1137,6 @@ class IfStatement final : public Statement {
&& HasElseStatement() && else_statement()->IsJump();
}
void MarkTail() override {
then_statement_->MarkTail();
else_statement_->MarkTail();
}
void set_base_id(int id) { base_id_ = id; }
static int num_ids() { return parent_num_ids() + 3; }
BailoutId IfId() const { return BailoutId(local_id(0)); }
@ -1201,27 +1173,12 @@ class TryStatement : public Statement {
Block* try_block() const { return try_block_; }
void set_try_block(Block* b) { try_block_ = b; }
void set_base_id(int id) { base_id_ = id; }
static int num_ids() { return parent_num_ids() + 1; }
BailoutId HandlerId() const { return BailoutId(local_id(0)); }
protected:
TryStatement(Zone* zone, Block* try_block, int pos)
: Statement(zone, pos),
try_block_(try_block),
base_id_(BailoutId::None().ToInt()) {}
static int parent_num_ids() { return 0; }
int base_id() const {
DCHECK(!BailoutId(base_id_).IsNone());
return base_id_;
}
: Statement(zone, pos), try_block_(try_block) {}
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
Block* try_block_;
int base_id_;
};
@ -1234,8 +1191,6 @@ class TryCatchStatement final : public TryStatement {
Block* catch_block() const { return catch_block_; }
void set_catch_block(Block* b) { catch_block_ = b; }
void MarkTail() override { catch_block_->MarkTail(); }
protected:
TryCatchStatement(Zone* zone, Block* try_block, Scope* scope,
Variable* variable, Block* catch_block, int pos)
@ -1258,8 +1213,6 @@ class TryFinallyStatement final : public TryStatement {
Block* finally_block() const { return finally_block_; }
void set_finally_block(Block* b) { finally_block_ = b; }
void MarkTail() override { finally_block_->MarkTail(); }
protected:
TryFinallyStatement(Zone* zone, Block* try_block, Block* finally_block,
int pos)
@ -1472,6 +1425,8 @@ class ObjectLiteralProperty final : public ZoneObject {
void set_receiver_type(Handle<Map> map) { receiver_type_ = map; }
bool NeedsSetFunctionName() const;
protected:
friend class AstNodeFactory;
@ -1510,6 +1465,9 @@ class ObjectLiteral final : public MaterializedLiteral {
bool may_store_doubles() const { return may_store_doubles_; }
bool has_function() const { return has_function_; }
bool has_elements() const { return has_elements_; }
bool has_shallow_properties() const {
return depth() == 1 && !has_elements() && !may_store_doubles();
}
// Decide if a property should be in the object boilerplate.
static bool IsBoilerplateProperty(Property* property);
@ -1526,7 +1484,7 @@ class ObjectLiteral final : public MaterializedLiteral {
int ComputeFlags(bool disable_mementos = false) const {
int flags = fast_elements() ? kFastElements : kNoFlags;
flags |= has_function() ? kHasFunction : kNoFlags;
if (depth() == 1 && !has_elements() && !may_store_doubles()) {
if (has_shallow_properties()) {
flags |= kShallowProperties;
}
if (disable_mementos) {
@ -1683,6 +1641,19 @@ class ArrayLiteral final : public MaterializedLiteral {
return flags;
}
// Provide a mechanism for iterating through values to rewrite spreads.
ZoneList<Expression*>::iterator FirstSpread() const {
return (first_spread_index_ >= 0) ? values_->begin() + first_spread_index_
: values_->end();
}
ZoneList<Expression*>::iterator EndValue() const { return values_->end(); }
// Rewind an array literal omitting everything from the first spread on.
void RewindSpreads() {
values_->Rewind(first_spread_index_);
first_spread_index_ = -1;
}
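Together, FirstSpread(), EndValue() and RewindSpreads() let a rewriter constant-fold the prefix before the first spread and desugar the tail separately. A hypothetical model of that protocol using a plain vector (not the actual V8 call site, which operates on a ZoneList):

    #include <vector>

    // Hypothetical model of the protocol; the real ArrayLiteral stores values
    // in a ZoneList and tracks first_spread_index_ internally.
    struct Expr { bool is_spread; };

    void RewriteSpreadTail(std::vector<Expr*>* values, int first_spread_index) {
      // Desugar everything from the first spread onwards (the range
      // FirstSpread() up to EndValue())...
      for (size_t i = static_cast<size_t>(first_spread_index);
           i < values->size(); ++i) {
        // ... rewrite (*values)[i] into explicit element appends ...
      }
      // ...then drop the tail, as RewindSpreads() does.
      values->resize(static_cast<size_t>(first_spread_index));
    }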
enum Flags {
kNoFlags = 0,
kShallowElements = 1,
@ -1975,7 +1946,10 @@ class Call final : public Expression {
bit_field_ = IsUninitializedField::update(bit_field_, b);
}
bool is_tail() const { return IsTailField::decode(bit_field_); }
TailCallMode tail_call_mode() const {
return IsTailField::decode(bit_field_) ? TailCallMode::kAllow
: TailCallMode::kDisallow;
}
void MarkTail() override {
bit_field_ = IsTailField::update(bit_field_, true);
}
@ -2349,7 +2323,7 @@ class CompareOperation final : public Expression {
op_(op),
left_(left),
right_(right),
combined_type_(Type::None(zone)) {
combined_type_(Type::None()) {
DCHECK(Token::IsCompareOp(op));
}
static int parent_num_ids() { return Expression::num_ids(); }
@ -2372,17 +2346,20 @@ class Spread final : public Expression {
Expression* expression() const { return expression_; }
void set_expression(Expression* e) { expression_ = e; }
int expression_position() const { return expr_pos_; }
static int num_ids() { return parent_num_ids(); }
protected:
Spread(Zone* zone, Expression* expression, int pos)
: Expression(zone, pos), expression_(expression) {}
Spread(Zone* zone, Expression* expression, int pos, int expr_pos)
: Expression(zone, pos), expression_(expression), expr_pos_(expr_pos) {}
static int parent_num_ids() { return Expression::num_ids(); }
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
Expression* expression_;
int expr_pos_;
};
@ -2505,18 +2482,32 @@ class Assignment final : public Expression {
};
class RewritableAssignmentExpression : public Expression {
// The RewritableExpression class is a wrapper for AST nodes that wait
// for some potential rewriting. However, even if such nodes are indeed
// rewritten, the RewritableExpression wrapper nodes will survive in the
// final AST and should simply be ignored, i.e., they should be treated as
// equivalent to the wrapped nodes. For this reason, and to simplify later
// phases, RewritableExpressions are treated as exceptional AST nodes
// in the following sense:
//
// 1. IsRewritableExpression and AsRewritableExpression behave as usual.
// 2. All other Is* and As* methods are practically delegated to the
// wrapped node, i.e. IsArrayLiteral() will return true iff the
// wrapped node is an array literal.
//
// Furthermore, an invariant that should be respected is that the wrapped
// node is not a RewritableExpression.
class RewritableExpression : public Expression {
public:
DECLARE_NODE_TYPE(RewritableAssignmentExpression)
DECLARE_NODE_TYPE(RewritableExpression)
Expression* expression() { return expr_; }
Expression* expression() const { return expr_; }
bool is_rewritten() const { return is_rewritten_; }
void set_expression(Expression* e) { expr_ = e; }
void Rewrite(Expression* new_expression) {
DCHECK(!is_rewritten());
DCHECK_NOT_NULL(new_expression);
DCHECK(!new_expression->IsRewritableExpression());
expr_ = new_expression;
is_rewritten_ = true;
}
@ -2524,10 +2515,12 @@ class RewritableAssignmentExpression : public Expression {
static int num_ids() { return parent_num_ids(); }
protected:
RewritableAssignmentExpression(Zone* zone, Expression* expression)
RewritableExpression(Zone* zone, Expression* expression)
: Expression(zone, expression->position()),
is_rewritten_(false),
expr_(expression) {}
expr_(expression) {
DCHECK(!expression->IsRewritableExpression());
}
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
@ -2555,26 +2548,6 @@ class Yield final : public Expression {
void set_generator_object(Expression* e) { generator_object_ = e; }
void set_expression(Expression* e) { expression_ = e; }
// Type feedback information.
bool HasFeedbackSlots() const { return yield_kind() == kDelegating; }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) override {
if (HasFeedbackSlots()) {
yield_first_feedback_slot_ = spec->AddKeyedLoadICSlot();
keyed_load_feedback_slot_ = spec->AddLoadICSlot();
done_feedback_slot_ = spec->AddLoadICSlot();
}
}
FeedbackVectorSlot KeyedLoadFeedbackSlot() {
DCHECK(!HasFeedbackSlots() || !yield_first_feedback_slot_.IsInvalid());
return yield_first_feedback_slot_;
}
FeedbackVectorSlot DoneFeedbackSlot() { return keyed_load_feedback_slot_; }
FeedbackVectorSlot ValueFeedbackSlot() { return done_feedback_slot_; }
protected:
Yield(Zone* zone, Expression* generator_object, Expression* expression,
Kind yield_kind, int pos)
@ -2587,9 +2560,6 @@ class Yield final : public Expression {
Expression* generator_object_;
Expression* expression_;
Kind yield_kind_;
FeedbackVectorSlot yield_first_feedback_slot_;
FeedbackVectorSlot keyed_load_feedback_slot_;
FeedbackVectorSlot done_feedback_slot_;
};
@ -2615,15 +2585,13 @@ class FunctionLiteral final : public Expression {
kAnonymousExpression,
kNamedExpression,
kDeclaration,
kGlobalOrEval
kAccessorOrMethod
};
enum ParameterFlag { kNoDuplicateParameters, kHasDuplicateParameters };
enum EagerCompileHint { kShouldEagerCompile, kShouldLazyCompile };
enum ArityRestriction { kNormalArity, kGetterArity, kSetterArity };
DECLARE_NODE_TYPE(FunctionLiteral)
Handle<String> name() const { return raw_name_->string(); }
@ -2636,8 +2604,13 @@ class FunctionLiteral final : public Expression {
int start_position() const;
int end_position() const;
int SourceSize() const { return end_position() - start_position(); }
bool is_expression() const { return IsExpression::decode(bitfield_); }
bool is_anonymous() const { return IsAnonymous::decode(bitfield_); }
bool is_declaration() const { return IsDeclaration::decode(bitfield_); }
bool is_named_expression() const {
return IsNamedExpression::decode(bitfield_);
}
bool is_anonymous_expression() const {
return IsAnonymousExpression::decode(bitfield_);
}
LanguageMode language_mode() const;
static bool NeedsHomeObject(Expression* expr);
@ -2729,6 +2702,10 @@ class FunctionLiteral final : public Expression {
dont_optimize_reason_ = reason;
}
bool IsAnonymousFunctionDefinition() const final {
return is_anonymous_expression();
}
protected:
FunctionLiteral(Zone* zone, const AstString* name,
AstValueFactory* ast_value_factory, Scope* scope,
@ -2737,7 +2714,7 @@ class FunctionLiteral final : public Expression {
FunctionType function_type,
ParameterFlag has_duplicate_parameters,
EagerCompileHint eager_compile_hint, FunctionKind kind,
int position)
int position, bool is_function)
: Expression(zone, position),
raw_name_(name),
scope_(scope),
@ -2750,26 +2727,28 @@ class FunctionLiteral final : public Expression {
parameter_count_(parameter_count),
function_token_position_(RelocInfo::kNoPosition) {
bitfield_ =
IsExpression::encode(function_type != kDeclaration) |
IsAnonymous::encode(function_type == kAnonymousExpression) |
IsDeclaration::encode(function_type == kDeclaration) |
IsNamedExpression::encode(function_type == kNamedExpression) |
IsAnonymousExpression::encode(function_type == kAnonymousExpression) |
Pretenure::encode(false) |
HasDuplicateParameters::encode(has_duplicate_parameters ==
kHasDuplicateParameters) |
IsFunction::encode(function_type != kGlobalOrEval) |
IsFunction::encode(is_function) |
ShouldEagerCompile::encode(eager_compile_hint == kShouldEagerCompile) |
FunctionKindBits::encode(kind) | ShouldBeUsedOnceHint::encode(false);
DCHECK(IsValidFunctionKind(kind));
}
private:
class IsExpression : public BitField16<bool, 0, 1> {};
class IsAnonymous : public BitField16<bool, 1, 1> {};
class Pretenure : public BitField16<bool, 2, 1> {};
class HasDuplicateParameters : public BitField16<bool, 3, 1> {};
class IsFunction : public BitField16<bool, 4, 1> {};
class ShouldEagerCompile : public BitField16<bool, 5, 1> {};
class FunctionKindBits : public BitField16<FunctionKind, 6, 8> {};
class ShouldBeUsedOnceHint : public BitField16<bool, 15, 1> {};
class IsDeclaration : public BitField16<bool, 0, 1> {};
class IsNamedExpression : public BitField16<bool, 1, 1> {};
class IsAnonymousExpression : public BitField16<bool, 2, 1> {};
class Pretenure : public BitField16<bool, 3, 1> {};
class HasDuplicateParameters : public BitField16<bool, 4, 1> {};
class IsFunction : public BitField16<bool, 5, 1> {};
class ShouldEagerCompile : public BitField16<bool, 6, 1> {};
class ShouldBeUsedOnceHint : public BitField16<bool, 7, 1> {};
class FunctionKindBits : public BitField16<FunctionKind, 8, 8> {};
// Start with 16-bit field, which should get packed together
// with Expression's trailing 16-bit field.
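The bit-field classes above pack all FunctionLiteral flags into a single uint16_t. A minimal stand-in showing the assumed BitField16 encode/decode/update semantics (the real template is V8-internal; this is an illustration, not its source):

    #include <cstdint>

    // Minimal stand-in for the assumed BitField16 semantics.
    template <class T, int shift, int size>
    struct BitField16 {
      static const uint16_t kMask =
          static_cast<uint16_t>(((1 << size) - 1) << shift);
      static uint16_t encode(T value) {
        return static_cast<uint16_t>(static_cast<uint16_t>(value) << shift);
      }
      static T decode(uint16_t field) {
        return static_cast<T>((field & kMask) >> shift);
      }
      static uint16_t update(uint16_t field, T value) {
        return static_cast<uint16_t>((field & ~kMask) | encode(value));
      }
    };

    // E.g. with the new layout, IsDeclaration occupies bit 0 and
    // FunctionKindBits occupies bits 8-15 of the same uint16_t.
    using IsDeclarationBit = BitField16<bool, 0, 1>;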
@ -2796,13 +2775,6 @@ class ClassLiteral final : public Expression {
DECLARE_NODE_TYPE(ClassLiteral)
Handle<String> name() const { return raw_name_->string(); }
const AstRawString* raw_name() const { return raw_name_; }
void set_raw_name(const AstRawString* name) {
DCHECK_NULL(raw_name_);
raw_name_ = name;
}
Scope* scope() const { return scope_; }
VariableProxy* class_variable_proxy() const { return class_variable_proxy_; }
Expression* extends() const { return extends_; }
@ -2817,13 +2789,14 @@ class ClassLiteral final : public Expression {
BailoutId DeclsId() const { return BailoutId(local_id(1)); }
BailoutId ExitId() { return BailoutId(local_id(2)); }
BailoutId CreateLiteralId() const { return BailoutId(local_id(3)); }
BailoutId PrototypeId() { return BailoutId(local_id(4)); }
// Return an AST id for a property that is used in simulate instructions.
BailoutId GetIdForProperty(int i) { return BailoutId(local_id(i + 4)); }
BailoutId GetIdForProperty(int i) { return BailoutId(local_id(i + 5)); }
// Unlike other AST nodes, the number of bailout IDs allocated for a
// ClassLiteral can vary, so num_ids() is not a static method.
int num_ids() const { return parent_num_ids() + 4 + properties()->length(); }
int num_ids() const { return parent_num_ids() + 5 + properties()->length(); }
// Object literals need one feedback slot for each non-trivial value, as well
// as some slots for home objects.
@ -2835,15 +2808,19 @@ class ClassLiteral final : public Expression {
class_variable_proxy()->var()->IsUnallocated();
}
FeedbackVectorSlot ProxySlot() const { return slot_; }
FeedbackVectorSlot PrototypeSlot() const { return prototype_slot_; }
FeedbackVectorSlot ProxySlot() const { return proxy_slot_; }
bool IsAnonymousFunctionDefinition() const final {
return constructor()->raw_name()->length() == 0;
}
protected:
ClassLiteral(Zone* zone, const AstRawString* name, Scope* scope,
VariableProxy* class_variable_proxy, Expression* extends,
FunctionLiteral* constructor, ZoneList<Property*>* properties,
int start_position, int end_position)
ClassLiteral(Zone* zone, Scope* scope, VariableProxy* class_variable_proxy,
Expression* extends, FunctionLiteral* constructor,
ZoneList<Property*>* properties, int start_position,
int end_position)
: Expression(zone, start_position),
raw_name_(name),
scope_(scope),
class_variable_proxy_(class_variable_proxy),
extends_(extends),
@ -2856,14 +2833,14 @@ class ClassLiteral final : public Expression {
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
const AstRawString* raw_name_;
Scope* scope_;
VariableProxy* class_variable_proxy_;
Expression* extends_;
FunctionLiteral* constructor_;
ZoneList<Property*>* properties_;
int end_position_;
FeedbackVectorSlot slot_;
FeedbackVectorSlot prototype_slot_;
FeedbackVectorSlot proxy_slot_;
};
@ -3095,12 +3072,11 @@ class AstNodeFactory final BASE_EMBEDDED {
AstValueFactory* ast_value_factory() const { return ast_value_factory_; }
VariableDeclaration* NewVariableDeclaration(
VariableProxy* proxy, VariableMode mode, Scope* scope, int pos,
bool is_class_declaration = false, int declaration_group_start = -1) {
VariableDeclaration* NewVariableDeclaration(VariableProxy* proxy,
VariableMode mode, Scope* scope,
int pos) {
return new (parser_zone_)
VariableDeclaration(parser_zone_, proxy, mode, scope, pos,
is_class_declaration, declaration_group_start);
VariableDeclaration(parser_zone_, proxy, mode, scope, pos);
}
FunctionDeclaration* NewFunctionDeclaration(VariableProxy* proxy,
@ -3389,8 +3365,8 @@ class AstNodeFactory final BASE_EMBEDDED {
CompareOperation(local_zone_, op, left, right, pos);
}
Spread* NewSpread(Expression* expression, int pos) {
return new (local_zone_) Spread(local_zone_, expression, pos);
Spread* NewSpread(Expression* expression, int pos, int expr_pos) {
return new (local_zone_) Spread(local_zone_, expression, pos, expr_pos);
}
Conditional* NewConditional(Expression* condition,
@ -3401,12 +3377,9 @@ class AstNodeFactory final BASE_EMBEDDED {
local_zone_, condition, then_expression, else_expression, position);
}
RewritableAssignmentExpression* NewRewritableAssignmentExpression(
Expression* expression) {
RewritableExpression* NewRewritableExpression(Expression* expression) {
DCHECK_NOT_NULL(expression);
DCHECK(expression->IsAssignment());
return new (local_zone_)
RewritableAssignmentExpression(local_zone_, expression);
return new (local_zone_) RewritableExpression(local_zone_, expression);
}
Assignment* NewAssignment(Token::Value op,
@ -3449,16 +3422,31 @@ class AstNodeFactory final BASE_EMBEDDED {
parser_zone_, name, ast_value_factory_, scope, body,
materialized_literal_count, expected_property_count, parameter_count,
function_type, has_duplicate_parameters, eager_compile_hint, kind,
position);
position, true);
}
ClassLiteral* NewClassLiteral(const AstRawString* name, Scope* scope,
VariableProxy* proxy, Expression* extends,
// Creates a FunctionLiteral representing a top-level script, the
// result of an eval (top-level or otherwise), or the result of calling
// the Function constructor.
FunctionLiteral* NewScriptOrEvalFunctionLiteral(
Scope* scope, ZoneList<Statement*>* body, int materialized_literal_count,
int expected_property_count) {
return new (parser_zone_) FunctionLiteral(
parser_zone_, ast_value_factory_->empty_string(), ast_value_factory_,
scope, body, materialized_literal_count, expected_property_count, 0,
FunctionLiteral::kAnonymousExpression,
FunctionLiteral::kNoDuplicateParameters,
FunctionLiteral::kShouldLazyCompile, FunctionKind::kNormalFunction, 0,
false);
}
ClassLiteral* NewClassLiteral(Scope* scope, VariableProxy* proxy,
Expression* extends,
FunctionLiteral* constructor,
ZoneList<ObjectLiteral::Property*>* properties,
int start_position, int end_position) {
return new (parser_zone_)
ClassLiteral(parser_zone_, name, scope, proxy, extends, constructor,
ClassLiteral(parser_zone_, scope, proxy, extends, constructor,
properties, start_position, end_position);
}
@ -3529,6 +3517,46 @@ class AstNodeFactory final BASE_EMBEDDED {
};
// Type testing & conversion functions overridden by concrete subclasses.
// Inline functions for AstNode.
#define DECLARE_NODE_FUNCTIONS(type) \
bool AstNode::Is##type() const { \
NodeType mine = node_type(); \
if (mine == AstNode::kRewritableExpression && \
AstNode::k##type != AstNode::kRewritableExpression) \
mine = reinterpret_cast<const RewritableExpression*>(this) \
->expression() \
->node_type(); \
return mine == AstNode::k##type; \
} \
type* AstNode::As##type() { \
NodeType mine = node_type(); \
AstNode* result = this; \
if (mine == AstNode::kRewritableExpression && \
AstNode::k##type != AstNode::kRewritableExpression) { \
result = \
reinterpret_cast<const RewritableExpression*>(this)->expression(); \
mine = result->node_type(); \
} \
return mine == AstNode::k##type ? reinterpret_cast<type*>(result) : NULL; \
} \
const type* AstNode::As##type() const { \
NodeType mine = node_type(); \
const AstNode* result = this; \
if (mine == AstNode::kRewritableExpression && \
AstNode::k##type != AstNode::kRewritableExpression) { \
result = \
reinterpret_cast<const RewritableExpression*>(this)->expression(); \
mine = result->node_type(); \
} \
return mine == AstNode::k##type ? reinterpret_cast<const type*>(result) \
: NULL; \
}
AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
#undef DECLARE_NODE_FUNCTIONS
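The macro above implements the delegation described in the RewritableExpression comment: type tests and casts see through the wrapper, except for the RewritableExpression test itself. A self-contained miniature of that behavior (hypothetical types, not the V8 classes):

    // Miniature of the delegation the macro implements.
    enum NodeType { kArrayLiteral, kRewritableExpression };

    struct Node {
      NodeType type;
      Node* wrapped;  // non-null only for kRewritableExpression
      bool IsArrayLiteral() const {
        const Node* n = this;
        if (n->type == kRewritableExpression) n = n->wrapped;  // see through
        return n->type == kArrayLiteral;
      }
      bool IsRewritableExpression() const {  // the one non-delegated test
        return type == kRewritableExpression;
      }
    };
    // A wrapper around an array literal answers true to both queries, matching
    // points 1 and 2 in the RewritableExpression comment above.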
} // namespace internal
} // namespace v8

1
deps/v8/src/ast/modules.cc

@ -13,7 +13,6 @@ namespace internal {
void ModuleDescriptor::AddLocalExport(const AstRawString* export_name,
const AstRawString* local_name,
Zone* zone, bool* ok) {
DCHECK(!IsFrozen());
void* key = const_cast<AstRawString*>(export_name);
ZoneAllocationPolicy allocator(zone);

19
deps/v8/src/ast/modules.h

@ -26,8 +26,7 @@ class ModuleDescriptor : public ZoneObject {
// ---------------------------------------------------------------------------
// Mutators.
// Add a name to the list of exports. If it already exists, or this descriptor
// is frozen, that's an error.
// Add a name to the list of exports. If it already exists, that's an error.
void AddLocalExport(const AstRawString* export_name,
const AstRawString* local_name, Zone* zone, bool* ok);
@ -35,30 +34,22 @@ class ModuleDescriptor : public ZoneObject {
// if not already present.
void AddModuleRequest(const AstRawString* module_specifier, Zone* zone);
// Do not allow any further refinements, directly or through unification.
void Freeze() { frozen_ = true; }
// Assign an index.
void Allocate(int index) {
DCHECK(IsFrozen() && index_ == -1);
DCHECK_EQ(-1, index_);
index_ = index;
}
// ---------------------------------------------------------------------------
// Accessors.
// Check whether this is closed (i.e. fully determined).
bool IsFrozen() { return frozen_; }
int Length() {
DCHECK(IsFrozen());
ZoneHashMap* exports = exports_;
return exports ? exports->occupancy() : 0;
}
// The context slot in the hosting script context pointing to this module.
int Index() {
DCHECK(IsFrozen());
return index_;
}
@ -104,12 +95,8 @@ class ModuleDescriptor : public ZoneObject {
// Implementation.
private:
explicit ModuleDescriptor(Zone* zone)
: frozen_(false),
exports_(NULL),
requested_modules_(1, zone),
index_(-1) {}
: exports_(NULL), requested_modules_(1, zone), index_(-1) {}
bool frozen_;
ZoneHashMap* exports_; // Module exports and their types (allocated lazily)
ZoneList<const AstRawString*> requested_modules_;
int index_;

42
deps/v8/src/ast/prettyprinter.cc

@ -412,8 +412,7 @@ void CallPrinter::VisitSuperCallReference(SuperCallReference* node) {
}
void CallPrinter::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void CallPrinter::VisitRewritableExpression(RewritableExpression* node) {
Find(node->expression());
}
@ -719,7 +718,7 @@ void PrettyPrinter::VisitFunctionLiteral(FunctionLiteral* node) {
void PrettyPrinter::VisitClassLiteral(ClassLiteral* node) {
Print("(class ");
PrintLiteral(node->name(), false);
PrintLiteral(node->constructor()->name(), false);
if (node->extends()) {
Print(" extends ");
Visit(node->extends());
@ -929,8 +928,7 @@ void PrettyPrinter::VisitSuperCallReference(SuperCallReference* node) {
}
void PrettyPrinter::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void PrettyPrinter::VisitRewritableExpression(RewritableExpression* node) {
Visit(node->expression());
}
@ -1203,6 +1201,14 @@ const char* AstPrinter::PrintProgram(FunctionLiteral* program) {
}
void AstPrinter::PrintOut(Isolate* isolate, AstNode* node) {
AstPrinter printer(isolate);
printer.Init();
printer.Visit(node);
PrintF("%s", printer.Output());
}
void AstPrinter::PrintDeclarations(ZoneList<Declaration*>* declarations) {
if (declarations->length() > 0) {
IndentedScope indent(this, "DECLS");
@ -1390,6 +1396,10 @@ void AstPrinter::VisitForOfStatement(ForOfStatement* node) {
PrintIndentedVisit("FOR", node->each());
PrintIndentedVisit("OF", node->iterable());
PrintIndentedVisit("BODY", node->body());
PrintIndentedVisit("INIT", node->assign_iterator());
PrintIndentedVisit("NEXT", node->next_result());
PrintIndentedVisit("EACH", node->assign_each());
PrintIndentedVisit("DONE", node->result_done());
}
@ -1429,9 +1439,7 @@ void AstPrinter::VisitFunctionLiteral(FunctionLiteral* node) {
void AstPrinter::VisitClassLiteral(ClassLiteral* node) {
IndentedScope indent(this, "CLASS LITERAL", node->position());
if (node->raw_name() != nullptr) {
PrintLiteralIndented("NAME", node->name(), false);
}
PrintLiteralIndented("NAME", node->constructor()->name(), false);
if (node->extends() != nullptr) {
PrintIndentedVisit("EXTENDS", node->extends());
}
@ -1544,11 +1552,15 @@ void AstPrinter::VisitArrayLiteral(ArrayLiteral* node) {
void AstPrinter::VisitVariableProxy(VariableProxy* node) {
Variable* var = node->var();
EmbeddedVector<char, 128> buf;
int pos =
FormatSlotNode(&buf, node, "VAR PROXY", node->VariableFeedbackSlot());
if (!node->is_resolved()) {
SNPrintF(buf + pos, " unresolved");
PrintLiteralWithModeIndented(buf.start(), nullptr, node->name());
} else {
Variable* var = node->var();
switch (var->location()) {
case VariableLocation::UNALLOCATED:
break;
@ -1569,6 +1581,7 @@ void AstPrinter::VisitVariableProxy(VariableProxy* node) {
break;
}
PrintLiteralWithModeIndented(buf.start(), var, node->name());
}
}
@ -1580,7 +1593,9 @@ void AstPrinter::VisitAssignment(Assignment* node) {
void AstPrinter::VisitYield(Yield* node) {
IndentedScope indent(this, "YIELD", node->position());
EmbeddedVector<char, 128> buf;
SNPrintF(buf, "YIELD (kind %d)", node->yield_kind());
IndentedScope indent(this, buf.start(), node->position());
Visit(node->expression());
}
@ -1608,7 +1623,9 @@ void AstPrinter::VisitProperty(Property* node) {
void AstPrinter::VisitCall(Call* node) {
EmbeddedVector<char, 128> buf;
FormatSlotNode(&buf, node, "CALL", node->CallFeedbackICSlot());
const char* name =
node->tail_call_mode() == TailCallMode::kAllow ? "TAIL CALL" : "CALL";
FormatSlotNode(&buf, node, name, node->CallFeedbackICSlot());
IndentedScope indent(this, buf.start());
Visit(node->expression());
@ -1686,8 +1703,7 @@ void AstPrinter::VisitSuperCallReference(SuperCallReference* node) {
}
void AstPrinter::VisitRewritableAssignmentExpression(
RewritableAssignmentExpression* node) {
void AstPrinter::VisitRewritableExpression(RewritableExpression* node) {
Visit(node->expression());
}

3
deps/v8/src/ast/prettyprinter.h

@ -104,6 +104,9 @@ class AstPrinter: public PrettyPrinter {
const char* PrintProgram(FunctionLiteral* program);
// Print a node to stdout.
static void PrintOut(Isolate* isolate, AstNode* node);
// Individual nodes
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)

76
deps/v8/src/ast/scopeinfo.cc

@ -19,16 +19,12 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
ZoneList<Variable*> stack_locals(scope->StackLocalCount(), zone);
ZoneList<Variable*> context_locals(scope->ContextLocalCount(), zone);
ZoneList<Variable*> context_globals(scope->ContextGlobalCount(), zone);
ZoneList<Variable*> strong_mode_free_variables(0, zone);
scope->CollectStackAndContextLocals(&stack_locals, &context_locals,
&context_globals,
&strong_mode_free_variables);
&context_globals);
const int stack_local_count = stack_locals.length();
const int context_local_count = context_locals.length();
const int context_global_count = context_globals.length();
const int strong_mode_free_variable_count =
strong_mode_free_variables.length();
// Make sure we allocate the correct amount.
DCHECK_EQ(scope->ContextLocalCount(), context_local_count);
DCHECK_EQ(scope->ContextGlobalCount(), context_global_count);
@ -77,7 +73,6 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
const int length = kVariablePartIndex + parameter_count +
(1 + stack_local_count) + 2 * context_local_count +
2 * context_global_count +
3 * strong_mode_free_variable_count +
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
Factory* factory = isolate->factory();
@ -104,7 +99,6 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
scope_info->SetStackLocalCount(stack_local_count);
scope_info->SetContextLocalCount(context_local_count);
scope_info->SetContextGlobalCount(context_global_count);
scope_info->SetStrongModeFreeVariableCount(strong_mode_free_variable_count);
int index = kVariablePartIndex;
// Add parameters.
@ -173,25 +167,6 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
scope_info->set(index++, Smi::FromInt(value));
}
DCHECK(index == scope_info->StrongModeFreeVariableNameEntriesIndex());
for (int i = 0; i < strong_mode_free_variable_count; ++i) {
scope_info->set(index++, *strong_mode_free_variables[i]->name());
}
DCHECK(index == scope_info->StrongModeFreeVariablePositionEntriesIndex());
for (int i = 0; i < strong_mode_free_variable_count; ++i) {
// Unfortunately, the source code positions are stored as int even though
// int32_t would be enough (given the maximum source code length).
Handle<Object> start_position = factory->NewNumberFromInt(
static_cast<int32_t>(strong_mode_free_variables[i]
->strong_mode_reference_start_position()));
scope_info->set(index++, *start_position);
Handle<Object> end_position = factory->NewNumberFromInt(
static_cast<int32_t>(strong_mode_free_variables[i]
->strong_mode_reference_end_position()));
scope_info->set(index++, *end_position);
}
// If the receiver is allocated, add its index.
DCHECK(index == scope_info->ReceiverEntryIndex());
if (has_receiver) {
@ -226,7 +201,6 @@ Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
const int stack_local_count = 0;
const int context_local_count = 1;
const int context_global_count = 0;
const int strong_mode_free_variable_count = 0;
const bool has_simple_parameters = true;
const VariableAllocationInfo receiver_info = CONTEXT;
const VariableAllocationInfo function_name_info = NONE;
@ -237,7 +211,6 @@ Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
const int length = kVariablePartIndex + parameter_count +
(1 + stack_local_count) + 2 * context_local_count +
2 * context_global_count +
3 * strong_mode_free_variable_count +
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
Factory* factory = isolate->factory();
@ -259,7 +232,6 @@ Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
scope_info->SetStackLocalCount(stack_local_count);
scope_info->SetContextLocalCount(context_local_count);
scope_info->SetContextGlobalCount(context_global_count);
scope_info->SetStrongModeFreeVariableCount(strong_mode_free_variable_count);
int index = kVariablePartIndex;
const int first_slot_index = 0;
@ -276,9 +248,6 @@ Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
ContextLocalMaybeAssignedFlag::encode(kNotAssigned);
scope_info->set(index++, Smi::FromInt(value));
DCHECK(index == scope_info->StrongModeFreeVariableNameEntriesIndex());
DCHECK(index == scope_info->StrongModeFreeVariablePositionEntriesIndex());
// And here we record that this scopeinfo binds a receiver.
DCHECK(index == scope_info->ReceiverEntryIndex());
const int receiver_index = Context::MIN_CONTEXT_SLOTS + 0;
@ -482,35 +451,6 @@ bool ScopeInfo::LocalIsSynthetic(int var) {
}
String* ScopeInfo::StrongModeFreeVariableName(int var) {
DCHECK(0 <= var && var < StrongModeFreeVariableCount());
int info_index = StrongModeFreeVariableNameEntriesIndex() + var;
return String::cast(get(info_index));
}
int ScopeInfo::StrongModeFreeVariableStartPosition(int var) {
DCHECK(0 <= var && var < StrongModeFreeVariableCount());
int info_index = StrongModeFreeVariablePositionEntriesIndex() + var * 2;
int32_t value = 0;
bool ok = get(info_index)->ToInt32(&value);
USE(ok);
DCHECK(ok);
return value;
}
int ScopeInfo::StrongModeFreeVariableEndPosition(int var) {
DCHECK(0 <= var && var < StrongModeFreeVariableCount());
int info_index = StrongModeFreeVariablePositionEntriesIndex() + var * 2 + 1;
int32_t value = 0;
bool ok = get(info_index)->ToInt32(&value);
USE(ok);
DCHECK(ok);
return value;
}
int ScopeInfo::StackSlotIndex(String* name) {
DCHECK(name->IsInternalizedString());
if (length() > 0) {
@ -691,20 +631,8 @@ int ScopeInfo::ContextGlobalInfoEntriesIndex() {
}
int ScopeInfo::StrongModeFreeVariableNameEntriesIndex() {
return ContextGlobalInfoEntriesIndex() + ContextGlobalCount();
}
int ScopeInfo::StrongModeFreeVariablePositionEntriesIndex() {
return StrongModeFreeVariableNameEntriesIndex() +
StrongModeFreeVariableCount();
}
int ScopeInfo::ReceiverEntryIndex() {
return StrongModeFreeVariablePositionEntriesIndex() +
2 * StrongModeFreeVariableCount();
return ContextGlobalInfoEntriesIndex() + ContextGlobalCount();
}

170
deps/v8/src/ast/scopes.cc

@ -27,12 +27,10 @@ VariableMap::VariableMap(Zone* zone)
zone_(zone) {}
VariableMap::~VariableMap() {}
Variable* VariableMap::Declare(Scope* scope, const AstRawString* name,
VariableMode mode, Variable::Kind kind,
InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag,
int declaration_group_start) {
MaybeAssignedFlag maybe_assigned_flag) {
// AstRawStrings are unambiguous, i.e., the same string is always represented
// by the same AstRawString*.
// FIXME(marja): fix the type of Lookup.
@ -42,14 +40,8 @@ Variable* VariableMap::Declare(Scope* scope, const AstRawString* name,
if (p->value == NULL) {
// The variable has not been declared yet -> insert it.
DCHECK(p->key == name);
if (kind == Variable::CLASS) {
p->value = new (zone())
ClassVariable(scope, name, mode, initialization_flag,
maybe_assigned_flag, declaration_group_start);
} else {
p->value = new (zone()) Variable(
scope, name, mode, kind, initialization_flag, maybe_assigned_flag);
}
p->value = new (zone()) Variable(scope, name, mode, kind,
initialization_flag, maybe_assigned_flag);
}
return reinterpret_cast<Variable*>(p->value);
}
@ -103,8 +95,7 @@ Scope::Scope(Zone* zone, Scope* outer_scope, ScopeType scope_type,
sloppy_block_function_map_(zone),
already_resolved_(false),
ast_value_factory_(ast_value_factory),
zone_(zone),
class_declaration_group_start_(-1) {
zone_(zone) {
SetDefaults(scope_type, outer_scope, Handle<ScopeInfo>::null(),
function_kind);
// The outermost scope must be a script scope.
@ -112,7 +103,6 @@ Scope::Scope(Zone* zone, Scope* outer_scope, ScopeType scope_type,
DCHECK(!HasIllegalRedeclaration());
}
Scope::Scope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
Handle<ScopeInfo> scope_info, AstValueFactory* value_factory)
: inner_scopes_(4, zone),
@ -125,8 +115,7 @@ Scope::Scope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
sloppy_block_function_map_(zone),
already_resolved_(true),
ast_value_factory_(value_factory),
zone_(zone),
class_declaration_group_start_(-1) {
zone_(zone) {
SetDefaults(scope_type, NULL, scope_info);
if (!scope_info.is_null()) {
num_heap_slots_ = scope_info_->ContextLength();
@ -137,7 +126,6 @@ Scope::Scope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
AddInnerScope(inner_scope);
}
Scope::Scope(Zone* zone, Scope* inner_scope,
const AstRawString* catch_variable_name,
AstValueFactory* value_factory)
@ -151,8 +139,7 @@ Scope::Scope(Zone* zone, Scope* inner_scope,
sloppy_block_function_map_(zone),
already_resolved_(true),
ast_value_factory_(value_factory),
zone_(zone),
class_declaration_group_start_(-1) {
zone_(zone) {
SetDefaults(CATCH_SCOPE, NULL, Handle<ScopeInfo>::null());
AddInnerScope(inner_scope);
++num_var_or_const_;
@ -528,19 +515,17 @@ Variable* Scope::DeclareParameter(
return var;
}
Variable* Scope::DeclareLocal(const AstRawString* name, VariableMode mode,
InitializationFlag init_flag, Variable::Kind kind,
MaybeAssignedFlag maybe_assigned_flag,
int declaration_group_start) {
MaybeAssignedFlag maybe_assigned_flag) {
DCHECK(!already_resolved());
// This function handles VAR, LET, and CONST modes. DYNAMIC variables are
// introduces during variable allocation, and TEMPORARY variables are
// introduced during variable allocation, and TEMPORARY variables are
// allocated via NewTemporary().
DCHECK(IsDeclaredVariableMode(mode));
++num_var_or_const_;
return variables_.Declare(this, name, mode, kind, init_flag,
maybe_assigned_flag, declaration_group_start);
maybe_assigned_flag);
}
@ -660,11 +645,9 @@ class VarAndOrder {
int order_;
};
void Scope::CollectStackAndContextLocals(
ZoneList<Variable*>* stack_locals, ZoneList<Variable*>* context_locals,
ZoneList<Variable*>* context_globals,
ZoneList<Variable*>* strong_mode_free_variables) {
void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
ZoneList<Variable*>* context_locals,
ZoneList<Variable*>* context_globals) {
DCHECK(stack_locals != NULL);
DCHECK(context_locals != NULL);
DCHECK(context_globals != NULL);
@ -691,11 +674,6 @@ void Scope::CollectStackAndContextLocals(
p != NULL;
p = variables_.Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value);
if (strong_mode_free_variables && var->has_strong_mode_reference() &&
var->mode() == DYNAMIC_GLOBAL) {
strong_mode_free_variables->Add(var, zone());
}
if (var->is_used()) {
vars.Add(VarAndOrder(var, p->order), zone());
}
@ -1017,9 +995,7 @@ void Scope::Print(int n) {
if (HasTrivialOuterContext()) {
Indent(n1, "// scope has trivial outer context\n");
}
if (is_strong(language_mode())) {
Indent(n1, "// strong mode scope\n");
} else if (is_strict(language_mode())) {
if (is_strict(language_mode())) {
Indent(n1, "// strict mode scope\n");
}
if (scope_inside_with_) Indent(n1, "// scope inside 'with'\n");
@ -1204,10 +1180,6 @@ bool Scope::ResolveVariable(ParseInfo* info, VariableProxy* proxy,
switch (binding_kind) {
case BOUND:
// We found a variable binding.
if (is_strong(language_mode())) {
if (!CheckStrongModeDeclaration(proxy, var)) return false;
}
break;
case BOUND_EVAL_SHADOWED:
@ -1245,126 +1217,12 @@ bool Scope::ResolveVariable(ParseInfo* info, VariableProxy* proxy,
DCHECK(var != NULL);
if (proxy->is_assigned()) var->set_maybe_assigned();
if (is_strong(language_mode())) {
// Record that the variable is referred to from strong mode. Also, record
// the position.
var->RecordStrongModeReference(proxy->position(), proxy->end_position());
}
proxy->BindTo(var);
return true;
}
bool Scope::CheckStrongModeDeclaration(VariableProxy* proxy, Variable* var) {
// Check for declaration-after use (for variables) in strong mode. Note that
// we can only do this in the case where we have seen the declaration. And we
// always allow referencing functions (for now).
// This might happen during lazy compilation; we don't keep track of
// initializer positions for variables stored in ScopeInfo, so we cannot check
// bindings against them. TODO(marja, rossberg): remove this hack.
if (var->initializer_position() == RelocInfo::kNoPosition) return true;
// Allow referencing the class name from methods of that class, even though
// the initializer position for class names is only after the body.
Scope* scope = this;
while (scope) {
if (scope->ClassVariableForMethod() == var) return true;
scope = scope->outer_scope();
}
// Allow references from methods to classes declared later, if we detect no
// problematic dependency cycles. Note that we can be inside multiple methods
// at the same time, and it's enough if we find one where the reference is
// allowed.
if (var->is_class() &&
var->AsClassVariable()->declaration_group_start() >= 0) {
for (scope = this; scope && scope != var->scope();
scope = scope->outer_scope()) {
ClassVariable* class_var = scope->ClassVariableForMethod();
// A method is referring to some other class, possibly declared
// later. Referring to a class declared earlier is always OK and covered
// by the code outside this if. Here we only need to allow special cases
// for referring to a class which is declared later.
// Referring to a class C declared later is OK under the following
// circumstances:
// 1. The class declarations are in a consecutive group with no other
// declarations or statements in between, and
// 2. There is no dependency cycle where the first edge is an
// initialization time dependency (computed property name or extends
// clause) from C to something that depends on this class directly or
// transitively.
if (class_var &&
class_var->declaration_group_start() ==
var->AsClassVariable()->declaration_group_start()) {
return true;
}
// TODO(marja,rossberg): implement the dependency cycle detection. Here we
// undershoot the target and allow referring to any class in the same
// consecutive declaration group.
// The cycle detection can work roughly like this: 1) detect init-time
// references here (they are free variables which are inside the class
// scope but not inside a method scope - no parser changes needed to
// detect them) 2) if we encounter an init-time reference here, allow it,
// but record it for a later dependency cycle check 3) also record
// non-init-time references here 4) after scope analysis is done, analyse
// the dependency cycles: an illegal cycle is one starting with an
// init-time reference and leading back to the starting point with either
// non-init-time and init-time references.
}
}
// If both the use and the declaration are inside an eval scope (possibly
// indirectly), or one of them is, we need to check whether they are inside
// the same eval scope or different ones.
// TODO(marja,rossberg): Detect errors across different evals (depends on the
// future of eval in strong mode).
const Scope* eval_for_use = NearestOuterEvalScope();
const Scope* eval_for_declaration = var->scope()->NearestOuterEvalScope();
if (proxy->position() != RelocInfo::kNoPosition &&
proxy->position() < var->initializer_position() && !var->is_function() &&
eval_for_use == eval_for_declaration) {
DCHECK(proxy->end_position() != RelocInfo::kNoPosition);
ReportMessage(proxy->position(), proxy->end_position(),
MessageTemplate::kStrongUseBeforeDeclaration,
proxy->raw_name());
return false;
}
return true;
}
ClassVariable* Scope::ClassVariableForMethod() const {
// TODO(marja, rossberg): This fails to find a class variable in the following
// cases:
// let A = class { ... }
// It needs to be investigated whether this causes any practical problems.
if (!is_function_scope()) return nullptr;
if (IsInObjectLiteral(function_kind_)) return nullptr;
if (!IsConciseMethod(function_kind_) && !IsClassConstructor(function_kind_) &&
!IsAccessorFunction(function_kind_)) {
return nullptr;
}
DCHECK_NOT_NULL(outer_scope_);
// The class scope contains at most one variable, the class name.
DCHECK(outer_scope_->variables_.occupancy() <= 1);
if (outer_scope_->variables_.occupancy() == 0) return nullptr;
VariableMap::Entry* p = outer_scope_->variables_.Start();
Variable* var = reinterpret_cast<Variable*>(p->value);
if (!var->is_class()) return nullptr;
return var->AsClassVariable();
}
bool Scope::ResolveVariablesRecursively(ParseInfo* info,
AstNodeFactory* factory) {
DCHECK(info->script_scope()->is_script_scope());
@ -1646,7 +1504,7 @@ void Scope::AllocateVariablesRecursively(Isolate* isolate) {
}
// If scope is already resolved, we still need to allocate
// variables in inner scopes which might not had been resolved yet.
// variables in inner scopes which might not have been resolved yet.
if (already_resolved()) return;
// The number of slots required for variables.
num_heap_slots_ = Context::MIN_CONTEXT_SLOTS;

57
deps/v8/src/ast/scopes.h

@ -24,8 +24,7 @@ class VariableMap: public ZoneHashMap {
Variable* Declare(Scope* scope, const AstRawString* name, VariableMode mode,
Variable::Kind kind, InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned,
int declaration_group_start = -1);
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
Variable* Lookup(const AstRawString* name);
@ -163,8 +162,7 @@ class Scope: public ZoneObject {
// declared before, the previously declared variable is returned.
Variable* DeclareLocal(const AstRawString* name, VariableMode mode,
InitializationFlag init_flag, Variable::Kind kind,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned,
int declaration_group_start = -1);
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
// Declare an implicit global variable in this scope which must be a
// script scope. The variable was introduced (possibly from an inner
@ -377,12 +375,6 @@ class Scope: public ZoneObject {
IsClassConstructor(function_kind())));
}
const Scope* NearestOuterEvalScope() const {
if (is_eval_scope()) return this;
if (outer_scope() == nullptr) return nullptr;
return outer_scope()->NearestOuterEvalScope();
}
// ---------------------------------------------------------------------------
// Accessors.
@ -428,7 +420,24 @@ class Scope: public ZoneObject {
// Returns the default function arity excluding default or rest parameters.
int default_function_length() const { return arity_; }
int num_parameters() const { return params_.length(); }
// Returns the number of formal parameters, up to but not including the
// rest parameter index (if the function has rest parameters), i.e. it
// says 2 for
//
// function foo(a, b) { ... }
//
// and
//
// function foo(a, b, ...c) { ... }
//
// but for
//
// function foo(a, b, c = 1) { ... }
//
// we return 3 here.
int num_parameters() const {
return has_rest_parameter() ? params_.length() - 1 : params_.length();
}
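The counting rule documented above can be restated outside V8: the rest parameter, when present, is the last entry in the parameter list and is excluded from the count, while defaulted parameters still count. A sketch under those assumptions (ScopeSketch is a hypothetical stand-in; in V8 params_ is a ZoneList<Variable*>):

// Hypothetical restatement of the num_parameters() rule above.
#include <cassert>
#include <vector>

struct ScopeSketch {
  std::vector<const char*> params;
  bool has_rest;
  int num_parameters() const {
    int n = static_cast<int>(params.size());
    return has_rest ? n - 1 : n;  // rest parameter is last; drop it
  }
};

int main() {
  ScopeSketch plain{{"a", "b"}, false};           // function foo(a, b)
  ScopeSketch rest{{"a", "b", "c"}, true};        // function foo(a, b, ...c)
  ScopeSketch defaulted{{"a", "b", "c"}, false};  // function foo(a, b, c = 1)
  assert(plain.num_parameters() == 2);
  assert(rest.num_parameters() == 2);
  assert(defaulted.num_parameters() == 3);
  return 0;
}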
// A function can have at most one rest parameter. Returns Variable* or NULL.
Variable* rest_parameter(int* index) const {
@ -486,25 +495,15 @@ class Scope: public ZoneObject {
// The ModuleDescriptor for this scope; only for module scopes.
ModuleDescriptor* module() const { return module_descriptor_; }
void set_class_declaration_group_start(int position) {
class_declaration_group_start_ = position;
}
int class_declaration_group_start() const {
return class_declaration_group_start_;
}
// ---------------------------------------------------------------------------
// Variable allocation.
// Collect stack and context allocated local variables in this scope. Note
// that the function variable - if present - is not collected and should be
// handled separately.
void CollectStackAndContextLocals(
ZoneList<Variable*>* stack_locals, ZoneList<Variable*>* context_locals,
ZoneList<Variable*>* context_globals,
ZoneList<Variable*>* strong_mode_free_variables = nullptr);
void CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
ZoneList<Variable*>* context_locals,
ZoneList<Variable*>* context_globals);
// Current number of var or const locals.
int num_var_or_const() { return num_var_or_const_; }
@ -767,12 +766,6 @@ class Scope: public ZoneObject {
MUST_USE_RESULT
bool ResolveVariablesRecursively(ParseInfo* info, AstNodeFactory* factory);
bool CheckStrongModeDeclaration(VariableProxy* proxy, Variable* var);
// If this scope is a method scope of a class, return the corresponding
// class variable, otherwise nullptr.
ClassVariable* ClassVariableForMethod() const;
// Scope analysis.
void PropagateScopeInfo(bool outer_scope_calls_sloppy_eval);
bool HasTrivialContext() const;
@ -837,10 +830,6 @@ class Scope: public ZoneObject {
Zone* zone_;
PendingCompilationErrorHandler pending_error_handler_;
// For tracking which classes are declared consecutively. Needed for strong
// mode.
int class_declaration_group_start_;
};
} // namespace internal

3
deps/v8/src/ast/variables.cc

@ -40,9 +40,6 @@ Variable::Variable(Scope* scope, const AstRawString* name, VariableMode mode,
location_(VariableLocation::UNALLOCATED),
index_(-1),
initializer_position_(RelocInfo::kNoPosition),
has_strong_mode_reference_(false),
strong_mode_reference_start_position_(RelocInfo::kNoPosition),
strong_mode_reference_end_position_(RelocInfo::kNoPosition),
local_if_not_shadowed_(NULL),
is_from_eval_(false),
force_context_allocation_(false),

56
deps/v8/src/ast/variables.h

@ -15,12 +15,9 @@ namespace internal {
// variables. Variables themselves are never directly referred to from the AST,
// they are maintained by scopes, and referred to from VariableProxies and Slots
// after binding and variable allocation.
class ClassVariable;
class Variable: public ZoneObject {
public:
enum Kind { NORMAL, FUNCTION, CLASS, THIS, ARGUMENTS };
enum Kind { NORMAL, FUNCTION, THIS, ARGUMENTS };
Variable(Scope* scope, const AstRawString* name, VariableMode mode, Kind kind,
InitializationFlag initialization_flag,
@ -84,7 +81,6 @@ class Variable: public ZoneObject {
}
bool is_function() const { return kind_ == FUNCTION; }
bool is_class() const { return kind_ == CLASS; }
bool is_this() const { return kind_ == THIS; }
bool is_arguments() const { return kind_ == ARGUMENTS; }
@ -98,11 +94,6 @@ class Variable: public ZoneObject {
return is_this() || *name() == *isolate->factory()->this_string();
}
ClassVariable* AsClassVariable() {
DCHECK(is_class());
return reinterpret_cast<ClassVariable*>(this);
}
// True if the variable is named eval and not known to be shadowed.
bool is_possibly_eval(Isolate* isolate) const {
return IsVariable(isolate->factory()->eval_string());
@ -132,24 +123,6 @@ class Variable: public ZoneObject {
static int CompareIndex(Variable* const* v, Variable* const* w);
void RecordStrongModeReference(int start_position, int end_position) {
// Record the earliest reference to the variable. Used in error messages for
// strong mode references to undeclared variables.
if (has_strong_mode_reference_ &&
strong_mode_reference_start_position_ < start_position)
return;
has_strong_mode_reference_ = true;
strong_mode_reference_start_position_ = start_position;
strong_mode_reference_end_position_ = end_position;
}
bool has_strong_mode_reference() const { return has_strong_mode_reference_; }
int strong_mode_reference_start_position() const {
return strong_mode_reference_start_position_;
}
int strong_mode_reference_end_position() const {
return strong_mode_reference_end_position_;
}
PropertyAttributes DeclarationPropertyAttributes() const {
int property_attributes = NONE;
if (IsImmutableVariableMode(mode_)) {
@ -169,11 +142,6 @@ class Variable: public ZoneObject {
VariableLocation location_;
int index_;
int initializer_position_;
// Tracks whether the variable is bound to a VariableProxy which is in strong
// mode, and if yes, the source location of the reference.
bool has_strong_mode_reference_;
int strong_mode_reference_start_position_;
int strong_mode_reference_end_position_;
// If this field is set, this variable references the stored locally bound
// variable, but it might be shadowed by variable bindings introduced by
@ -190,28 +158,6 @@ class Variable: public ZoneObject {
InitializationFlag initialization_flag_;
MaybeAssignedFlag maybe_assigned_;
};
class ClassVariable : public Variable {
public:
ClassVariable(Scope* scope, const AstRawString* name, VariableMode mode,
InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned,
int declaration_group_start = -1)
: Variable(scope, name, mode, Variable::CLASS, initialization_flag,
maybe_assigned_flag),
declaration_group_start_(declaration_group_start) {}
int declaration_group_start() const { return declaration_group_start_; }
void set_declaration_group_start(int declaration_group_start) {
declaration_group_start_ = declaration_group_start;
}
private:
// For classes we keep track of consecutive groups of declarations. They are
// needed for strong mode scoping checks. TODO(marja, rossberg): Implement
// checks for functions too.
int declaration_group_start_;
};
} // namespace internal
} // namespace v8

14
deps/v8/src/bailout-reason.h

@ -78,8 +78,6 @@ namespace internal {
V(kExportDeclaration, "Export declaration") \
V(kExternalStringExpectedButNotFound, \
"External string expected, but not found") \
V(kForInStatementOptimizationIsDisabled, \
"ForInStatement optimization is disabled") \
V(kForInStatementWithNonLocalEachVariable, \
"ForInStatement with non-local each variable") \
V(kForOfStatement, "ForOfStatement") \
@ -99,8 +97,6 @@ namespace internal {
V(kImportDeclaration, "Import declaration") \
V(kIndexIsNegative, "Index is negative") \
V(kIndexIsTooLarge, "Index is too large") \
V(kInlinedRuntimeFunctionFastOneByteArrayJoin, \
"Inlined runtime function: FastOneByteArrayJoin") \
V(kInliningBailedOut, "Inlining bailed out") \
V(kInputGPRIsExpectedToHaveUpper32Cleared, \
"Input GPR is expected to have upper32 cleared") \
@ -131,8 +127,6 @@ namespace internal {
V(kNativeFunctionLiteral, "Native function literal") \
V(kNeedSmiLiteral, "Need a Smi literal here") \
V(kNoCasesLeft, "No cases left") \
V(kNoEmptyArraysHereInEmitFastOneByteArrayJoin, \
"No empty arrays here in EmitFastOneByteArrayJoin") \
V(kNonInitializerAssignmentToConst, "Non-initializer assignment to const") \
V(kNonSmiIndex, "Non-smi index") \
V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal") \
@ -150,6 +144,7 @@ namespace internal {
"Operand is a smi and not a bound function") \
V(kOperandIsASmiAndNotAFunction, "Operand is a smi and not a function") \
V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \
V(kOperandIsASmiAndNotAReceiver, "Operand is a smi and not a receiver") \
V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \
V(kOperandIsASmi, "Operand is a smi") \
V(kOperandIsNotADate, "Operand is not a date") \
@ -157,6 +152,7 @@ namespace internal {
V(kOperandIsNotAFunction, "Operand is not a function") \
V(kOperandIsNotAName, "Operand is not a name") \
V(kOperandIsNotANumber, "Operand is not a number") \
V(kOperandIsNotAReceiver, "Operand is not a receiver") \
V(kOperandIsNotASmi, "Operand is not a smi") \
V(kOperandIsNotAString, "Operand is not a string") \
V(kOperandIsNotSmi, "Operand is not smi") \
@ -183,10 +179,10 @@ namespace internal {
"Sloppy function expects JSReceiver as receiver.") \
V(kSmiAdditionOverflow, "Smi addition overflow") \
V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
V(kSpread, "Spread in array literal") \
V(kStackAccessBelowStackPointer, "Stack access below stack pointer") \
V(kStackFrameTypesMustMatch, "Stack frame types must match") \
V(kSuperReference, "Super reference") \
V(kTailCall, "Tail call") \
V(kTheCurrentStackPointerIsBelowCsp, \
"The current stack pointer is below csp") \
V(kTheSourceAndDestinationAreTheSame, \
@ -236,6 +232,7 @@ namespace internal {
"Unexpected number of pre-allocated property fields") \
V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \
V(kUnexpectedSmi, "Unexpected smi value") \
V(kUnexpectedStackDepth, "Unexpected operand stack depth in full-codegen") \
V(kUnexpectedStackPointer, "The stack pointer is not the expected value") \
V(kUnexpectedStringType, "Unexpected string type") \
V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
@ -253,6 +250,8 @@ namespace internal {
V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
V(kUnsupportedPhiUseOfConstVariable, \
"Unsupported phi use of const variable") \
V(kUnexpectedReturnFromBytecodeHandler, \
"Unexpectedly returned from a bytecode handler") \
V(kUnexpectedReturnFromThrow, "Unexpectedly returned from a throw") \
V(kUnsupportedSwitchStatement, "Unsupported switch statement") \
V(kUnsupportedTaggedImmediate, "Unsupported tagged immediate") \
@ -267,7 +266,6 @@ namespace internal {
"Should not directly enter OSR-compiled function") \
V(kYield, "Yield")
#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS) kLastErrorMessage
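The enum above is generated by the same list-macro technique: ERROR_MESSAGES_LIST applies its argument to every (constant, text) pair, so a single list definition yields both the BailoutReason constants and their message strings. A minimal sketch with a hypothetical two-entry list standing in for ERROR_MESSAGES_LIST:

// Hypothetical two-entry list; V8's real list has hundreds of entries.
#include <cstdio>

#define MESSAGES_LIST(V)          \
  V(kFirstReason, "First reason") \
  V(kSecondReason, "Second reason")

#define CONSTANT(C, T) C,
enum BailoutReasonSketch { MESSAGES_LIST(CONSTANT) kLastErrorMessage };
#undef CONSTANT

#define STRING(C, T) T,
static const char* const kMessages[] = {MESSAGES_LIST(STRING)};
#undef STRING

int main() {
  std::printf("%s\n", kMessages[kFirstReason]);  // prints "First reason"
  return 0;
}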

7
deps/v8/src/base.isolate

@ -22,6 +22,13 @@
],
},
}],
['v8_use_snapshot=="true" and v8_use_external_startup_data==1 and v8_separate_ignition_snapshot==1', {
'variables': {
'files': [
'<(PRODUCT_DIR)/snapshot_blob_ignition.bin',
],
},
}],
['OS=="linux" and component=="shared_library" and target_arch=="ia32"', {
'variables': {
'files': [

2
deps/v8/src/base/atomicops.h

@ -157,6 +157,8 @@ Atomic64 Release_Load(volatile const Atomic64* ptr);
#include "src/base/atomicops_internals_mips_gcc.h"
#elif defined(__GNUC__) && V8_HOST_ARCH_MIPS64
#include "src/base/atomicops_internals_mips64_gcc.h"
#elif defined(__GNUC__) && V8_HOST_ARCH_S390
#include "src/base/atomicops_internals_s390_gcc.h"
#else
#error "Atomic operations are not supported on your platform"
#endif

152
deps/v8/src/base/atomicops_internals_s390_gcc.h

@ -0,0 +1,152 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is an internal atomic implementation, use atomicops.h instead.
#ifndef V8_BASE_ATOMICOPS_INTERNALS_S390_H_
#define V8_BASE_ATOMICOPS_INTERNALS_S390_H_
namespace v8 {
namespace base {
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value,
Atomic32 new_value) {
return (__sync_val_compare_and_swap(ptr, old_value, new_value));
}
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
Atomic32 new_value) {
Atomic32 old_value;
do {
old_value = *ptr;
} while (__sync_bool_compare_and_swap(ptr, old_value, new_value) == false);
return old_value;
}
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
Atomic32 increment) {
return Barrier_AtomicIncrement(ptr, increment);
}
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
Atomic32 increment) {
return __sync_add_and_fetch(ptr, increment);
}
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value, Atomic32 new_value) {
return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value, Atomic32 new_value) {
return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) {
*ptr = value;
}
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
}
inline void MemoryBarrier() { __sync_synchronize(); }
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
MemoryBarrier();
}
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
MemoryBarrier();
*ptr = value;
}
inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { return *ptr; }
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { return *ptr; }
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
Atomic32 value = *ptr;
MemoryBarrier();
return value;
}
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
MemoryBarrier();
return *ptr;
}
#ifdef V8_TARGET_ARCH_S390X
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value,
Atomic64 new_value) {
return (__sync_val_compare_and_swap(ptr, old_value, new_value));
}
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
Atomic64 new_value) {
Atomic64 old_value;
do {
old_value = *ptr;
} while (__sync_bool_compare_and_swap(ptr, old_value, new_value) == false);
return old_value;
}
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
Atomic64 increment) {
return Barrier_AtomicIncrement(ptr, increment);
}
inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
Atomic64 increment) {
return __sync_add_and_fetch(ptr, increment);
}
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value, Atomic64 new_value) {
return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value, Atomic64 new_value) {
return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
}
inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
MemoryBarrier();
}
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
MemoryBarrier();
*ptr = value;
}
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { return *ptr; }
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
Atomic64 value = *ptr;
MemoryBarrier();
return value;
}
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
MemoryBarrier();
return *ptr;
}
#endif
} // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_S390_H_
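The exchange and increment helpers in this new file lean on GCC's legacy __sync builtins, with NoBarrier_AtomicExchange implemented as a compare-and-swap retry loop. A standalone sketch of that loop on a plain int (compiles on any GCC/Clang target, not just s390):

// CAS retry loop as used by NoBarrier_AtomicExchange above: snapshot the
// current value, attempt to swap in the new one, retry on interference.
#include <cassert>

int Exchange(volatile int* ptr, int new_value) {
  int old_value;
  do {
    old_value = *ptr;
  } while (!__sync_bool_compare_and_swap(ptr, old_value, new_value));
  return old_value;  // the value that was actually replaced
}

int main() {
  volatile int x = 1;
  assert(Exchange(&x, 2) == 1);
  assert(x == 2);
  return 0;
}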

14
deps/v8/src/base/bits.h

@ -92,6 +92,20 @@ inline unsigned CountLeadingZeros64(uint64_t value) {
}
// ReverseBits(value) returns |value| in reverse bit order.
template <typename T>
T ReverseBits(T value) {
DCHECK((sizeof(value) == 1) || (sizeof(value) == 2) || (sizeof(value) == 4) ||
(sizeof(value) == 8));
T result = 0;
for (unsigned i = 0; i < (sizeof(value) * 8); i++) {
result = (result << 1) | (value & 1);
value >>= 1;
}
return result;
}
// CountTrailingZeros32(value) returns the number of zero bits preceding the
// least significant 1 bit in |value| if |value| is non-zero, otherwise it
// returns 32.
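The new ReverseBits helper shifts bits out of the low end of the input and into the low end of the result, so after sizeof(value) * 8 iterations the bit order is mirrored. A quick standalone check of the same loop:

// Standalone restatement of ReverseBits with two spot checks.
#include <cassert>
#include <cstdint>

template <typename T>
T ReverseBits(T value) {
  T result = 0;
  for (unsigned i = 0; i < sizeof(value) * 8; i++) {
    result = (result << 1) | (value & 1);  // append the next low bit
    value >>= 1;
  }
  return result;
}

int main() {
  assert(ReverseBits<uint8_t>(0x01) == 0x80);  // 00000001 -> 10000000
  assert(ReverseBits<uint8_t>(0x0D) == 0xB0);  // 00001101 -> 10110000
  return 0;
}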

11
deps/v8/src/base/cpu.cc

@ -312,6 +312,8 @@ CPU::CPU()
architecture_(0),
variant_(-1),
part_(0),
icache_line_size_(UNKNOWN_CACHE_LINE_SIZE),
dcache_line_size_(UNKNOWN_CACHE_LINE_SIZE),
has_fpu_(false),
has_cmov_(false),
has_sahf_(false),
@ -644,9 +646,16 @@ CPU::CPU()
if (n == 0 || entry.a_type == AT_NULL) {
break;
}
if (entry.a_type == AT_PLATFORM) {
switch (entry.a_type) {
case AT_PLATFORM:
auxv_cpu_type = reinterpret_cast<char*>(entry.a_un.a_val);
break;
case AT_ICACHEBSIZE:
icache_line_size_ = entry.a_un.a_val;
break;
case AT_DCACHEBSIZE:
dcache_line_size_ = entry.a_un.a_val;
break;
}
}
fclose(fp);
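The AT_ICACHEBSIZE and AT_DCACHEBSIZE entries read above come from the ELF auxiliary vector, which Linux also exposes as /proc/self/auxv. A hedged sketch of dumping those two tags directly, assuming glibc's <elf.h> defines them (kernels populate them mainly on PPC and s390):

// Read the cache-block-size tags from the ELF auxiliary vector on Linux.
#include <elf.h>
#include <cstdio>

int main() {
  std::FILE* fp = std::fopen("/proc/self/auxv", "r");
  if (fp == nullptr) return 1;
  struct { unsigned long a_type, a_val; } entry;  // native auxv entry layout
  while (std::fread(&entry, sizeof(entry), 1, fp) == 1 &&
         entry.a_type != AT_NULL) {
    if (entry.a_type == AT_ICACHEBSIZE)
      std::printf("icache block: %lu\n", entry.a_val);
    if (entry.a_type == AT_DCACHEBSIZE)
      std::printf("dcache block: %lu\n", entry.a_val);
  }
  std::fclose(fp);
  return 0;
}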

5
deps/v8/src/base/cpu.h

@ -75,6 +75,9 @@ class CPU final {
// General features
bool has_fpu() const { return has_fpu_; }
int icache_line_size() const { return icache_line_size_; }
int dcache_line_size() const { return dcache_line_size_; }
static const int UNKNOWN_CACHE_LINE_SIZE = 0;
// x86 features
bool has_cmov() const { return has_cmov_; }
@ -118,6 +121,8 @@ class CPU final {
int architecture_;
int variant_;
int part_;
int icache_line_size_;
int dcache_line_size_;
bool has_fpu_;
bool has_cmov_;
bool has_sahf_;

347
deps/v8/src/bootstrapper.cc

@ -134,7 +134,8 @@ class Genesis BASE_EMBEDDED {
public:
Genesis(Isolate* isolate, MaybeHandle<JSGlobalProxy> maybe_global_proxy,
v8::Local<v8::ObjectTemplate> global_proxy_template,
v8::ExtensionConfiguration* extensions, ContextType context_type);
v8::ExtensionConfiguration* extensions,
GlobalContextType context_type);
~Genesis() { }
Isolate* isolate() const { return isolate_; }
@ -187,10 +188,10 @@ class Genesis BASE_EMBEDDED {
// New context initialization. Used for creating a context from scratch.
void InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<JSFunction> empty_function,
ContextType context_type);
GlobalContextType context_type);
void InitializeExperimentalGlobal();
// Depending on the situation, expose and/or get rid of the utils object.
void ConfigureUtilsObject(ContextType context_type);
void ConfigureUtilsObject(GlobalContextType context_type);
#define DECLARE_FEATURE_INITIALIZATION(id, descr) \
void InitializeGlobal_##id();
@ -206,7 +207,7 @@ class Genesis BASE_EMBEDDED {
Handle<JSFunction> InstallInternalArray(Handle<JSObject> target,
const char* name,
ElementsKind elements_kind);
bool InstallNatives(ContextType context_type);
bool InstallNatives(GlobalContextType context_type);
void InstallTypedArray(const char* name, ElementsKind elements_kind,
Handle<JSFunction>* fun);
@ -318,11 +319,10 @@ void Bootstrapper::Iterate(ObjectVisitor* v) {
v->Synchronize(VisitorSynchronization::kExtensions);
}
Handle<Context> Bootstrapper::CreateEnvironment(
MaybeHandle<JSGlobalProxy> maybe_global_proxy,
v8::Local<v8::ObjectTemplate> global_proxy_template,
v8::ExtensionConfiguration* extensions, ContextType context_type) {
v8::ExtensionConfiguration* extensions, GlobalContextType context_type) {
HandleScope scope(isolate_);
Genesis genesis(isolate_, maybe_global_proxy, global_proxy_template,
extensions, context_type);
@ -483,7 +483,7 @@ void Genesis::SetFunctionInstanceDescriptor(Handle<Map> map,
Handle<Map> Genesis::CreateSloppyFunctionMap(FunctionMode function_mode) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
SetFunctionInstanceDescriptor(map, function_mode);
if (IsFunctionModeWithPrototype(function_mode)) map->set_is_constructor();
map->set_is_constructor(IsFunctionModeWithPrototype(function_mode));
map->set_is_callable();
return map;
}
@ -715,7 +715,7 @@ Handle<Map> Genesis::CreateStrictFunctionMap(
FunctionMode function_mode, Handle<JSFunction> empty_function) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
SetStrictFunctionInstanceDescriptor(map, function_mode);
if (IsFunctionModeWithPrototype(function_mode)) map->set_is_constructor();
map->set_is_constructor(IsFunctionModeWithPrototype(function_mode));
map->set_is_callable();
Map::SetPrototype(map, empty_function);
return map;
@ -726,7 +726,7 @@ Handle<Map> Genesis::CreateStrongFunctionMap(
Handle<JSFunction> empty_function, bool is_constructor) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
SetStrongFunctionInstanceDescriptor(map);
if (is_constructor) map->set_is_constructor();
map->set_is_constructor(is_constructor);
Map::SetPrototype(map, empty_function);
map->set_is_callable();
map->set_is_extensible(is_constructor);
@ -789,6 +789,7 @@ void Genesis::CreateIteratorMaps() {
// Generator functions do not have "caller" or "arguments" accessors.
Handle<Map> sloppy_generator_function_map =
Map::Copy(strict_function_map, "SloppyGeneratorFunction");
sloppy_generator_function_map->set_is_constructor(false);
Map::SetPrototype(sloppy_generator_function_map,
generator_function_prototype);
native_context()->set_sloppy_generator_function_map(
@ -796,6 +797,7 @@ void Genesis::CreateIteratorMaps() {
Handle<Map> strict_generator_function_map =
Map::Copy(strict_function_map, "StrictGeneratorFunction");
strict_generator_function_map->set_is_constructor(false);
Map::SetPrototype(strict_generator_function_map,
generator_function_prototype);
native_context()->set_strict_generator_function_map(
@ -804,6 +806,7 @@ void Genesis::CreateIteratorMaps() {
Handle<Map> strong_function_map(native_context()->strong_function_map());
Handle<Map> strong_generator_function_map =
Map::Copy(strong_function_map, "StrongGeneratorFunction");
strong_generator_function_map->set_is_constructor(false);
Map::SetPrototype(strong_generator_function_map,
generator_function_prototype);
native_context()->set_strong_generator_function_map(
@ -822,7 +825,7 @@ static void ReplaceAccessors(Handle<Map> map,
PropertyAttributes attributes,
Handle<AccessorPair> accessor_pair) {
DescriptorArray* descriptors = map->instance_descriptors();
int idx = descriptors->SearchWithCache(*name, *map);
int idx = descriptors->SearchWithCache(map->GetIsolate(), *name, *map);
AccessorConstantDescriptor descriptor(name, accessor_pair, attributes);
descriptors->Replace(idx, &descriptor);
}
@ -952,7 +955,6 @@ Handle<JSGlobalObject> Genesis::CreateNewGlobals(
}
js_global_object_function->initial_map()->set_is_prototype_map(true);
js_global_object_function->initial_map()->set_is_hidden_prototype();
js_global_object_function->initial_map()->set_dictionary_map(true);
Handle<JSGlobalObject> global_object =
factory()->NewJSGlobalObject(js_global_object_function);
@ -973,10 +975,10 @@ Handle<JSGlobalObject> Genesis::CreateNewGlobals(
isolate(), global_constructor, factory()->the_hole_value(),
ApiNatives::GlobalProxyType);
}
Handle<String> global_name = factory()->global_string();
global_proxy_function->shared()->set_instance_class_name(*global_name);
global_proxy_function->initial_map()->set_is_access_check_needed(true);
global_proxy_function->initial_map()->set_has_hidden_prototype(true);
// Set global_proxy.__proto__ to js_global after ConfigureGlobalObjects
// Return the global proxy.
@ -1063,7 +1065,7 @@ static void InstallWithIntrinsicDefaultProto(Isolate* isolate,
// work in the snapshot case is done in HookUpGlobalObject.
void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<JSFunction> empty_function,
ContextType context_type) {
GlobalContextType context_type) {
// --- N a t i v e C o n t e x t ---
// Use the empty function as closure (no scope info).
native_context()->set_closure(*empty_function);
@ -1095,6 +1097,13 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<JSFunction> object_freeze = SimpleInstallFunction(
object_function, "freeze", Builtins::kObjectFreeze, 1, false);
native_context()->set_object_freeze(*object_freeze);
SimpleInstallFunction(object_function, "getOwnPropertyDescriptor",
Builtins::kObjectGetOwnPropertyDescriptor, 2, false);
SimpleInstallFunction(object_function, "getOwnPropertyNames",
Builtins::kObjectGetOwnPropertyNames, 1, false);
SimpleInstallFunction(object_function, "getOwnPropertySymbols",
Builtins::kObjectGetOwnPropertySymbols, 1, false);
SimpleInstallFunction(object_function, "is", Builtins::kObjectIs, 2, true);
Handle<JSFunction> object_is_extensible =
SimpleInstallFunction(object_function, "isExtensible",
Builtins::kObjectIsExtensible, 1, false);
@ -1140,6 +1149,22 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
SimpleInstallFunction(prototype, factory->toString_string(),
Builtins::kFunctionPrototypeToString, 0, false);
// Install the @@hasInstance function.
Handle<JSFunction> has_instance = InstallFunction(
prototype, factory->has_instance_symbol(), JS_OBJECT_TYPE,
JSObject::kHeaderSize, MaybeHandle<JSObject>(),
Builtins::kFunctionHasInstance,
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY));
// Set the expected parameters for @@hasInstance to 1; required by builtin.
has_instance->shared()->set_internal_formal_parameter_count(1);
// Set the length for the function to satisfy ECMA-262.
has_instance->shared()->set_length(1);
// Install in the native context
native_context()->set_ordinary_has_instance(*has_instance);
// Install the "constructor" property on the %FunctionPrototype%.
JSObject::AddProperty(prototype, factory->constructor_string(),
function_fun, DONT_ENUM);
@ -1216,9 +1241,29 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<JSFunction> boolean_fun =
InstallFunction(global, "Boolean", JS_VALUE_TYPE, JSValue::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal);
Builtins::kBooleanConstructor);
boolean_fun->shared()->DontAdaptArguments();
boolean_fun->shared()->set_construct_stub(
*isolate->builtins()->BooleanConstructor_ConstructStub());
boolean_fun->shared()->set_length(1);
InstallWithIntrinsicDefaultProto(isolate, boolean_fun,
Context::BOOLEAN_FUNCTION_INDEX);
// Create the %BooleanPrototype%
Handle<JSValue> prototype =
Handle<JSValue>::cast(factory->NewJSObject(boolean_fun, TENURED));
prototype->set_value(isolate->heap()->false_value());
Accessors::FunctionSetPrototype(boolean_fun, prototype).Assert();
// Install the "constructor" property on the {prototype}.
JSObject::AddProperty(prototype, factory->constructor_string(), boolean_fun,
DONT_ENUM);
// Install the Boolean.prototype methods.
SimpleInstallFunction(prototype, "toString",
Builtins::kBooleanPrototypeToString, 0, false);
SimpleInstallFunction(prototype, "valueOf",
Builtins::kBooleanPrototypeValueOf, 0, false);
}
{ // --- S t r i n g ---
@ -1234,6 +1279,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<Map> string_map =
Handle<Map>(native_context()->string_function()->initial_map());
string_map->set_elements_kind(FAST_STRING_WRAPPER_ELEMENTS);
Map::EnsureDescriptorSlack(string_map, 1);
PropertyAttributes attribs = static_cast<PropertyAttributes>(
@ -1250,14 +1296,20 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
{
// --- S y m b o l ---
Handle<JSFunction> symbol_fun = InstallFunction(
global, "Symbol", JS_VALUE_TYPE, JSValue::kSize,
isolate->initial_object_prototype(), Builtins::kSymbolConstructor);
Handle<JSObject> prototype =
factory->NewJSObject(isolate->object_function(), TENURED);
Handle<JSFunction> symbol_fun =
InstallFunction(global, "Symbol", JS_VALUE_TYPE, JSValue::kSize,
prototype, Builtins::kSymbolConstructor);
symbol_fun->shared()->set_construct_stub(
*isolate->builtins()->SymbolConstructor_ConstructStub());
symbol_fun->shared()->set_length(1);
symbol_fun->shared()->DontAdaptArguments();
native_context()->set_symbol_function(*symbol_fun);
// Install the "constructor" property on the {prototype}.
JSObject::AddProperty(prototype, factory->constructor_string(), symbol_fun,
DONT_ENUM);
}
{ // --- D a t e ---
@ -1290,12 +1342,13 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Builtins::kDatePrototypeToDateString, 0, false);
SimpleInstallFunction(prototype, "toTimeString",
Builtins::kDatePrototypeToTimeString, 0, false);
SimpleInstallFunction(prototype, "toGMTString",
Builtins::kDatePrototypeToUTCString, 0, false);
SimpleInstallFunction(prototype, "toISOString",
Builtins::kDatePrototypeToISOString, 0, false);
Handle<JSFunction> to_utc_string =
SimpleInstallFunction(prototype, "toUTCString",
Builtins::kDatePrototypeToUTCString, 0, false);
InstallFunction(prototype, to_utc_string,
factory->InternalizeUtf8String("toGMTString"), DONT_ENUM);
SimpleInstallFunction(prototype, "getDate", Builtins::kDatePrototypeGetDate,
0, true);
SimpleInstallFunction(prototype, "setDate", Builtins::kDatePrototypeSetDate,
@ -1504,9 +1557,11 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
cons,
Handle<Object>(native_context()->initial_object_prototype(), isolate));
cons->shared()->set_instance_class_name(*name);
Handle<JSObject> json_object = factory->NewJSObject(cons, TENURED);
DCHECK(json_object->IsJSObject());
JSObject::AddProperty(global, name, json_object, DONT_ENUM);
Handle<JSObject> math = factory->NewJSObject(cons, TENURED);
DCHECK(math->IsJSObject());
JSObject::AddProperty(global, name, math, DONT_ENUM);
SimpleInstallFunction(math, "max", Builtins::kMathMax, 2, false);
SimpleInstallFunction(math, "min", Builtins::kMathMin, 2, false);
}
{ // -- A r r a y B u f f e r
@ -1527,16 +1582,16 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
TYPED_ARRAYS(INSTALL_TYPED_ARRAY)
#undef INSTALL_TYPED_ARRAY
Handle<JSFunction> data_view_fun =
InstallFunction(
Handle<JSFunction> data_view_fun = InstallFunction(
global, "DataView", JS_DATA_VIEW_TYPE,
JSDataView::kSizeWithInternalFields,
isolate->initial_object_prototype(),
Builtins::kIllegal);
isolate->initial_object_prototype(), Builtins::kDataViewConstructor);
InstallWithIntrinsicDefaultProto(isolate, data_view_fun,
Context::DATA_VIEW_FUN_INDEX);
data_view_fun->shared()->set_construct_stub(
*isolate->builtins()->JSBuiltinsConstructStub());
*isolate->builtins()->DataViewConstructor_ConstructStub());
data_view_fun->shared()->set_length(3);
data_view_fun->shared()->DontAdaptArguments();
}
{ // -- M a p
@ -1557,7 +1612,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
{ // -- I t e r a t o r R e s u l t
Handle<Map> map =
factory->NewMap(JS_ITERATOR_RESULT_TYPE, JSIteratorResult::kSize);
factory->NewMap(JS_OBJECT_TYPE, JSIteratorResult::kSize);
Map::SetPrototype(map, isolate->initial_object_prototype());
Map::EnsureDescriptorSlack(map, 2);
@ -1573,6 +1628,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
map->AppendDescriptor(&d);
}
map->SetConstructor(native_context()->object_function());
map->SetInObjectProperties(2);
native_context()->set_iterator_result_map(*map);
}
@ -1618,7 +1674,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
native_context()->set_bound_function_without_constructor_map(*map);
map = Map::Copy(map, "IsConstructor");
map->set_is_constructor();
map->set_is_constructor(true);
native_context()->set_bound_function_with_constructor_map(*map);
}
@ -1633,18 +1689,20 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
function->shared()->set_instance_class_name(*arguments_string);
Handle<Map> map = factory->NewMap(
JS_OBJECT_TYPE, Heap::kSloppyArgumentsObjectSize, FAST_ELEMENTS);
JS_OBJECT_TYPE, JSSloppyArgumentsObject::kSize, FAST_ELEMENTS);
// Create the descriptor array for the arguments object.
Map::EnsureDescriptorSlack(map, 2);
{ // length
DataDescriptor d(factory->length_string(), Heap::kArgumentsLengthIndex,
DONT_ENUM, Representation::Tagged());
DataDescriptor d(factory->length_string(),
JSSloppyArgumentsObject::kLengthIndex, DONT_ENUM,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // callee
DataDescriptor d(factory->callee_string(), Heap::kArgumentsCalleeIndex,
DONT_ENUM, Representation::Tagged());
DataDescriptor d(factory->callee_string(),
JSSloppyArgumentsObject::kCalleeIndex, DONT_ENUM,
Representation::Tagged());
map->AppendDescriptor(&d);
}
// @@iterator method is added later.
@ -1656,8 +1714,6 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
JSFunction::SetInitialMap(function, map,
isolate->initial_object_prototype());
DCHECK(map->GetInObjectProperties() > Heap::kArgumentsCalleeIndex);
DCHECK(map->GetInObjectProperties() > Heap::kArgumentsLengthIndex);
DCHECK(!map->is_dictionary_map());
DCHECK(IsFastObjectElementsKind(map->elements_kind()));
}
@ -1693,13 +1749,14 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
// Create the map. Allocate one in-object field for length.
Handle<Map> map = factory->NewMap(
JS_OBJECT_TYPE, Heap::kStrictArgumentsObjectSize, FAST_ELEMENTS);
JS_OBJECT_TYPE, JSStrictArgumentsObject::kSize, FAST_ELEMENTS);
// Create the descriptor array for the arguments object.
Map::EnsureDescriptorSlack(map, 3);
{ // length
DataDescriptor d(factory->length_string(), Heap::kArgumentsLengthIndex,
DONT_ENUM, Representation::Tagged());
DataDescriptor d(factory->length_string(),
JSStrictArgumentsObject::kLengthIndex, DONT_ENUM,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // callee
@ -1725,7 +1782,6 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
native_context()->set_strict_arguments_map(*map);
DCHECK(map->GetInObjectProperties() > Heap::kArgumentsLengthIndex);
DCHECK(!map->is_dictionary_map());
DCHECK(IsFastObjectElementsKind(map->elements_kind()));
}
@ -1805,7 +1861,7 @@ bool Bootstrapper::CompileBuiltin(Isolate* isolate, int index) {
Handle<Object> args[] = {global, utils, extras_utils};
return Bootstrapper::CompileNative(isolate, name, source_code,
arraysize(args), args);
arraysize(args), args, NATIVES_CODE);
}
@ -1818,7 +1874,7 @@ bool Bootstrapper::CompileExperimentalBuiltin(Isolate* isolate, int index) {
Handle<Object> utils = isolate->natives_utils_object();
Handle<Object> args[] = {global, utils};
return Bootstrapper::CompileNative(isolate, name, source_code,
arraysize(args), args);
arraysize(args), args, NATIVES_CODE);
}
@ -1832,7 +1888,7 @@ bool Bootstrapper::CompileExtraBuiltin(Isolate* isolate, int index) {
Handle<Object> extras_utils = isolate->extras_utils_object();
Handle<Object> args[] = {global, binding, extras_utils};
return Bootstrapper::CompileNative(isolate, name, source_code,
arraysize(args), args);
arraysize(args), args, EXTENSION_CODE);
}
@ -1847,13 +1903,13 @@ bool Bootstrapper::CompileExperimentalExtraBuiltin(Isolate* isolate,
Handle<Object> extras_utils = isolate->extras_utils_object();
Handle<Object> args[] = {global, binding, extras_utils};
return Bootstrapper::CompileNative(isolate, name, source_code,
arraysize(args), args);
arraysize(args), args, EXTENSION_CODE);
}
bool Bootstrapper::CompileNative(Isolate* isolate, Vector<const char> name,
Handle<String> source, int argc,
Handle<Object> argv[]) {
Handle<Object> argv[],
NativesFlag natives_flag) {
SuppressDebug compiling_natives(isolate->debug());
// During genesis, the boilerplate for stack overflow won't work until the
// environment has been at least partially initialized. Add a stack check
@ -1870,7 +1926,7 @@ bool Bootstrapper::CompileNative(Isolate* isolate, Vector<const char> name,
isolate->factory()->NewStringFromUtf8(name).ToHandleChecked();
Handle<SharedFunctionInfo> function_info = Compiler::CompileScript(
source, script_name, 0, 0, ScriptOriginOptions(), Handle<Object>(),
context, NULL, NULL, ScriptCompiler::kNoCompileOptions, NATIVES_CODE,
context, NULL, NULL, ScriptCompiler::kNoCompileOptions, natives_flag,
false);
if (function_info.is_null()) return false;
@ -1928,7 +1984,7 @@ bool Genesis::CompileExtension(Isolate* isolate, v8::Extension* extension) {
function_info = Compiler::CompileScript(
source, script_name, 0, 0, ScriptOriginOptions(), Handle<Object>(),
context, extension, NULL, ScriptCompiler::kNoCompileOptions,
NOT_NATIVES_CODE, false);
EXTENSION_CODE, false);
if (function_info.is_null()) return false;
cache->Add(name, function_info);
}
@ -1977,8 +2033,7 @@ static Handle<JSObject> ResolveBuiltinIdHolder(Handle<Context> native_context,
return Handle<JSObject>::cast(value);
}
void Genesis::ConfigureUtilsObject(ContextType context_type) {
void Genesis::ConfigureUtilsObject(GlobalContextType context_type) {
switch (context_type) {
// We still need the utils object to find debug functions.
case DEBUG_CONTEXT:
@ -2026,24 +2081,6 @@ void Bootstrapper::ExportFromRuntime(Isolate* isolate,
WELL_KNOWN_SYMBOL_LIST(EXPORT_PUBLIC_SYMBOL)
#undef EXPORT_PUBLIC_SYMBOL
{
Handle<JSFunction> apply = InstallFunction(
container, "reflect_apply", JS_OBJECT_TYPE, JSObject::kHeaderSize,
MaybeHandle<JSObject>(), Builtins::kReflectApply);
apply->shared()->DontAdaptArguments();
apply->shared()->set_length(3);
native_context->set_reflect_apply(*apply);
}
{
Handle<JSFunction> construct = InstallFunction(
container, "reflect_construct", JS_OBJECT_TYPE, JSObject::kHeaderSize,
MaybeHandle<JSObject>(), Builtins::kReflectConstruct);
construct->shared()->DontAdaptArguments();
construct->shared()->set_length(2);
native_context->set_reflect_construct(*construct);
}
{
Handle<JSFunction> to_string = InstallFunction(
container, "object_to_string", JS_OBJECT_TYPE, JSObject::kHeaderSize,
@ -2279,7 +2316,6 @@ void Bootstrapper::ExportExperimentalFromRuntime(Isolate* isolate,
}
INITIALIZE_FLAG(FLAG_harmony_tostring)
INITIALIZE_FLAG(FLAG_harmony_tolength)
INITIALIZE_FLAG(FLAG_harmony_species)
#undef INITIALIZE_FLAG
@ -2299,13 +2335,15 @@ EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_destructuring_assignment)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_object_observe)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexps)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_unicode_regexps)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_completion)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_tolength)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_do_expressions)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_iterator_close)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexp_lookbehind)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexp_property)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_function_name)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_function_sent)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(promise_extra)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_tailcalls)
EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_instanceof)
void InstallPublicSymbol(Factory* factory, Handle<Context> native_context,
const char* name, Handle<Symbol> value) {
@ -2328,13 +2366,6 @@ void Genesis::InitializeGlobal_harmony_tostring() {
}
void Genesis::InitializeGlobal_harmony_concat_spreadable() {
if (!FLAG_harmony_concat_spreadable) return;
InstallPublicSymbol(factory(), native_context(), "isConcatSpreadable",
factory()->is_concat_spreadable_symbol());
}
void Genesis::InitializeGlobal_harmony_regexp_subclass() {
if (!FLAG_harmony_regexp_subclass) return;
InstallPublicSymbol(factory(), native_context(), "match",
@ -2364,6 +2395,15 @@ void Genesis::InitializeGlobal_harmony_reflect() {
Builtins::kReflectDeleteProperty, 2, true);
native_context()->set_reflect_delete_property(*delete_property);
Handle<JSFunction> apply = SimpleCreateFunction(
isolate(), factory->apply_string(), Builtins::kReflectApply, 3, false);
native_context()->set_reflect_apply(*apply);
Handle<JSFunction> construct =
SimpleCreateFunction(isolate(), factory->construct_string(),
Builtins::kReflectConstruct, 2, false);
native_context()->set_reflect_construct(*construct);
if (!FLAG_harmony_reflect) return;
Handle<JSGlobalObject> global(JSGlobalObject::cast(
@ -2375,6 +2415,8 @@ void Genesis::InitializeGlobal_harmony_reflect() {
InstallFunction(reflect, define_property, factory->defineProperty_string());
InstallFunction(reflect, delete_property, factory->deleteProperty_string());
InstallFunction(reflect, apply, factory->apply_string());
InstallFunction(reflect, construct, factory->construct_string());
SimpleInstallFunction(reflect, factory->get_string(),
Builtins::kReflectGet, 2, false);
@ -2438,6 +2480,35 @@ void Genesis::InitializeGlobal_harmony_simd() {
}
void Genesis::InitializeGlobal_harmony_object_values_entries() {
if (!FLAG_harmony_object_values_entries) return;
Handle<JSGlobalObject> global(
JSGlobalObject::cast(native_context()->global_object()));
Isolate* isolate = global->GetIsolate();
Factory* factory = isolate->factory();
Handle<JSFunction> object_function = isolate->object_function();
SimpleInstallFunction(object_function, factory->entries_string(),
Builtins::kObjectEntries, 1, false);
SimpleInstallFunction(object_function, factory->values_string(),
Builtins::kObjectValues, 1, false);
}
void Genesis::InitializeGlobal_harmony_object_own_property_descriptors() {
if (!FLAG_harmony_object_own_property_descriptors) return;
Handle<JSGlobalObject> global(
JSGlobalObject::cast(native_context()->global_object()));
Isolate* isolate = global->GetIsolate();
Factory* factory = isolate->factory();
Handle<JSFunction> object_function = isolate->object_function();
SimpleInstallFunction(object_function,
factory->getOwnPropertyDescriptors_string(),
Builtins::kObjectGetOwnPropertyDescriptors, 1, false);
}
void Genesis::InstallJSProxyMaps() {
// Allocate the different maps for all Proxy types.
// Next to the default proxy, we need maps indicating callable and
@ -2445,7 +2516,7 @@ void Genesis::InstallJSProxyMaps() {
Handle<Map> proxy_function_map =
Map::Copy(isolate()->sloppy_function_without_prototype_map(), "Proxy");
proxy_function_map->set_is_constructor();
proxy_function_map->set_is_constructor(true);
native_context()->set_proxy_function_map(*proxy_function_map);
Handle<Map> proxy_map =
@ -2460,7 +2531,7 @@ void Genesis::InstallJSProxyMaps() {
Handle<Map> proxy_constructor_map =
Map::Copy(proxy_callable_map, "constructor Proxy");
proxy_constructor_map->set_is_constructor();
proxy_constructor_map->set_is_constructor(true);
native_context()->set_proxy_constructor_map(*proxy_constructor_map);
}
@ -2478,8 +2549,9 @@ void Genesis::InitializeGlobal_harmony_proxies() {
Handle<String> name = factory->Proxy_string();
Handle<Code> code(isolate->builtins()->ProxyConstructor());
Handle<JSFunction> proxy_function = factory->NewFunction(
isolate->proxy_function_map(), factory->Proxy_string(), code);
Handle<JSFunction> proxy_function =
factory->NewFunction(isolate->proxy_function_map(),
factory->Proxy_string(), MaybeHandle<Code>(code));
JSFunction::SetInitialMap(proxy_function,
Handle<Map>(native_context()->proxy_map(), isolate),
@ -2574,8 +2646,7 @@ Handle<JSFunction> Genesis::InstallInternalArray(Handle<JSObject> target,
return array_function;
}
bool Genesis::InstallNatives(ContextType context_type) {
bool Genesis::InstallNatives(GlobalContextType context_type) {
HandleScope scope(isolate());
// Set up the utils object as shared container between native scripts.
@ -2637,10 +2708,11 @@ bool Genesis::InstallNatives(ContextType context_type) {
if (!CallUtilsFunction(isolate(), "PostNatives")) return false;
auto function_cache =
auto template_instantiations_cache =
ObjectHashTable::New(isolate(), ApiNatives::kInitialFunctionCacheSize,
USE_CUSTOM_MINIMUM_CAPACITY);
native_context()->set_function_cache(*function_cache);
native_context()->set_template_instantiations_cache(
*template_instantiations_cache);
// Store the map for the %ObjectPrototype% after the natives have been compiled
// and the Object function has been set up.
@ -2717,6 +2789,91 @@ bool Genesis::InstallNatives(ContextType context_type) {
InstallBuiltinFunctionIds();
// Create a map for accessor property descriptors (a variant of JSObject
// that predefines four properties get, set, configurable and enumerable).
{
// AccessorPropertyDescriptor initial map.
Handle<Map> map =
factory()->NewMap(JS_OBJECT_TYPE, JSAccessorPropertyDescriptor::kSize);
// Create the descriptor array for the property descriptor object.
Map::EnsureDescriptorSlack(map, 4);
{ // get
DataDescriptor d(factory()->get_string(),
JSAccessorPropertyDescriptor::kGetIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // set
DataDescriptor d(factory()->set_string(),
JSAccessorPropertyDescriptor::kSetIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // enumerable
DataDescriptor d(factory()->enumerable_string(),
JSAccessorPropertyDescriptor::kEnumerableIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // configurable
DataDescriptor d(factory()->configurable_string(),
JSAccessorPropertyDescriptor::kConfigurableIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
Map::SetPrototype(map, isolate()->initial_object_prototype());
map->SetConstructor(native_context()->object_function());
map->SetInObjectProperties(4);
map->set_unused_property_fields(0);
native_context()->set_accessor_property_descriptor_map(*map);
}
// Create a map for data property descriptors (a variant of JSObject
// that predefines four properties value, writable, configurable and
// enumerable).
{
// DataPropertyDescriptor initial map.
Handle<Map> map =
factory()->NewMap(JS_OBJECT_TYPE, JSDataPropertyDescriptor::kSize);
// Create the descriptor array for the property descriptor object.
Map::EnsureDescriptorSlack(map, 4);
{ // value
DataDescriptor d(factory()->value_string(),
JSDataPropertyDescriptor::kValueIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // writable
DataDescriptor d(factory()->writable_string(),
JSDataPropertyDescriptor::kWritableIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // enumerable
DataDescriptor d(factory()->enumerable_string(),
JSDataPropertyDescriptor::kEnumerableIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
{ // configurable
DataDescriptor d(factory()->configurable_string(),
JSDataPropertyDescriptor::kConfigurableIndex, NONE,
Representation::Tagged());
map->AppendDescriptor(&d);
}
Map::SetPrototype(map, isolate()->initial_object_prototype());
map->SetConstructor(native_context()->object_function());
map->SetInObjectProperties(4);
map->set_unused_property_fields(0);
native_context()->set_data_property_descriptor_map(*map);
}
// Create a constructor for RegExp results (a variant of Array that
// predefines the two properties index and match).
{
@ -2745,7 +2902,7 @@ bool Genesis::InstallNatives(ContextType context_type) {
array_function->initial_map()->instance_descriptors());
Handle<String> length = factory()->length_string();
int old = array_descriptors->SearchWithCache(
*length, array_function->initial_map());
isolate(), *length, array_function->initial_map());
DCHECK(old != DescriptorArray::kNotFound);
AccessorConstantDescriptor desc(
length, handle(array_descriptors->GetValue(old), isolate()),
@ -2817,11 +2974,13 @@ bool Genesis::InstallExperimentalNatives() {
static const char* harmony_regexps_natives[] = {"native harmony-regexp.js",
nullptr};
static const char* harmony_tostring_natives[] = {nullptr};
static const char* harmony_iterator_close_natives[] = {nullptr};
static const char* harmony_sloppy_natives[] = {nullptr};
static const char* harmony_sloppy_function_natives[] = {nullptr};
static const char* harmony_sloppy_let_natives[] = {nullptr};
static const char* harmony_species_natives[] = {"native harmony-species.js",
nullptr};
static const char* harmony_tailcalls_natives[] = {nullptr};
static const char* harmony_unicode_regexps_natives[] = {
"native harmony-unicode-regexps.js", nullptr};
static const char* harmony_default_parameters_natives[] = {nullptr};
@ -2833,17 +2992,20 @@ bool Genesis::InstallExperimentalNatives() {
"native harmony-object-observe.js", nullptr};
static const char* harmony_sharedarraybuffer_natives[] = {
"native harmony-sharedarraybuffer.js", "native harmony-atomics.js", NULL};
static const char* harmony_concat_spreadable_natives[] = {nullptr};
static const char* harmony_simd_natives[] = {"native harmony-simd.js",
nullptr};
static const char* harmony_tolength_natives[] = {nullptr};
static const char* harmony_completion_natives[] = {nullptr};
static const char* harmony_do_expressions_natives[] = {nullptr};
static const char* harmony_regexp_subclass_natives[] = {nullptr};
static const char* harmony_regexp_lookbehind_natives[] = {nullptr};
static const char* harmony_instanceof_natives[] = {nullptr};
static const char* harmony_regexp_property_natives[] = {nullptr};
static const char* harmony_function_name_natives[] = {nullptr};
static const char* harmony_function_sent_natives[] = {nullptr};
static const char* promise_extra_natives[] = {"native promise-extra.js",
nullptr};
static const char* harmony_object_values_entries_natives[] = {nullptr};
static const char* harmony_object_own_property_descriptors_natives[] = {
nullptr};
for (int i = ExperimentalNatives::GetDebuggerCount();
i < ExperimentalNatives::GetBuiltinsCount(); i++) {
@ -3371,12 +3533,11 @@ class NoTrackDoubleFieldsForSerializerScope {
bool enabled_;
};
Genesis::Genesis(Isolate* isolate,
MaybeHandle<JSGlobalProxy> maybe_global_proxy,
v8::Local<v8::ObjectTemplate> global_proxy_template,
v8::ExtensionConfiguration* extensions,
ContextType context_type)
GlobalContextType context_type)
: isolate_(isolate), active_(isolate->bootstrapper()) {
NoTrackDoubleFieldsForSerializerScope disable_scope(isolate);
result_ = Handle<Context>::null();

6
deps/v8/src/bootstrapper.h

@ -61,7 +61,7 @@ class SourceCodeCache final BASE_EMBEDDED {
DISALLOW_COPY_AND_ASSIGN(SourceCodeCache);
};
enum ContextType { FULL_CONTEXT, THIN_CONTEXT, DEBUG_CONTEXT };
enum GlobalContextType { FULL_CONTEXT, THIN_CONTEXT, DEBUG_CONTEXT };
// The Bootstrapper is the public interface for creating a JavaScript global
// context.
@ -80,7 +80,7 @@ class Bootstrapper final {
MaybeHandle<JSGlobalProxy> maybe_global_proxy,
v8::Local<v8::ObjectTemplate> global_object_template,
v8::ExtensionConfiguration* extensions,
ContextType context_type = FULL_CONTEXT);
GlobalContextType context_type = FULL_CONTEXT);
// Detach the environment from its outer global object.
void DetachGlobal(Handle<Context> env);
@ -109,7 +109,7 @@ class Bootstrapper final {
static bool CompileNative(Isolate* isolate, Vector<const char> name,
Handle<String> source, int argc,
Handle<Object> argv[]);
Handle<Object> argv[], NativesFlag natives_flag);
static bool CompileBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
static bool CompileExtraBuiltin(Isolate* isolate, int index);

956
deps/v8/src/builtins.cc

File diff suppressed because it is too large

133
deps/v8/src/builtins.h

@ -69,6 +69,14 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(ArrayBufferConstructor_ConstructStub, kTargetAndNewTarget) \
V(ArrayBufferIsView, kNone) \
\
V(BooleanConstructor, kNone) \
V(BooleanConstructor_ConstructStub, kTargetAndNewTarget) \
V(BooleanPrototypeToString, kNone) \
V(BooleanPrototypeValueOf, kNone) \
\
V(DataViewConstructor, kNone) \
V(DataViewConstructor_ConstructStub, kTargetAndNewTarget) \
\
V(DateConstructor, kNone) \
V(DateConstructor_ConstructStub, kTargetAndNewTarget) \
V(DateNow, kNone) \
@ -102,6 +110,7 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(FunctionConstructor, kTargetAndNewTarget) \
V(FunctionPrototypeBind, kNone) \
V(FunctionPrototypeToString, kNone) \
V(FunctionHasInstance, kNone) \
\
V(GeneratorFunctionConstructor, kTargetAndNewTarget) \
\
@ -110,10 +119,17 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(ObjectAssign, kNone) \
V(ObjectCreate, kNone) \
V(ObjectFreeze, kNone) \
V(ObjectGetOwnPropertyDescriptor, kNone) \
V(ObjectGetOwnPropertyNames, kNone) \
V(ObjectGetOwnPropertySymbols, kNone) \
V(ObjectIs, kNone) \
V(ObjectIsExtensible, kNone) \
V(ObjectIsFrozen, kNone) \
V(ObjectIsSealed, kNone) \
V(ObjectKeys, kNone) \
V(ObjectValues, kNone) \
V(ObjectEntries, kNone) \
V(ObjectGetOwnPropertyDescriptors, kNone) \
V(ObjectPreventExtensions, kNone) \
V(ObjectSeal, kNone) \
V(ObjectProtoToString, kNone) \
@ -155,11 +171,22 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(CallFunction_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(CallFunction_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(TailCallFunction_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(TailCallFunction_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(TailCallFunction_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(CallBoundFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(TailCallBoundFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(Call_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(Call_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(Call_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(TailCall_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(TailCall_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(TailCall_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
V(ConstructFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(ConstructBoundFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
@ -173,6 +200,8 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(JSBuiltinsConstructStub, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(JSBuiltinsConstructStubForDerived, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(JSConstructStubApi, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \
@ -188,10 +217,12 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(InterpreterEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterExitTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterPushArgsAndCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterPushArgsAndTailCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterPushArgsAndConstruct, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterNotifyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterNotifySoftDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterNotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(InterpreterEnterBytecodeDispatch, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
V(LoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \
@ -200,9 +231,6 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(LoadIC_Getter_ForDeopt, LOAD_IC, MONOMORPHIC, kNoExtraICState) \
V(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState) \
\
V(KeyedLoadIC_Megamorphic_Strong, KEYED_LOAD_IC, MEGAMORPHIC, \
LoadICState::kStrongModeState) \
\
V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, \
StoreICState::kStrictModeState) \
\
@ -246,6 +274,9 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
V(InternalArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(ArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
V(MathMax, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(MathMin, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
V(NumberConstructor, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(NumberConstructor_ConstructStub, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
@ -265,13 +296,10 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) {
// Define list of builtin handlers implemented in assembly.
#define BUILTIN_LIST_H(V) \
V(LoadIC_Slow, LOAD_IC) \
V(LoadIC_Slow_Strong, LOAD_IC) \
V(KeyedLoadIC_Slow, KEYED_LOAD_IC) \
V(KeyedLoadIC_Slow_Strong, KEYED_LOAD_IC) \
V(StoreIC_Slow, STORE_IC) \
V(KeyedStoreIC_Slow, KEYED_STORE_IC) \
V(LoadIC_Normal, LOAD_IC) \
V(LoadIC_Normal_Strong, LOAD_IC) \
V(StoreIC_Normal, STORE_IC)
// Define list of builtins used by the debugger implemented in assembly.
@ -332,8 +360,13 @@ class Builtins {
#undef DECLARE_BUILTIN_ACCESSOR_A
// Convenience wrappers.
Handle<Code> CallFunction(ConvertReceiverMode = ConvertReceiverMode::kAny);
Handle<Code> Call(ConvertReceiverMode = ConvertReceiverMode::kAny);
Handle<Code> CallFunction(
ConvertReceiverMode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
Handle<Code> Call(ConvertReceiverMode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
Handle<Code> CallBoundFunction(TailCallMode tail_call_mode);
Handle<Code> InterpreterPushArgsAndCall(TailCallMode tail_call_mode);
Code* builtin(Name name) {
// Code::cast cannot be used here since we access builtins
@ -358,7 +391,7 @@ class Builtins {
bool is_initialized() const { return initialized_; }
MUST_USE_RESULT static MaybeHandle<Object> InvokeApiFunction(
Handle<JSFunction> function, Handle<Object> receiver, int argc,
Handle<HeapObject> function, Handle<Object> receiver, int argc,
Handle<Object> args[]);
private:
@ -383,6 +416,7 @@ class Builtins {
static void Generate_CompileOptimizedConcurrent(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSBuiltinsConstructStub(MacroAssembler* masm);
static void Generate_JSBuiltinsConstructStubForDerived(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
static void Generate_JSEntryTrampoline(MacroAssembler* masm);
static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
@ -397,30 +431,71 @@ class Builtins {
// ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
static void Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode);
ConvertReceiverMode mode,
TailCallMode tail_call_mode);
static void Generate_CallFunction_ReceiverIsNullOrUndefined(
MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kNullOrUndefined);
Generate_CallFunction(masm, ConvertReceiverMode::kNullOrUndefined,
TailCallMode::kDisallow);
}
static void Generate_CallFunction_ReceiverIsNotNullOrUndefined(
MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kNotNullOrUndefined);
Generate_CallFunction(masm, ConvertReceiverMode::kNotNullOrUndefined,
TailCallMode::kDisallow);
}
static void Generate_CallFunction_ReceiverIsAny(MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kAny);
Generate_CallFunction(masm, ConvertReceiverMode::kAny,
TailCallMode::kDisallow);
}
static void Generate_TailCallFunction_ReceiverIsNullOrUndefined(
MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kNullOrUndefined,
TailCallMode::kAllow);
}
static void Generate_TailCallFunction_ReceiverIsNotNullOrUndefined(
MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kNotNullOrUndefined,
TailCallMode::kAllow);
}
static void Generate_TailCallFunction_ReceiverIsAny(MacroAssembler* masm) {
Generate_CallFunction(masm, ConvertReceiverMode::kAny,
TailCallMode::kAllow);
}
// ES6 section 9.4.1.1 [[Call]] ( thisArgument, argumentsList)
static void Generate_CallBoundFunction(MacroAssembler* masm);
static void Generate_CallBoundFunctionImpl(MacroAssembler* masm,
TailCallMode tail_call_mode);
static void Generate_CallBoundFunction(MacroAssembler* masm) {
Generate_CallBoundFunctionImpl(masm, TailCallMode::kDisallow);
}
static void Generate_TailCallBoundFunction(MacroAssembler* masm) {
Generate_CallBoundFunctionImpl(masm, TailCallMode::kAllow);
}
// ES6 section 7.3.12 Call(F, V, [argumentsList])
static void Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode);
static void Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
TailCallMode tail_call_mode);
static void Generate_Call_ReceiverIsNullOrUndefined(MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kNullOrUndefined);
Generate_Call(masm, ConvertReceiverMode::kNullOrUndefined,
TailCallMode::kDisallow);
}
static void Generate_Call_ReceiverIsNotNullOrUndefined(MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kNotNullOrUndefined);
Generate_Call(masm, ConvertReceiverMode::kNotNullOrUndefined,
TailCallMode::kDisallow);
}
static void Generate_Call_ReceiverIsAny(MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kAny);
Generate_Call(masm, ConvertReceiverMode::kAny, TailCallMode::kDisallow);
}
static void Generate_TailCall_ReceiverIsNullOrUndefined(
MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kNullOrUndefined,
TailCallMode::kAllow);
}
static void Generate_TailCall_ReceiverIsNotNullOrUndefined(
MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kNotNullOrUndefined,
TailCallMode::kAllow);
}
static void Generate_TailCall_ReceiverIsAny(MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kAny, TailCallMode::kAllow);
}
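The block above is a family of thin wrappers over one parameterized generator. A self-contained sketch of that shape, with hypothetical names standing in for the assembler machinery:

#include <cstdio>

enum class ConvertReceiverMode { kNullOrUndefined, kNotNullOrUndefined, kAny };
enum class TailCallMode { kDisallow, kAllow };

// One implementation, specialized by two orthogonal enum parameters.
void GenerateCall(ConvertReceiverMode mode, TailCallMode tail_call_mode) {
  std::printf("mode=%d tail=%d\n", static_cast<int>(mode),
              static_cast<int>(tail_call_mode));
}

// Each builtin entry point is a zero-argument wrapper that pins both enums,
// so the builtins table can hold plain code pointers.
void GenerateCallReceiverIsAny() {
  GenerateCall(ConvertReceiverMode::kAny, TailCallMode::kDisallow);
}
void GenerateTailCallReceiverIsAny() {
  GenerateCall(ConvertReceiverMode::kAny, TailCallMode::kAllow);
}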
// ES6 section 9.2.2 [[Construct]] ( argumentsList, newTarget)
@ -482,6 +557,17 @@ class Builtins {
static void Generate_InternalArrayCode(MacroAssembler* masm);
static void Generate_ArrayCode(MacroAssembler* masm);
enum class MathMaxMinKind { kMax, kMin };
static void Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind);
// ES6 section 20.2.2.24 Math.max ( value1, value2 , ...values )
static void Generate_MathMax(MacroAssembler* masm) {
Generate_MathMaxMin(masm, MathMaxMinKind::kMax);
}
// ES6 section 20.2.2.25 Math.min ( value1, value2 , ...values )
static void Generate_MathMin(MacroAssembler* masm) {
Generate_MathMaxMin(masm, MathMaxMinKind::kMin);
}
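As a rough model of the semantics Generate_MathMaxMin implements for both builtins (ES6 20.2.2.24/25), assuming a plain C++ loop is a fair stand-in for the generated code:

#include <cmath>
#include <limits>
#include <vector>

enum class MathMaxMinKind { kMax, kMin };

double MathMaxMin(const std::vector<double>& values, MathMaxMinKind kind) {
  // An empty argument list yields -Infinity for max and +Infinity for min.
  double result = (kind == MathMaxMinKind::kMax)
                      ? -std::numeric_limits<double>::infinity()
                      : std::numeric_limits<double>::infinity();
  for (double v : values) {
    if (std::isnan(v)) return v;  // any NaN argument makes the result NaN
    result = (kind == MathMaxMinKind::kMax) ? std::fmax(result, v)
                                            : std::fmin(result, v);
  }
  return result;
}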
// ES6 section 20.1.1.1 Number ( [ value ] ) for the [[Call]] case.
static void Generate_NumberConstructor(MacroAssembler* masm);
// ES6 section 20.1.1.1 Number ( [ value ] ) for the [[Construct]] case.
@ -496,11 +582,20 @@ class Builtins {
static void Generate_InterpreterEntryTrampoline(MacroAssembler* masm);
static void Generate_InterpreterExitTrampoline(MacroAssembler* masm);
static void Generate_InterpreterPushArgsAndCall(MacroAssembler* masm);
static void Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
return Generate_InterpreterPushArgsAndCallImpl(masm,
TailCallMode::kDisallow);
}
static void Generate_InterpreterPushArgsAndTailCall(MacroAssembler* masm) {
return Generate_InterpreterPushArgsAndCallImpl(masm, TailCallMode::kAllow);
}
static void Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode);
static void Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm);
static void Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm);
static void Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm);
static void Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm);
static void Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm);
#define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C) \
static void Generate_Make##C##CodeYoungAgainEvenMarking( \

87
deps/v8/src/code-factory.cc

@ -12,44 +12,36 @@ namespace internal {
// static
Callable CodeFactory::LoadIC(Isolate* isolate, TypeofMode typeof_mode,
LanguageMode language_mode) {
return Callable(
LoadIC::initialize_stub(
isolate, LoadICState(typeof_mode, language_mode).GetExtraICState()),
Callable CodeFactory::LoadIC(Isolate* isolate, TypeofMode typeof_mode) {
return Callable(LoadIC::initialize_stub(
isolate, LoadICState(typeof_mode).GetExtraICState()),
LoadDescriptor(isolate));
}
// static
Callable CodeFactory::LoadICInOptimizedCode(
Isolate* isolate, TypeofMode typeof_mode, LanguageMode language_mode,
Isolate* isolate, TypeofMode typeof_mode,
InlineCacheState initialization_state) {
auto code = LoadIC::initialize_stub_in_optimized_code(
isolate, LoadICState(typeof_mode, language_mode).GetExtraICState(),
isolate, LoadICState(typeof_mode).GetExtraICState(),
initialization_state);
return Callable(code, LoadWithVectorDescriptor(isolate));
}
// static
Callable CodeFactory::KeyedLoadIC(Isolate* isolate,
LanguageMode language_mode) {
ExtraICState state = is_strong(language_mode) ? LoadICState::kStrongModeState
: kNoExtraICState;
return Callable(KeyedLoadIC::initialize_stub(isolate, state),
Callable CodeFactory::KeyedLoadIC(Isolate* isolate) {
return Callable(KeyedLoadIC::initialize_stub(isolate, kNoExtraICState),
LoadDescriptor(isolate));
}
// static
Callable CodeFactory::KeyedLoadICInOptimizedCode(
Isolate* isolate, LanguageMode language_mode,
InlineCacheState initialization_state) {
ExtraICState state = is_strong(language_mode) ? LoadICState::kStrongModeState
: kNoExtraICState;
Isolate* isolate, InlineCacheState initialization_state) {
auto code = KeyedLoadIC::initialize_stub_in_optimized_code(
isolate, initialization_state, state);
isolate, initialization_state, kNoExtraICState);
if (initialization_state != MEGAMORPHIC) {
return Callable(code, LoadWithVectorDescriptor(isolate));
}
@ -59,17 +51,19 @@ Callable CodeFactory::KeyedLoadICInOptimizedCode(
// static
Callable CodeFactory::CallIC(Isolate* isolate, int argc,
ConvertReceiverMode mode) {
return Callable(CallIC::initialize_stub(isolate, argc, mode),
ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
return Callable(CallIC::initialize_stub(isolate, argc, mode, tail_call_mode),
CallFunctionWithFeedbackDescriptor(isolate));
}
// static
Callable CodeFactory::CallICInOptimizedCode(Isolate* isolate, int argc,
ConvertReceiverMode mode) {
return Callable(
CallIC::initialize_stub_in_optimized_code(isolate, argc, mode),
ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
return Callable(CallIC::initialize_stub_in_optimized_code(isolate, argc, mode,
tail_call_mode),
CallFunctionWithFeedbackAndVectorDescriptor(isolate));
}
@ -118,9 +112,8 @@ Callable CodeFactory::KeyedStoreICInOptimizedCode(
// static
Callable CodeFactory::CompareIC(Isolate* isolate, Token::Value op,
Strength strength) {
Handle<Code> code = CompareIC::GetUninitialized(isolate, op, strength);
Callable CodeFactory::CompareIC(Isolate* isolate, Token::Value op) {
Handle<Code> code = CompareIC::GetUninitialized(isolate, op);
return Callable(code, CompareDescriptor(isolate));
}
@ -133,9 +126,8 @@ Callable CodeFactory::CompareNilIC(Isolate* isolate, NilValue nil_value) {
// static
Callable CodeFactory::BinaryOpIC(Isolate* isolate, Token::Value op,
Strength strength) {
BinaryOpICStub stub(isolate, op, strength);
Callable CodeFactory::BinaryOpIC(Isolate* isolate, Token::Value op) {
BinaryOpICStub stub(isolate, op);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
@ -168,6 +160,13 @@ Callable CodeFactory::ToString(Isolate* isolate) {
}
// static
Callable CodeFactory::ToName(Isolate* isolate) {
ToNameStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
// static
Callable CodeFactory::ToLength(Isolate* isolate) {
ToLengthStub stub(isolate);
@ -271,19 +270,29 @@ Callable CodeFactory::FastNewClosure(Isolate* isolate,
// static
Callable CodeFactory::ArgumentsAccess(Isolate* isolate,
bool is_unmapped_arguments,
bool has_duplicate_parameters) {
ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
is_unmapped_arguments, has_duplicate_parameters);
ArgumentsAccessStub stub(isolate, type);
Callable CodeFactory::FastNewObject(Isolate* isolate) {
FastNewObjectStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
// static
Callable CodeFactory::RestArgumentsAccess(Isolate* isolate) {
RestParamAccessStub stub(isolate);
Callable CodeFactory::FastNewRestParameter(Isolate* isolate) {
FastNewRestParameterStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
// static
Callable CodeFactory::FastNewSloppyArguments(Isolate* isolate) {
FastNewSloppyArgumentsStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
// static
Callable CodeFactory::FastNewStrictArguments(Isolate* isolate) {
FastNewStrictArgumentsStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
@ -345,8 +354,10 @@ Callable CodeFactory::ConstructFunction(Isolate* isolate) {
// static
Callable CodeFactory::InterpreterPushArgsAndCall(Isolate* isolate) {
return Callable(isolate->builtins()->InterpreterPushArgsAndCall(),
Callable CodeFactory::InterpreterPushArgsAndCall(Isolate* isolate,
TailCallMode tail_call_mode) {
return Callable(
isolate->builtins()->InterpreterPushArgsAndCall(tail_call_mode),
InterpreterPushArgsAndCallDescriptor(isolate));
}
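Every factory method in this file returns a Callable, pairing a code object with the call-interface descriptor callers must honor. A minimal sketch of that pairing, with stand-in types:

struct Code {};                   // stands in for Handle<Code>
struct CallInterfaceDescriptor {  // how arguments are passed to the code
  int register_parameter_count;
};

struct Callable {
  Code code;
  CallInterfaceDescriptor descriptor;
};

enum class TailCallMode { kDisallow, kAllow };

Callable InterpreterPushArgsAndCall(TailCallMode tail_call_mode) {
  // The real factory picks the builtin variant matching |tail_call_mode|;
  // here both variants collapse to a default-constructed Code.
  (void)tail_call_mode;
  return Callable{Code{}, CallInterfaceDescriptor{3}};
}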

33
deps/v8/src/code-factory.h

@ -32,21 +32,20 @@ class Callable final BASE_EMBEDDED {
class CodeFactory final {
public:
// Initial states for ICs.
static Callable LoadIC(Isolate* isolate, TypeofMode typeof_mode,
LanguageMode language_mode);
static Callable LoadIC(Isolate* isolate, TypeofMode typeof_mode);
static Callable LoadICInOptimizedCode(Isolate* isolate,
TypeofMode typeof_mode,
LanguageMode language_mode,
InlineCacheState initialization_state);
static Callable KeyedLoadIC(Isolate* isolate, LanguageMode language_mode);
static Callable KeyedLoadIC(Isolate* isolate);
static Callable KeyedLoadICInOptimizedCode(
Isolate* isolate, LanguageMode language_mode,
InlineCacheState initialization_state);
Isolate* isolate, InlineCacheState initialization_state);
static Callable CallIC(Isolate* isolate, int argc,
ConvertReceiverMode mode = ConvertReceiverMode::kAny);
ConvertReceiverMode mode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
static Callable CallICInOptimizedCode(
Isolate* isolate, int argc,
ConvertReceiverMode mode = ConvertReceiverMode::kAny);
ConvertReceiverMode mode = ConvertReceiverMode::kAny,
TailCallMode tail_call_mode = TailCallMode::kDisallow);
static Callable StoreIC(Isolate* isolate, LanguageMode mode);
static Callable StoreICInOptimizedCode(Isolate* isolate, LanguageMode mode,
InlineCacheState initialization_state);
@ -55,12 +54,10 @@ class CodeFactory final {
Isolate* isolate, LanguageMode mode,
InlineCacheState initialization_state);
static Callable CompareIC(Isolate* isolate, Token::Value op,
Strength strength);
static Callable CompareIC(Isolate* isolate, Token::Value op);
static Callable CompareNilIC(Isolate* isolate, NilValue nil_value);
static Callable BinaryOpIC(Isolate* isolate, Token::Value op,
Strength strength);
static Callable BinaryOpIC(Isolate* isolate, Token::Value op);
// Code stubs. Add methods here as needed to reduce dependency on
// code-stubs.h.
@ -70,6 +67,7 @@ class CodeFactory final {
static Callable ToNumber(Isolate* isolate);
static Callable ToString(Isolate* isolate);
static Callable ToName(Isolate* isolate);
static Callable ToLength(Isolate* isolate);
static Callable ToObject(Isolate* isolate);
static Callable NumberToString(Isolate* isolate);
@ -91,10 +89,10 @@ class CodeFactory final {
static Callable FastNewContext(Isolate* isolate, int slot_count);
static Callable FastNewClosure(Isolate* isolate, LanguageMode language_mode,
FunctionKind kind);
static Callable ArgumentsAccess(Isolate* isolate, bool is_unmapped_arguments,
bool has_duplicate_parameters);
static Callable RestArgumentsAccess(Isolate* isolate);
static Callable FastNewObject(Isolate* isolate);
static Callable FastNewRestParameter(Isolate* isolate);
static Callable FastNewSloppyArguments(Isolate* isolate);
static Callable FastNewStrictArguments(Isolate* isolate);
static Callable AllocateHeapNumber(Isolate* isolate);
static Callable AllocateMutableHeapNumber(Isolate* isolate);
@ -108,7 +106,8 @@ class CodeFactory final {
static Callable Construct(Isolate* isolate);
static Callable ConstructFunction(Isolate* isolate);
static Callable InterpreterPushArgsAndCall(Isolate* isolate);
static Callable InterpreterPushArgsAndCall(Isolate* isolate,
TailCallMode tail_call_mode);
static Callable InterpreterPushArgsAndConstruct(Isolate* isolate);
static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1);
};

84
deps/v8/src/code-stubs-hydrogen.cc

@ -34,11 +34,12 @@ static LChunk* OptimizeGraph(HGraph* graph) {
class CodeStubGraphBuilderBase : public HGraphBuilder {
public:
explicit CodeStubGraphBuilderBase(CompilationInfo* info)
: HGraphBuilder(info),
explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
: HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
arguments_length_(NULL),
info_(info),
descriptor_(info->code_stub()),
code_stub_(code_stub),
descriptor_(code_stub),
context_(NULL) {
int parameter_count = GetParameterCount();
parameters_.Reset(new HParameter*[parameter_count]);
@ -68,7 +69,7 @@ class CodeStubGraphBuilderBase : public HGraphBuilder {
return arguments_length_;
}
CompilationInfo* info() { return info_; }
CodeStub* stub() { return info_->code_stub(); }
CodeStub* stub() { return code_stub_; }
HContext* context() { return context_; }
Isolate* isolate() { return info_->isolate(); }
@ -124,6 +125,7 @@ class CodeStubGraphBuilderBase : public HGraphBuilder {
base::SmartArrayPointer<HParameter*> parameters_;
HValue* arguments_length_;
CompilationInfo* info_;
CodeStub* code_stub_;
CodeStubDescriptor descriptor_;
HContext* context_;
};
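The constructor change above replaces an implicit lookup (info->code_stub()) with an explicit argument. A compact sketch of the new shape, using hypothetical minimal types:

struct CodeStub {};

struct CompilationInfo {
  // After this patch CompilationInfo no longer carries a CodeStub*.
};

class GraphBuilderSketch {
 public:
  GraphBuilderSketch(CompilationInfo* info, CodeStub* stub)
      : info_(info), code_stub_(stub) {}
  CodeStub* stub() { return code_stub_; }  // was: info_->code_stub()

 private:
  CompilationInfo* info_;
  CodeStub* code_stub_;
};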
@ -178,6 +180,7 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
context_ = Add<HContext>();
start_environment->BindContext(context_);
start_environment->Bind(param_count, context_);
Add<HSimulate>(BailoutId::StubEntry());
@ -214,8 +217,8 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
public:
explicit CodeStubGraphBuilder(CompilationInfo* info)
: CodeStubGraphBuilderBase(info) {}
explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
: CodeStubGraphBuilderBase(info, stub) {}
protected:
virtual HValue* BuildCodeStub() {
@ -269,13 +272,8 @@ Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(
masm.GetCode(&desc);
// Copy the generated code into a heap object.
Code::Flags flags = Code::ComputeFlags(
GetCodeKind(),
GetICState(),
GetExtraICState(),
GetStubType());
Handle<Code> new_object = factory->NewCode(
desc, flags, masm.CodeObject(), NeedsImmovableCode());
desc, GetCodeFlags(), masm.CodeObject(), NeedsImmovableCode());
return new_object;
}
@ -297,8 +295,15 @@ static Handle<Code> DoGenerateCode(Stub* stub) {
timer.Start();
}
Zone zone;
CompilationInfo info(stub, isolate, &zone);
CodeStubGraphBuilder<Stub> builder(&info);
CompilationInfo info(CodeStub::MajorName(stub->MajorKey()), isolate, &zone,
stub->GetCodeFlags());
// Parameter count is the number of stack parameters.
int parameter_count = descriptor.GetStackParameterCount();
if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
parameter_count--;
}
info.set_parameter_count(parameter_count);
CodeStubGraphBuilder<Stub> builder(&info, stub);
LChunk* chunk = OptimizeGraph(builder.CreateGraph());
Handle<Code> code = chunk->Codegen();
if (FLAG_profile_hydrogen_code_stub_compilation) {
@ -314,7 +319,7 @@ template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
info()->MarkAsSavesCallerDoubles();
HValue* number = GetParameter(NumberToStringStub::kNumber);
return BuildNumberToString(number, Type::Number(zone()));
return BuildNumberToString(number, Type::Number());
}
@ -1464,16 +1469,15 @@ HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
if_leftisstring.If<HIsStringAndBranch>(left);
if_leftisstring.Then();
{
Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
right_type, result_type,
state.fixed_right_arg(), allocation_mode,
state.strength()));
state.fixed_right_arg(), allocation_mode));
}
if_leftisstring.Else();
{
Push(BuildBinaryOperation(
state.op(), left, right, left_type, right_type, result_type,
state.fixed_right_arg(), allocation_mode, state.strength()));
Push(BuildBinaryOperation(state.op(), left, right, left_type,
right_type, result_type,
state.fixed_right_arg(), allocation_mode));
}
if_leftisstring.End();
result = Pop();
@ -1483,23 +1487,22 @@ HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
if_rightisstring.Then();
{
Push(BuildBinaryOperation(state.op(), left, right, left_type,
Type::String(zone()), result_type,
state.fixed_right_arg(), allocation_mode,
state.strength()));
Type::String(), result_type,
state.fixed_right_arg(), allocation_mode));
}
if_rightisstring.Else();
{
Push(BuildBinaryOperation(
state.op(), left, right, left_type, right_type, result_type,
state.fixed_right_arg(), allocation_mode, state.strength()));
Push(BuildBinaryOperation(state.op(), left, right, left_type,
right_type, result_type,
state.fixed_right_arg(), allocation_mode));
}
if_rightisstring.End();
result = Pop();
}
} else {
result = BuildBinaryOperation(
state.op(), left, right, left_type, right_type, result_type,
state.fixed_right_arg(), allocation_mode, state.strength());
result = BuildBinaryOperation(state.op(), left, right, left_type,
right_type, result_type,
state.fixed_right_arg(), allocation_mode);
}
// If we encounter a generic argument, the number conversion is
@ -1533,7 +1536,7 @@ HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
result_type, state.fixed_right_arg(),
allocation_mode, state.strength());
allocation_mode);
}
@ -2154,8 +2157,7 @@ HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
HValue* hash = BuildElementIndexHash(key);
return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
casted_stub()->language_mode());
return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}
@ -2186,8 +2188,8 @@ template <>
class CodeStubGraphBuilder<KeyedLoadGenericStub>
: public CodeStubGraphBuilderBase {
public:
explicit CodeStubGraphBuilder(CompilationInfo* info)
: CodeStubGraphBuilderBase(info) {}
explicit CodeStubGraphBuilder(CompilationInfo* info, CodeStub* stub)
: CodeStubGraphBuilderBase(info, stub) {}
protected:
virtual HValue* BuildCodeStub();
@ -2289,8 +2291,7 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
HValue* hash = BuildElementIndexHash(key);
Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash,
casted_stub()->language_mode()));
Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
}
kind_if.Else();
@ -2334,8 +2335,8 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
hash = AddUncasted<HShr>(hash, Add<HConstant>(Name::kHashShift));
HValue* value = BuildUncheckedDictionaryElementLoad(
receiver, properties, key, hash, casted_stub()->language_mode());
HValue* value =
BuildUncheckedDictionaryElementLoad(receiver, properties, key, hash);
Push(value);
}
if_dict_properties.Else();
@ -2412,10 +2413,7 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
// KeyedLookupCache miss; call runtime.
Add<HPushArguments>(receiver, key);
Push(Add<HCallRuntime>(
Runtime::FunctionForId(is_strong(casted_stub()->language_mode())
? Runtime::kKeyedGetPropertyStrong
: Runtime::kKeyedGetProperty),
2));
Runtime::FunctionForId(Runtime::kKeyedGetProperty), 2));
}
inline_or_runtime.End();
}

104
deps/v8/src/code-stubs.cc

@ -96,6 +96,12 @@ Code::Kind CodeStub::GetCodeKind() const {
}
Code::Flags CodeStub::GetCodeFlags() const {
return Code::ComputeFlags(GetCodeKind(), GetICState(), GetExtraICState(),
GetStubType());
}
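GetCodeFlags folds the stub's kind, IC state, extra IC state, and stub type into a single Code::Flags word. A sketch of that kind of packing with made-up field widths (the real layout lives elsewhere in V8):

#include <cstdint>

using Flags = uint32_t;

Flags ComputeFlags(uint32_t kind, uint32_t ic_state, uint32_t extra_ic_state,
                   uint32_t stub_type) {
  // Hypothetical layout: kind 0-3, IC state 4-7, extra state 8-23, type 24-27.
  return (kind & 0xFu) | ((ic_state & 0xFu) << 4) |
         ((extra_ic_state & 0xFFFFu) << 8) | ((stub_type & 0xFu) << 24);
}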
Handle<Code> CodeStub::GetCodeCopy(const Code::FindAndReplacePattern& pattern) {
Handle<Code> ic = GetCode();
ic = isolate()->factory()->CopyCode(ic);
@ -270,7 +276,7 @@ MaybeHandle<Code> CodeStub::GetCode(Isolate* isolate, uint32_t key) {
void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate) {
// Generate the uninitialized versions of the stub.
for (int op = Token::BIT_OR; op <= Token::MOD; ++op) {
BinaryOpICStub stub(isolate, static_cast<Token::Value>(op), Strength::WEAK);
BinaryOpICStub stub(isolate, static_cast<Token::Value>(op));
stub.GetCode();
}
@ -453,9 +459,7 @@ void CompareNilICStub::UpdateStatus(Handle<Object> object) {
state.Add(NULL_TYPE);
} else if (object->IsUndefined()) {
state.Add(UNDEFINED);
} else if (object->IsUndetectableObject() ||
object->IsOddball() ||
!object->IsHeapObject()) {
} else if (object->IsUndetectableObject() || object->IsSmi()) {
state.RemoveAll();
state.Add(GENERIC);
} else if (IsMonomorphic()) {
@ -474,7 +478,7 @@ Handle<Code> TurboFanCodeStub::GenerateCode() {
Zone zone;
CallInterfaceDescriptor descriptor(GetCallInterfaceDescriptor());
compiler::CodeStubAssembler assembler(isolate(), &zone, descriptor,
GetCodeKind(), name);
GetCodeFlags(), name);
GenerateAssembly(&assembler);
return assembler.GenerateCode();
}
@ -549,18 +553,17 @@ std::ostream& operator<<(std::ostream& os, const CompareNilICStub::State& s) {
Type* CompareNilICStub::GetType(Zone* zone, Handle<Map> map) {
State state = this->state();
if (state.Contains(CompareNilICStub::GENERIC)) return Type::Any(zone);
if (state.Contains(CompareNilICStub::GENERIC)) return Type::Any();
Type* result = Type::None(zone);
Type* result = Type::None();
if (state.Contains(CompareNilICStub::UNDEFINED)) {
result = Type::Union(result, Type::Undefined(zone), zone);
result = Type::Union(result, Type::Undefined(), zone);
}
if (state.Contains(CompareNilICStub::NULL_TYPE)) {
result = Type::Union(result, Type::Null(zone), zone);
result = Type::Union(result, Type::Null(), zone);
}
if (state.Contains(CompareNilICStub::MONOMORPHIC_MAP)) {
Type* type =
map.is_null() ? Type::Detectable(zone) : Type::Class(map, zone);
Type* type = map.is_null() ? Type::Detectable() : Type::Class(map, zone);
result = Type::Union(result, type, zone);
}
@ -570,8 +573,7 @@ Type* CompareNilICStub::GetType(Zone* zone, Handle<Map> map) {
Type* CompareNilICStub::GetInputType(Zone* zone, Handle<Map> map) {
Type* output_type = GetType(zone, map);
Type* nil_type =
nil_value() == kNullValue ? Type::Null(zone) : Type::Undefined(zone);
Type* nil_type = nil_value() == kNullValue ? Type::Null() : Type::Undefined();
return Type::Union(output_type, nil_type, zone);
}
@ -599,9 +601,7 @@ void LoadDictionaryElementStub::InitializeDescriptor(
void KeyedLoadGenericStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
descriptor->Initialize(
Runtime::FunctionForId(is_strong(language_mode())
? Runtime::kKeyedGetPropertyStrong
: Runtime::kKeyedGetProperty)->entry);
Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry);
}
@ -798,28 +798,8 @@ void CreateWeakCellStub::GenerateAheadOfTime(Isolate* isolate) {
void StoreElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind()) {
case FAST_ELEMENTS:
case FAST_HOLEY_ELEMENTS:
case FAST_SMI_ELEMENTS:
case FAST_HOLEY_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_HOLEY_DOUBLE_ELEMENTS:
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
case TYPE##_ELEMENTS:
TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
UNREACHABLE();
break;
case DICTIONARY_ELEMENTS:
DCHECK_EQ(DICTIONARY_ELEMENTS, elements_kind());
ElementHandlerCompiler::GenerateStoreSlow(masm);
break;
case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
}
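The rewritten Generate above collapses an exhaustive elements-kind switch into the one supported case plus an assertion. Schematically, under simplified types:

#include <cassert>

enum ElementsKind { FAST_ELEMENTS, DICTIONARY_ELEMENTS };

void GenerateStoreSlow() { /* dictionary-mode store path */ }

void Generate(ElementsKind elements_kind) {
  // Every other kind was UNREACHABLE before; the constructor is now
  // expected to reject them, so a single assertion suffices.
  assert(elements_kind == DICTIONARY_ELEMENTS);
  GenerateStoreSlow();
}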
@ -838,52 +818,6 @@ void StoreFastElementStub::GenerateAheadOfTime(Isolate* isolate) {
}
void RestParamAccessStub::Generate(MacroAssembler* masm) { GenerateNew(masm); }
void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
switch (type()) {
case READ_ELEMENT:
GenerateReadElement(masm);
break;
case NEW_SLOPPY_FAST:
GenerateNewSloppyFast(masm);
break;
case NEW_SLOPPY_SLOW:
GenerateNewSloppySlow(masm);
break;
case NEW_STRICT:
GenerateNewStrict(masm);
break;
}
}
void ArgumentsAccessStub::PrintName(std::ostream& os) const { // NOLINT
os << "ArgumentsAccessStub_";
switch (type()) {
case READ_ELEMENT:
os << "ReadElement";
break;
case NEW_SLOPPY_FAST:
os << "NewSloppyFast";
break;
case NEW_SLOPPY_SLOW:
os << "NewSloppySlow";
break;
case NEW_STRICT:
os << "NewStrict";
break;
}
return;
}
void RestParamAccessStub::PrintName(std::ostream& os) const { // NOLINT
os << "RestParamAccessStub_";
}
void ArrayConstructorStub::PrintName(std::ostream& os) const { // NOLINT
os << "ArrayConstructorStub";
switch (argument_count()) {
@ -964,9 +898,9 @@ bool ToBooleanStub::Types::UpdateStatus(Handle<Object> object) {
Add(SPEC_OBJECT);
return !object->IsUndetectableObject();
} else if (object->IsString()) {
DCHECK(!object->IsUndetectableObject());
Add(STRING);
return !object->IsUndetectableObject() &&
String::cast(*object)->length() != 0;
return String::cast(*object)->length() != 0;
} else if (object->IsSymbol()) {
Add(SYMBOL);
return true;

184
deps/v8/src/code-stubs.h

@ -21,7 +21,6 @@ namespace internal {
// List of code stubs used on all platforms.
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
/* PlatformCodeStubs */ \
V(ArgumentsAccess) \
V(ArrayConstructor) \
V(BinaryOpICWithAllocationSite) \
V(CallApiFunction) \
@ -44,7 +43,6 @@ namespace internal {
V(MathPow) \
V(ProfileEntryHook) \
V(RecordWrite) \
V(RestParamAccess) \
V(RegExpExec) \
V(StoreBufferOverflow) \
V(StoreElement) \
@ -54,6 +52,7 @@ namespace internal {
V(ToNumber) \
V(ToLength) \
V(ToString) \
V(ToName) \
V(ToObject) \
V(VectorStoreICTrampoline) \
V(VectorKeyedStoreICTrampoline) \
@ -77,6 +76,10 @@ namespace internal {
V(FastCloneShallowObject) \
V(FastNewClosure) \
V(FastNewContext) \
V(FastNewObject) \
V(FastNewRestParameter) \
V(FastNewSloppyArguments) \
V(FastNewStrictArguments) \
V(GrowArrayElements) \
V(InternalArrayNArgumentsConstructor) \
V(InternalArrayNoArgumentConstructor) \
@ -240,6 +243,8 @@ class CodeStub BASE_EMBEDDED {
virtual ExtraICState GetExtraICState() const { return kNoExtraICState; }
virtual Code::StubType GetStubType() const { return Code::NORMAL; }
Code::Flags GetCodeFlags() const;
friend std::ostream& operator<<(std::ostream& os, const CodeStub& s) {
s.PrintName(os);
return os;
@ -323,8 +328,10 @@ class CodeStub BASE_EMBEDDED {
#define DEFINE_CODE_STUB(NAME, SUPER) \
protected: \
public: \
inline Major MajorKey() const override { return NAME; }; \
\
protected: \
DEFINE_CODE_STUB_BASE(NAME##Stub, SUPER)
@ -720,6 +727,55 @@ class FastNewContextStub final : public HydrogenCodeStub {
};
class FastNewObjectStub final : public PlatformCodeStub {
public:
explicit FastNewObjectStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewObject);
DEFINE_PLATFORM_CODE_STUB(FastNewObject, PlatformCodeStub);
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version, which is partly a copy
// of the strict arguments object materialization code.
class FastNewRestParameterStub final : public PlatformCodeStub {
public:
explicit FastNewRestParameterStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewRestParameter);
DEFINE_PLATFORM_CODE_STUB(FastNewRestParameter, PlatformCodeStub);
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version.
class FastNewSloppyArgumentsStub final : public PlatformCodeStub {
public:
explicit FastNewSloppyArgumentsStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewSloppyArguments);
DEFINE_PLATFORM_CODE_STUB(FastNewSloppyArguments, PlatformCodeStub);
};
// TODO(turbofan): This stub should be possible to write in TurboFan
// using the CodeStubAssembler very soon in a way that is as efficient
// and easy as the current handwritten version.
class FastNewStrictArgumentsStub final : public PlatformCodeStub {
public:
explicit FastNewStrictArgumentsStub(Isolate* isolate)
: PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewStrictArguments);
DEFINE_PLATFORM_CODE_STUB(FastNewStrictArguments, PlatformCodeStub);
};
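These four stubs replace the enum-dispatching ArgumentsAccessStub and RestParamAccessStub deleted later in this file. A schematic contrast between the two designs, with hypothetical names:

// Old shape: one stub, one minor-key enum, a switch inside Generate().
enum class ArgumentsType { kSloppyFast, kSloppySlow, kStrict };
struct ArgumentsAccessSketch {
  void Generate(ArgumentsType type) {
    switch (type) { default: break; }  // three code paths behind one key
  }
};

// New shape: each variant is its own stub with its own generator, so
// callers (CodeFactory::FastNewSloppyArguments etc.) pick a class, not a key.
struct FastNewSloppyArgumentsSketch { void Generate() {} };
struct FastNewStrictArgumentsSketch { void Generate() {} };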
class FastCloneRegExpStub final : public HydrogenCodeStub {
public:
explicit FastCloneRegExpStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
@ -920,6 +976,7 @@ class CallICStub: public PlatformCodeStub {
protected:
int arg_count() const { return state().argc(); }
ConvertReceiverMode convert_mode() const { return state().convert_mode(); }
TailCallMode tail_call_mode() const { return state().tail_call_mode(); }
CallICState state() const {
return CallICState(static_cast<ExtraICState>(minor_key_));
@ -1383,11 +1440,13 @@ class CallApiFunctionStub : public PlatformCodeStub {
class CallApiAccessorStub : public PlatformCodeStub {
public:
CallApiAccessorStub(Isolate* isolate, bool is_store, bool call_data_undefined)
CallApiAccessorStub(Isolate* isolate, bool is_store, bool call_data_undefined,
bool is_lazy)
: PlatformCodeStub(isolate) {
minor_key_ = IsStoreBits::encode(is_store) |
CallDataUndefinedBits::encode(call_data_undefined) |
ArgumentBits::encode(is_store ? 1 : 0);
ArgumentBits::encode(is_store ? 1 : 0) |
IsLazyAccessorBits::encode(is_lazy);
}
protected:
@ -1402,6 +1461,7 @@ class CallApiAccessorStub : public PlatformCodeStub {
private:
bool is_store() const { return IsStoreBits::decode(minor_key_); }
bool is_lazy() const { return IsLazyAccessorBits::decode(minor_key_); }
bool call_data_undefined() const {
return CallDataUndefinedBits::decode(minor_key_);
}
@ -1410,6 +1470,7 @@ class CallApiAccessorStub : public PlatformCodeStub {
class IsStoreBits: public BitField<bool, 0, 1> {};
class CallDataUndefinedBits: public BitField<bool, 1, 1> {};
class ArgumentBits : public BitField<int, 2, kArgBits> {};
class IsLazyAccessorBits : public BitField<bool, 3 + kArgBits, 1> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(ApiAccessor);
DEFINE_PLATFORM_CODE_STUB(CallApiAccessor, PlatformCodeStub);
@ -1445,9 +1506,9 @@ class CallApiGetterStub : public PlatformCodeStub {
class BinaryOpICStub : public HydrogenCodeStub {
public:
BinaryOpICStub(Isolate* isolate, Token::Value op, Strength strength)
BinaryOpICStub(Isolate* isolate, Token::Value op)
: HydrogenCodeStub(isolate, UNINITIALIZED) {
BinaryOpICState state(isolate, op, strength);
BinaryOpICState state(isolate, op);
set_sub_minor_key(state.GetExtraICState());
}
@ -1528,9 +1589,8 @@ class BinaryOpICWithAllocationSiteStub final : public PlatformCodeStub {
class BinaryOpWithAllocationSiteStub final : public BinaryOpICStub {
public:
BinaryOpWithAllocationSiteStub(Isolate* isolate, Token::Value op,
Strength strength)
: BinaryOpICStub(isolate, op, strength) {}
BinaryOpWithAllocationSiteStub(Isolate* isolate, Token::Value op)
: BinaryOpICStub(isolate, op) {}
BinaryOpWithAllocationSiteStub(Isolate* isolate, const BinaryOpICState& state)
: BinaryOpICStub(isolate, state) {}
@ -1581,13 +1641,11 @@ class StringAddStub final : public HydrogenCodeStub {
class CompareICStub : public PlatformCodeStub {
public:
CompareICStub(Isolate* isolate, Token::Value op, Strength strength,
CompareICState::State left, CompareICState::State right,
CompareICState::State state)
CompareICStub(Isolate* isolate, Token::Value op, CompareICState::State left,
CompareICState::State right, CompareICState::State state)
: PlatformCodeStub(isolate) {
DCHECK(Token::IsCompareOp(op));
minor_key_ = OpBits::encode(op - Token::EQ) |
StrengthBits::encode(is_strong(strength)) |
LeftStateBits::encode(left) | RightStateBits::encode(right) |
StateBits::encode(state);
}
@ -1600,10 +1658,6 @@ class CompareICStub : public PlatformCodeStub {
return static_cast<Token::Value>(Token::EQ + OpBits::decode(minor_key_));
}
Strength strength() const {
return StrengthBits::decode(minor_key_) ? Strength::STRONG : Strength::WEAK;
}
CompareICState::State left() const {
return LeftStateBits::decode(minor_key_);
}
@ -1636,10 +1690,9 @@ class CompareICStub : public PlatformCodeStub {
}
class OpBits : public BitField<int, 0, 3> {};
class StrengthBits : public BitField<bool, 3, 1> {};
class LeftStateBits : public BitField<CompareICState::State, 4, 4> {};
class RightStateBits : public BitField<CompareICState::State, 8, 4> {};
class StateBits : public BitField<CompareICState::State, 12, 4> {};
class LeftStateBits : public BitField<CompareICState::State, 3, 4> {};
class RightStateBits : public BitField<CompareICState::State, 7, 4> {};
class StateBits : public BitField<CompareICState::State, 11, 4> {};
Handle<Map> known_map_;
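Removing the one-bit StrengthBits field shifts every later field down by one bit, which is all the 4/8/12 to 3/7/11 renumbering above does. A minimal BitField sketch (the real template lives in V8's utils) showing the encoding:

#include <cstdint>

template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((1u << kSize) - 1u) << kShift;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << kShift) & kMask;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key & kMask) >> kShift);
  }
};

// New layout: Op 0-2, Left 3-6, Right 7-10, State 11-14 (Strength gone).
using OpBits = BitField<int, 0, 3>;
using LeftStateBits = BitField<int, 3, 4>;
using RightStateBits = BitField<int, 7, 4>;
using StateBits = BitField<int, 11, 4>;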
@ -1746,10 +1799,8 @@ class CEntryStub : public PlatformCodeStub {
: PlatformCodeStub(isolate) {
minor_key_ = SaveDoublesBits::encode(save_doubles == kSaveFPRegs) |
ArgvMode::encode(argv_mode == kArgvInRegister);
DCHECK(result_size == 1 || result_size == 2);
#if _WIN64 || V8_TARGET_ARCH_PPC
DCHECK(result_size == 1 || result_size == 2 || result_size == 3);
minor_key_ = ResultSizeBits::update(minor_key_, result_size);
#endif // _WIN64
}
// The version of this stub that doesn't save doubles is generated ahead of
@ -1761,9 +1812,7 @@ class CEntryStub : public PlatformCodeStub {
private:
bool save_doubles() const { return SaveDoublesBits::decode(minor_key_); }
bool argv_in_register() const { return ArgvMode::decode(minor_key_); }
#if _WIN64 || V8_TARGET_ARCH_PPC
int result_size() const { return ResultSizeBits::decode(minor_key_); }
#endif // _WIN64
bool NeedsImmovableCode() override;
@ -1805,67 +1854,6 @@ class JSEntryStub : public PlatformCodeStub {
};
class ArgumentsAccessStub: public PlatformCodeStub {
public:
enum Type {
READ_ELEMENT,
NEW_SLOPPY_FAST,
NEW_SLOPPY_SLOW,
NEW_STRICT
};
ArgumentsAccessStub(Isolate* isolate, Type type) : PlatformCodeStub(isolate) {
minor_key_ = TypeBits::encode(type);
}
CallInterfaceDescriptor GetCallInterfaceDescriptor() const override {
if (type() == READ_ELEMENT) {
return ArgumentsAccessReadDescriptor(isolate());
} else {
return ArgumentsAccessNewDescriptor(isolate());
}
}
static Type ComputeType(bool is_unmapped, bool has_duplicate_parameters) {
if (is_unmapped) {
return Type::NEW_STRICT;
} else if (has_duplicate_parameters) {
return Type::NEW_SLOPPY_SLOW;
} else {
return Type::NEW_SLOPPY_FAST;
}
}
private:
Type type() const { return TypeBits::decode(minor_key_); }
void GenerateReadElement(MacroAssembler* masm);
void GenerateNewStrict(MacroAssembler* masm);
void GenerateNewSloppyFast(MacroAssembler* masm);
void GenerateNewSloppySlow(MacroAssembler* masm);
void PrintName(std::ostream& os) const override; // NOLINT
class TypeBits : public BitField<Type, 0, 2> {};
DEFINE_PLATFORM_CODE_STUB(ArgumentsAccess, PlatformCodeStub);
};
class RestParamAccessStub : public PlatformCodeStub {
public:
explicit RestParamAccessStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
void GenerateNew(MacroAssembler* masm);
void PrintName(std::ostream& os) const override; // NOLINT
DEFINE_CALL_INTERFACE_DESCRIPTOR(RestParamAccess);
DEFINE_PLATFORM_CODE_STUB(RestParamAccess, PlatformCodeStub);
};
class RegExpExecStub: public PlatformCodeStub {
public:
explicit RegExpExecStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
@ -2096,10 +2084,6 @@ class LoadDictionaryElementStub : public HydrogenCodeStub {
return LoadWithVectorDescriptor(isolate());
}
LanguageMode language_mode() const {
return LoadICState::GetLanguageMode(MinorKey());
}
DEFINE_HYDROGEN_CODE_STUB(LoadDictionaryElement, HydrogenCodeStub);
};
@ -2114,10 +2098,6 @@ class KeyedLoadGenericStub : public HydrogenCodeStub {
Code::Kind GetCodeKind() const override { return Code::KEYED_LOAD_IC; }
InlineCacheState GetICState() const override { return GENERIC; }
LanguageMode language_mode() const {
return LoadICState::GetLanguageMode(MinorKey());
}
DEFINE_CALL_INTERFACE_DESCRIPTOR(Load);
DEFINE_HYDROGEN_CODE_STUB(KeyedLoadGeneric, HydrogenCodeStub);
@ -2724,6 +2704,9 @@ class StoreElementStub : public PlatformCodeStub {
StoreElementStub(Isolate* isolate, ElementsKind elements_kind,
KeyedAccessStoreMode mode)
: PlatformCodeStub(isolate) {
// TODO(jkummerow): Rename this stub to StoreSlowElementStub,
// drop elements_kind parameter.
DCHECK_EQ(DICTIONARY_ELEMENTS, elements_kind);
minor_key_ = ElementsKindBits::encode(elements_kind) |
CommonStoreModeBits::encode(mode);
}
@ -2950,6 +2933,15 @@ class ToStringStub final : public PlatformCodeStub {
};
class ToNameStub final : public PlatformCodeStub {
public:
explicit ToNameStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
DEFINE_CALL_INTERFACE_DESCRIPTOR(ToName);
DEFINE_PLATFORM_CODE_STUB(ToName, PlatformCodeStub);
};
class ToObjectStub final : public HydrogenCodeStub {
public:
explicit ToObjectStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}

11
deps/v8/src/codegen.cc

@ -124,18 +124,9 @@ Handle<Code> CodeGenerator::MakeCodeEpilogue(MacroAssembler* masm,
CompilationInfo* info) {
Isolate* isolate = info->isolate();
Code::Flags flags;
if (info->IsStub() && info->code_stub()) {
DCHECK_EQ(info->output_code_kind(), info->code_stub()->GetCodeKind());
flags = Code::ComputeFlags(
info->output_code_kind(), info->code_stub()->GetICState(),
info->code_stub()->GetExtraICState(), info->code_stub()->GetStubType());
} else {
flags = Code::ComputeFlags(info->output_code_kind());
}
// Allocate and install the code.
CodeDesc desc;
Code::Flags flags = info->code_flags();
bool is_crankshafted =
Code::ExtractKindFromFlags(flags) == Code::OPTIMIZED_FUNCTION ||
info->IsStub();

175
deps/v8/src/compiler.cc

@ -37,16 +37,6 @@
namespace v8 {
namespace internal {
std::ostream& operator<<(std::ostream& os, const SourcePosition& p) {
if (p.IsUnknown()) {
return os << "<?>";
} else if (FLAG_hydrogen_track_positions) {
return os << "<" << p.inlining_id() << ":" << p.position() << ">";
} else {
return os << "<0:" << p.raw() << ">";
}
}
#define PARSE_INFO_GETTER(type, name) \
type CompilationInfo::name() const { \
@ -120,8 +110,8 @@ bool CompilationInfo::has_scope() const {
CompilationInfo::CompilationInfo(ParseInfo* parse_info)
: CompilationInfo(parse_info, nullptr, nullptr, BASE, parse_info->isolate(),
parse_info->zone()) {
: CompilationInfo(parse_info, nullptr, Code::ComputeFlags(Code::FUNCTION),
BASE, parse_info->isolate(), parse_info->zone()) {
// Compiling for the snapshot typically results in different code than
// compiling later on. This means that code recompiled with deoptimization
// support won't be "equivalent" (as defined by SharedFunctionInfo::
@ -148,23 +138,17 @@ CompilationInfo::CompilationInfo(ParseInfo* parse_info)
}
CompilationInfo::CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone)
: CompilationInfo(nullptr, stub, CodeStub::MajorName(stub->MajorKey()),
STUB, isolate, zone) {}
CompilationInfo::CompilationInfo(const char* debug_name, Isolate* isolate,
Zone* zone)
: CompilationInfo(nullptr, nullptr, debug_name, STUB, isolate, zone) {
set_output_code_kind(Code::STUB);
}
Zone* zone, Code::Flags code_flags)
: CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}
CompilationInfo::CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
const char* debug_name, Mode mode,
CompilationInfo::CompilationInfo(ParseInfo* parse_info, const char* debug_name,
Code::Flags code_flags, Mode mode,
Isolate* isolate, Zone* zone)
: parse_info_(parse_info),
isolate_(isolate),
flags_(0),
code_stub_(code_stub),
code_flags_(code_flags),
mode_(mode),
osr_ast_id_(BailoutId::None()),
zone_(zone),
@ -178,19 +162,7 @@ CompilationInfo::CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
parameter_count_(0),
optimization_id_(-1),
osr_expr_stack_height_(0),
debug_name_(debug_name) {
// Parameter count is the number of stack parameters.
if (code_stub_ != NULL) {
CodeStubDescriptor descriptor(code_stub_);
parameter_count_ = descriptor.GetStackParameterCount();
if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
parameter_count_--;
}
set_output_code_kind(code_stub->GetCodeKind());
} else {
set_output_code_kind(Code::FUNCTION);
}
}
debug_name_(debug_name) {}
CompilationInfo::~CompilationInfo() {
@@ -307,10 +279,13 @@ void CompilationInfo::LogDeoptCallPosition(int pc_offset, int inlining_id) {
base::SmartArrayPointer<char> CompilationInfo::GetDebugName() const {
if (parse_info()) {
if (parse_info() && parse_info()->literal()) {
AllowHandleDereference allow_deref;
return parse_info()->literal()->debug_name()->ToCString();
}
if (parse_info() && !parse_info()->shared_info().is_null()) {
return parse_info()->shared_info()->DebugName()->ToCString();
}
const char* str = debug_name_ ? debug_name_ : "unknown";
size_t len = strlen(str) + 1;
base::SmartArrayPointer<char> name(new char[len]);
@@ -446,11 +421,15 @@ OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
if (info()->shared_info()->asm_function()) {
if (info()->osr_frame()) info()->MarkAsFrameSpecializing();
info()->MarkAsFunctionContextSpecializing();
} else if (info()->has_global_object() &&
FLAG_native_context_specialization) {
} else {
if (!FLAG_always_opt) {
info()->MarkAsBailoutOnUninitialized();
}
if (FLAG_native_context_specialization) {
info()->MarkAsNativeContextSpecializing();
info()->MarkAsTypingEnabled();
}
}
if (!info()->shared_info()->asm_function() ||
FLAG_turbo_asm_deoptimization) {
info()->MarkAsDeoptimizationEnabled();
@@ -755,7 +734,6 @@ static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
}
}
static bool CompileUnoptimizedCode(CompilationInfo* info) {
DCHECK(AllowCompilation::IsAllowed(info->isolate()));
if (!Compiler::Analyze(info->parse_info()) ||
@@ -768,32 +746,12 @@ static bool CompileUnoptimizedCode(CompilationInfo* info) {
}
// TODO(rmcilroy): Remove this temporary work-around when ignition supports
// catch and eval.
static bool IgnitionShouldFallbackToFullCodeGen(Scope* scope) {
if (scope->is_eval_scope() || scope->is_catch_scope() ||
scope->calls_eval()) {
return true;
}
for (auto inner_scope : *scope->inner_scopes()) {
if (IgnitionShouldFallbackToFullCodeGen(inner_scope)) return true;
}
return false;
}
static bool UseIgnition(CompilationInfo* info) {
// Cannot use Ignition when the {function_data} is already used.
if (info->has_shared_info() && info->shared_info()->HasBuiltinFunctionId()) {
return false;
}
// Checks whether the scope chain is supported.
if (FLAG_ignition_fallback_on_eval_and_catch &&
IgnitionShouldFallbackToFullCodeGen(info->scope())) {
return false;
}
// Checks whether top level functions should be passed by the filter.
if (info->closure().is_null()) {
Vector<const char> filter = CStrVector(FLAG_ignition_filter);
@@ -804,13 +762,39 @@ static bool UseIgnition(CompilationInfo* info) {
return info->closure()->PassesFilter(FLAG_ignition_filter);
}
static int CodeAndMetadataSize(CompilationInfo* info) {
int size = 0;
if (info->has_bytecode_array()) {
Handle<BytecodeArray> bytecode_array = info->bytecode_array();
size += bytecode_array->BytecodeArraySize();
size += bytecode_array->constant_pool()->Size();
size += bytecode_array->handler_table()->Size();
size += bytecode_array->source_position_table()->Size();
} else {
Handle<Code> code = info->code();
size += code->CodeSize();
size += code->relocation_info()->Size();
size += code->deoptimization_data()->Size();
size += code->handler_table()->Size();
}
return size;
}
static bool GenerateBaselineCode(CompilationInfo* info) {
bool success;
if (FLAG_ignition && UseIgnition(info)) {
return interpreter::Interpreter::MakeBytecode(info);
success = interpreter::Interpreter::MakeBytecode(info);
} else {
return FullCodeGenerator::MakeCode(info);
success = FullCodeGenerator::MakeCode(info);
}
if (success) {
Isolate* isolate = info->isolate();
Counters* counters = isolate->counters();
counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
counters->total_baseline_compile_count()->Increment(1);
}
return success;
}
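
GenerateBaselineCode now records, on success, how large each baseline compile was: CodeAndMetadataSize sums the code object plus its side tables (constant pool, handler table, source positions, or relocation and deopt data). A rough plain-C++ model of that accounting (the Counters struct and all sizes are invented stand-ins for V8's histogram counters):

#include <cassert>

struct CountersSketch {  // stand-in for isolate->counters()
  int total_baseline_code_size = 0;
  int total_baseline_compile_count = 0;
};

bool GenerateBaselineCodeSketch(CountersSketch* counters, bool use_ignition,
                                int bytecode_size, int machine_code_size) {
  bool success = true;  // assume the chosen backend succeeded, for the sketch
  int size = use_ignition ? bytecode_size : machine_code_size;
  if (success) {
    counters->total_baseline_code_size += size;
    counters->total_baseline_compile_count += 1;
  }
  return success;
}

int main() {
  CountersSketch counters;
  GenerateBaselineCodeSketch(&counters, /*use_ignition=*/true, 144, 504);
  GenerateBaselineCodeSketch(&counters, /*use_ignition=*/false, 144, 504);
  assert(counters.total_baseline_code_size == 144 + 504);
  assert(counters.total_baseline_compile_count == 2);
  return 0;
}
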
@@ -947,10 +931,13 @@ bool Compiler::ParseAndAnalyze(ParseInfo* info) {
static bool GetOptimizedCodeNow(CompilationInfo* info) {
Isolate* isolate = info->isolate();
CanonicalHandleScope canonical(isolate);
TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
TRACE_EVENT0("v8", "V8.OptimizeCode");
if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
TRACE_EVENT0("v8", "V8.RecompileSynchronous");
OptimizedCompileJob job(info);
if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
@@ -976,6 +963,8 @@ static bool GetOptimizedCodeNow(CompilationInfo* info) {
static bool GetOptimizedCodeLater(CompilationInfo* info) {
Isolate* isolate = info->isolate();
CanonicalHandleScope canonical(isolate);
TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
TRACE_EVENT0("v8", "V8.OptimizeCode");
if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
if (FLAG_trace_concurrent_recompilation) {
@@ -994,6 +983,7 @@ static bool GetOptimizedCodeLater(CompilationInfo* info) {
info->parse_info()->ReopenHandlesInNewHandleScope();
TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
TRACE_EVENT0("v8", "V8.RecompileSynchronous");
OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
OptimizedCompileJob::Status status = job->CreateGraph();
@@ -1033,6 +1023,8 @@ MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
Isolate* isolate = function->GetIsolate();
DCHECK(!isolate->has_pending_exception());
DCHECK(!function->is_compiled());
TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
TRACE_EVENT0("v8", "V8.CompileCode");
AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
// If the debugger is active, do not compile with turbofan unless we can
// deopt from turbofan code.
@@ -1044,7 +1036,7 @@ MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
VMState<COMPILER> state(isolate);
PostponeInterruptsScope postpone(isolate);
info.SetOptimizing(BailoutId::None(), handle(function->shared()->code()));
info.SetOptimizing();
if (GetOptimizedCodeNow(&info)) {
DCHECK(function->shared()->is_compiled());
@@ -1066,9 +1058,8 @@ MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
if (FLAG_always_opt) {
Handle<Code> opt_code;
if (Compiler::GetOptimizedCode(
function, result,
Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
if (Compiler::GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
.ToHandle(&opt_code)) {
result = opt_code;
}
}
@@ -1241,6 +1232,8 @@ void Compiler::CompileForLiveEdit(Handle<Script> script) {
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
Isolate* isolate = info->isolate();
TimerEventScope<TimerEventCompileCode> timer(isolate);
TRACE_EVENT0("v8", "V8.CompileCode");
PostponeInterruptsScope postpone(isolate);
DCHECK(!isolate->native_context().is_null());
ParseInfo* parse_info = info->parse_info();
@@ -1300,6 +1293,7 @@ static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
? info->isolate()->counters()->compile_eval()
: info->isolate()->counters()->compile();
HistogramTimerScope timer(rate);
TRACE_EVENT0("v8", info->is_eval() ? "V8.CompileEval" : "V8.Compile");
// Compile the code.
if (!CompileBaselineCode(info)) {
@@ -1470,6 +1464,7 @@ Handle<SharedFunctionInfo> Compiler::CompileScript(
!isolate->debug()->is_loaded()) {
// Then check cached code provided by embedder.
HistogramTimerScope timer(isolate->counters()->compile_deserialize());
TRACE_EVENT0("v8", "V8.CompileDeserialize");
Handle<SharedFunctionInfo> result;
if (CodeSerializer::Deserialize(isolate, *cached_data, source)
.ToHandle(&result)) {
@@ -1495,6 +1490,9 @@ Handle<SharedFunctionInfo> Compiler::CompileScript(
if (natives == NATIVES_CODE) {
script->set_type(Script::TYPE_NATIVE);
script->set_hide_source(true);
} else if (natives == EXTENSION_CODE) {
script->set_type(Script::TYPE_EXTENSION);
script->set_hide_source(true);
}
if (!script_name.is_null()) {
script->set_name(*script_name);
@@ -1535,6 +1533,7 @@ Handle<SharedFunctionInfo> Compiler::CompileScript(
compile_options == ScriptCompiler::kProduceCodeCache) {
HistogramTimerScope histogram_timer(
isolate->counters()->compile_serialize());
TRACE_EVENT0("v8", "V8.CompileSerialize");
*cached_data = CodeSerializer::Serialize(isolate, result, source);
if (FLAG_profile_deserialization) {
PrintF("[Compiling and serializing took %0.3f ms]\n",
@@ -1635,6 +1634,8 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile();
// Generate code
TimerEventScope<TimerEventCompileCode> timer(isolate);
TRACE_EVENT0("v8", "V8.CompileCode");
Handle<ScopeInfo> scope_info;
if (lazy) {
Handle<Code> code = isolate->builtins()->CompileLazy();
@@ -1700,9 +1701,39 @@ Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
return existing;
}
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative(
v8::Extension* extension, Handle<String> name) {
Isolate* isolate = name->GetIsolate();
v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
// Compute the function template for the native function.
v8::Local<v8::FunctionTemplate> fun_template =
extension->GetNativeFunctionTemplate(v8_isolate,
v8::Utils::ToLocal(name));
DCHECK(!fun_template.IsEmpty());
// Instantiate the function and create a shared function info from it.
Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
*fun_template->GetFunction(v8_isolate->GetCurrentContext())
.ToLocalChecked()));
const int literals = fun->NumberOfLiterals();
Handle<Code> code = Handle<Code>(fun->shared()->code());
Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
name, literals, FunctionKind::kNormalFunction, code,
Handle<ScopeInfo>(fun->shared()->scope_info()),
Handle<TypeFeedbackVector>(fun->shared()->feedback_vector()));
shared->set_construct_stub(*construct_stub);
// Copy the function data to the shared function info.
shared->set_function_data(fun->shared()->function_data());
int parameters = fun->shared()->internal_formal_parameter_count();
shared->set_internal_formal_parameter_count(parameters);
return shared;
}
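
The new GetSharedFunctionInfoForNative wraps a function the embedder supplies through a v8::Extension. For context, a hedged sketch of the embedder side using the public API of this V8 era (the extension name, source string, and callback are invented; error handling omitted):

#include "include/v8.h"

class MathExtension : public v8::Extension {  // hypothetical extension
 public:
  MathExtension()
      : v8::Extension("v8/math-sketch", "native function mathDouble();") {}

  v8::Local<v8::FunctionTemplate> GetNativeFunctionTemplate(
      v8::Isolate* isolate, v8::Local<v8::String> name) override {
    // Compiler::GetSharedFunctionInfoForNative() instantiates this template
    // and copies the resulting function's code, construct stub, and
    // parameter count into a fresh SharedFunctionInfo.
    return v8::FunctionTemplate::New(isolate, Double);
  }

 private:
  // Assumes the caller passes a number; a real extension would validate.
  static void Double(const v8::FunctionCallbackInfo<v8::Value>& info) {
    info.GetReturnValue().Set(info[0].As<v8::Number>()->Value() * 2);
  }
};

An embedder would register this with v8::RegisterExtension(new MathExtension()) and name it in the ExtensionConfiguration passed at context creation.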
MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
Handle<Code> current_code,
ConcurrencyMode mode,
BailoutId osr_ast_id,
JavaScriptFrame* osr_frame) {
@@ -1726,6 +1757,7 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
DCHECK(AllowCompilation::IsAllowed(isolate));
Handle<Code> current_code(shared->code());
if (!shared->is_compiled() ||
shared->scope_info() == ScopeInfo::Empty(isolate)) {
// The function was never compiled. Compile it unoptimized first.
@@ -1758,7 +1790,7 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
DCHECK(!isolate->has_pending_exception());
PostponeInterruptsScope postpone(isolate);
info->SetOptimizing(osr_ast_id, current_code);
info->SetOptimizingForOsr(osr_ast_id, current_code);
if (mode == CONCURRENT) {
if (GetOptimizedCodeLater(info.get())) {
@@ -1774,8 +1806,8 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
return MaybeHandle<Code>();
}
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
MaybeHandle<Code> Compiler::GetConcurrentlyOptimizedCode(
OptimizedCompileJob* job) {
// Take ownership of compilation info. Deleting compilation info
// also tears down the zone and the recompile job.
base::SmartPointer<CompilationInfo> info(job->info());
@@ -1783,6 +1815,7 @@ Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
VMState<COMPILER> state(isolate);
TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
TRACE_EVENT0("v8", "V8.RecompileSynchronous");
Handle<SharedFunctionInfo> shared = info->shared_info();
shared->code()->set_profiler_ticks(0);
@@ -1820,7 +1853,7 @@ Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
info->closure()->ShortPrint();
PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
}
return Handle<Code>::null();
return MaybeHandle<Code>();
}
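
The return-type change just above is the visible API shift in this file: failure is now an empty MaybeHandle<Code> rather than Handle<Code>::null(), so callers must go through ToHandle() and cannot silently use a null handle. A toy stand-in for that calling convention (plain C++, not V8's handle machinery):

#include <cassert>

template <typename T>
class MaybeHandleSketch {  // stand-in for v8::internal::MaybeHandle<T>
 public:
  MaybeHandleSketch() : ptr_(nullptr) {}             // the empty (failure) case
  explicit MaybeHandleSketch(T* ptr) : ptr_(ptr) {}
  // Mirrors MaybeHandle<T>::ToHandle(&handle): returns false on failure.
  bool ToHandle(T** out) const {
    if (ptr_ == nullptr) return false;
    *out = ptr_;
    return true;
  }
 private:
  T* ptr_;
};

struct CodeSketch { int id; };

MaybeHandleSketch<CodeSketch> GetOptimizedCodeSketch(bool bailed_out) {
  static CodeSketch code = {42};
  if (bailed_out) return MaybeHandleSketch<CodeSketch>();  // ~ MaybeHandle<Code>()
  return MaybeHandleSketch<CodeSketch>(&code);
}

int main() {
  CodeSketch* result = nullptr;
  if (GetOptimizedCodeSketch(false).ToHandle(&result)) {
    assert(result->id == 42);  // success path, as in the FLAG_always_opt caller
  }
  assert(!GetOptimizedCodeSketch(true).ToHandle(&result));  // bailout path
  return 0;
}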

115
deps/v8/src/compiler.h

@@ -10,79 +10,18 @@
#include "src/bailout-reason.h"
#include "src/compilation-dependencies.h"
#include "src/signature.h"
#include "src/source-position.h"
#include "src/zone.h"
namespace v8 {
namespace internal {
class AstValueFactory;
class HydrogenCodeStub;
// Forward declarations.
class JavaScriptFrame;
class ParseInfo;
class ScriptData;
// This class encapsulates encoding and decoding of sources positions from
// which hydrogen values originated.
// When FLAG_hydrogen_track_positions is set this object encodes the
// identifier of the inlining and absolute offset from the start of the
// inlined function.
// When the flag is not set we simply track absolute offset from the
// script start.
class SourcePosition {
public:
static SourcePosition Unknown() {
return SourcePosition::FromRaw(kNoPosition);
}
bool IsUnknown() const { return value_ == kNoPosition; }
uint32_t position() const { return PositionField::decode(value_); }
void set_position(uint32_t position) {
if (FLAG_hydrogen_track_positions) {
value_ = static_cast<uint32_t>(PositionField::update(value_, position));
} else {
value_ = position;
}
}
uint32_t inlining_id() const { return InliningIdField::decode(value_); }
void set_inlining_id(uint32_t inlining_id) {
if (FLAG_hydrogen_track_positions) {
value_ =
static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
}
}
uint32_t raw() const { return value_; }
private:
static const uint32_t kNoPosition =
static_cast<uint32_t>(RelocInfo::kNoPosition);
typedef BitField<uint32_t, 0, 9> InliningIdField;
// Offset from the start of the inlined function.
typedef BitField<uint32_t, 9, 23> PositionField;
friend class HPositionInfo;
friend class Deoptimizer;
static SourcePosition FromRaw(uint32_t raw_position) {
SourcePosition position;
position.value_ = raw_position;
return position;
}
// If FLAG_hydrogen_track_positions is set contains bitfields InliningIdField
// and PositionField.
// Otherwise contains absolute offset from the script start.
uint32_t value_;
};
std::ostream& operator<<(std::ostream& os, const SourcePosition& p);
struct InlinedFunctionInfo {
InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
int script_id, int start_position)
@@ -125,11 +64,12 @@ class CompilationInfo {
kDeoptimizationEnabled = 1 << 16,
kSourcePositionsEnabled = 1 << 17,
kFirstCompile = 1 << 18,
kBailoutOnUninitialized = 1 << 19,
};
explicit CompilationInfo(ParseInfo* parse_info);
CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone);
CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone,
Code::Flags code_flags = Code::ComputeFlags(Code::STUB));
virtual ~CompilationInfo();
ParseInfo* parse_info() const { return parse_info_; }
@@ -159,7 +99,7 @@ class CompilationInfo {
Zone* zone() { return zone_; }
bool is_osr() const { return !osr_ast_id_.IsNone(); }
Handle<Code> code() const { return code_; }
CodeStub* code_stub() const { return code_stub_; }
Code::Flags code_flags() const { return code_flags_; }
BailoutId osr_ast_id() const { return osr_ast_id_; }
Handle<Code> unoptimized_code() const { return unoptimized_code_; }
int opt_count() const { return opt_count_; }
@@ -268,12 +208,18 @@ class CompilationInfo {
bool is_first_compile() const { return GetFlag(kFirstCompile); }
void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
bool is_bailout_on_uninitialized() const {
return GetFlag(kBailoutOnUninitialized);
}
bool GeneratePreagedPrologue() const {
// Generate a pre-aged prologue if we are optimizing for size, which
// will make code flushing more aggressive. Only apply to Code::FUNCTION,
// since StaticMarkingVisitor::IsFlushable only flushes proper functions.
return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
!is_debug() && output_code_kind_ == Code::FUNCTION;
!is_debug() && output_code_kind() == Code::FUNCTION;
}
void EnsureFeedbackVector();
@@ -308,13 +254,17 @@ class CompilationInfo {
// Accessors for the different compilation modes.
bool IsOptimizing() const { return mode_ == OPTIMIZE; }
bool IsStub() const { return mode_ == STUB; }
void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
void SetOptimizing() {
DCHECK(has_shared_info());
SetMode(OPTIMIZE);
optimization_id_ = isolate()->NextOptimizationId();
code_flags_ =
Code::KindField::update(code_flags_, Code::OPTIMIZED_FUNCTION);
}
void SetOptimizingForOsr(BailoutId osr_ast_id, Handle<Code> unoptimized) {
SetOptimizing();
osr_ast_id_ = osr_ast_id;
unoptimized_code_ = unoptimized;
optimization_id_ = isolate()->NextOptimizationId();
set_output_code_kind(Code::OPTIMIZED_FUNCTION);
}
// Deoptimization support.
@@ -423,9 +373,9 @@ class CompilationInfo {
base::SmartArrayPointer<char> GetDebugName() const;
Code::Kind output_code_kind() const { return output_code_kind_; }
void set_output_code_kind(Code::Kind kind) { output_code_kind_ = kind; }
Code::Kind output_code_kind() const {
return Code::ExtractKindFromFlags(code_flags_);
}
protected:
ParseInfo* parse_info_;
@@ -446,8 +396,8 @@ class CompilationInfo {
STUB
};
CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
const char* debug_name, Mode mode, Isolate* isolate,
CompilationInfo(ParseInfo* parse_info, const char* debug_name,
Code::Flags code_flags, Mode mode, Isolate* isolate,
Zone* zone);
Isolate* isolate_;
@@ -466,10 +416,8 @@ class CompilationInfo {
unsigned flags_;
Code::Kind output_code_kind_;
Code::Flags code_flags_;
// For compiled stubs, the stub object
CodeStub* code_stub_;
// The compiled code.
Handle<Code> code_;
@@ -683,19 +631,24 @@ class Compiler : public AllStatic {
static Handle<SharedFunctionInfo> GetSharedFunctionInfo(
FunctionLiteral* node, Handle<Script> script, CompilationInfo* outer);
// Create a shared function info object for a native function literal.
static Handle<SharedFunctionInfo> GetSharedFunctionInfoForNative(
v8::Extension* extension, Handle<String> name);
enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
// Generate and return optimized code or start a concurrent optimization job.
// In the latter case, return the InOptimizationQueue builtin. On failure,
// return the empty handle.
MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
Handle<JSFunction> function, Handle<Code> current_code,
ConcurrencyMode mode, BailoutId osr_ast_id = BailoutId::None(),
Handle<JSFunction> function, ConcurrencyMode mode,
BailoutId osr_ast_id = BailoutId::None(),
JavaScriptFrame* osr_frame = nullptr);
// Generate and return code from previously queued optimization job.
// On failure, return the empty handle.
static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
MUST_USE_RESULT static MaybeHandle<Code> GetConcurrentlyOptimizedCode(
OptimizedCompileJob* job);
};
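
After this change every CompilationInfo is built by one private constructor taking Code::Flags; the public stub-oriented constructor merely supplies Code::ComputeFlags(Code::STUB) as a default argument. The delegating shape in miniature (all names and flag values invented):

#include <cassert>
#include <string>

using FlagsSketch = unsigned;
constexpr FlagsSketch kFunctionFlags = 0;
constexpr FlagsSketch kStubFlags = 2;  // ~ Code::ComputeFlags(Code::STUB)

class CompilationInfoSketch {
 public:
  // Stub/debug-name form: flags default to "stub code".
  explicit CompilationInfoSketch(const char* debug_name,
                                 FlagsSketch code_flags = kStubFlags)
      : CompilationInfoSketch(nullptr, debug_name, code_flags) {}
  // Parsed-function form: always ordinary function code.
  explicit CompilationInfoSketch(const std::string* parse_info)
      : CompilationInfoSketch(parse_info, nullptr, kFunctionFlags) {}
  FlagsSketch code_flags() const { return code_flags_; }

 private:
  CompilationInfoSketch(const std::string* parse_info, const char* debug_name,
                        FlagsSketch code_flags)
      : parse_info_(parse_info),
        debug_name_(debug_name),
        code_flags_(code_flags) {}
  const std::string* parse_info_;
  const char* debug_name_;
  FlagsSketch code_flags_;
};

int main() {
  assert(CompilationInfoSketch("my-stub").code_flags() == kStubFlags);
  std::string parsed = "fn";
  assert(CompilationInfoSketch(&parsed).code_flags() == kFunctionFlags);
  return 0;
}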

14
deps/v8/src/compiler/access-builder.cc

@@ -6,9 +6,9 @@
#include "src/contexts.h"
#include "src/frames.h"
#include "src/handles-inl.h"
#include "src/heap/heap.h"
#include "src/type-cache.h"
#include "src/types-inl.h"
namespace v8 {
namespace internal {
@@ -268,20 +268,16 @@ FieldAccess AccessBuilder::ForValue() {
// static
FieldAccess AccessBuilder::ForArgumentsLength() {
int offset =
JSObject::kHeaderSize + Heap::kArgumentsLengthIndex * kPointerSize;
FieldAccess access = {kTaggedBase, offset, Handle<Name>(), Type::Any(),
MachineType::AnyTagged()};
FieldAccess access = {kTaggedBase, JSArgumentsObject::kLengthOffset,
Handle<Name>(), Type::Any(), MachineType::AnyTagged()};
return access;
}
// static
FieldAccess AccessBuilder::ForArgumentsCallee() {
int offset =
JSObject::kHeaderSize + Heap::kArgumentsCalleeIndex * kPointerSize;
FieldAccess access = {kTaggedBase, offset, Handle<Name>(), Type::Any(),
MachineType::AnyTagged()};
FieldAccess access = {kTaggedBase, JSSloppyArgumentsObject::kCalleeOffset,
Handle<Name>(), Type::Any(), MachineType::AnyTagged()};
return access;
}
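
Both accessors switch from recomputing JSObject::kHeaderSize + index * kPointerSize at each use site to a named constant owned by the arguments-object layout. The benefit in miniature (every number below is invented):

#include <cassert>

constexpr int kPointerSizeSketch = 8;
constexpr int kHeaderSizeSketch = 2 * kPointerSizeSketch;  // hypothetical header
constexpr int kArgumentsLengthIndexSketch = 0;             // hypothetical index

// Old style: every caller re-derives the offset from an in-object index.
constexpr int ComputedLengthOffset() {
  return kHeaderSizeSketch + kArgumentsLengthIndexSketch * kPointerSizeSketch;
}

// New style: the class layout owns one named constant, one definition site.
struct JSArgumentsObjectSketch {
  static constexpr int kLengthOffset = kHeaderSizeSketch;
};

int main() {
  assert(ComputedLengthOffset() == JSArgumentsObjectSketch::kLengthOffset);
  return 0;
}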

15
deps/v8/src/compiler/access-info.cc

@@ -8,9 +8,9 @@
#include "src/compilation-dependencies.h"
#include "src/compiler/access-info.h"
#include "src/field-index-inl.h"
#include "src/field-type.h"
#include "src/objects-inl.h" // TODO(mstarzinger): Temporary cycle breaker!
#include "src/type-cache.h"
#include "src/types-inl.h"
namespace v8 {
namespace internal {
@@ -232,6 +232,9 @@ bool AccessInfoFactory::ComputePropertyAccessInfo(
// Compute the receiver type.
Handle<Map> receiver_map = map;
// Property lookups require the name to be internalized.
name = isolate()->factory()->InternalizeName(name);
// We support fast inline cases for certain JSObject getters.
if (access_mode == AccessMode::kLoad &&
LookupSpecialFieldAccessor(map, name, access_info)) {
@@ -242,7 +245,7 @@ bool AccessInfoFactory::ComputePropertyAccessInfo(
do {
// Lookup the named property on the {map}.
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
int const number = descriptors->SearchWithCache(*name, *map);
int const number = descriptors->SearchWithCache(isolate(), *name, *map);
if (number != DescriptorArray::kNotFound) {
PropertyDetails const details = descriptors->GetDetails(number);
if (access_mode == AccessMode::kStore) {
@@ -277,8 +280,7 @@ bool AccessInfoFactory::ComputePropertyAccessInfo(
// Extract the field type from the property details (make sure its
// representation is TaggedPointer to reflect the heap object case).
field_type = Type::Intersect(
Type::Convert<HeapType>(
handle(descriptors->GetFieldType(number), isolate()), zone()),
descriptors->GetFieldType(number)->Convert(zone()),
Type::TaggedPointer(), zone());
if (field_type->Is(Type::None())) {
// Store is not safe if the field type was cleared.
@@ -454,10 +456,7 @@ bool AccessInfoFactory::LookupTransition(Handle<Map> map, Handle<Name> name,
// Extract the field type from the property details (make sure its
// representation is TaggedPointer to reflect the heap object case).
field_type = Type::Intersect(
Type::Convert<HeapType>(
handle(
transition_map->instance_descriptors()->GetFieldType(number),
isolate()),
transition_map->instance_descriptors()->GetFieldType(number)->Convert(
zone()),
Type::TaggedPointer(), zone());
if (field_type->Is(Type::None())) {

120
deps/v8/src/compiler/arm/code-generator-arm.cc

@@ -206,6 +206,19 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
: OutOfLineCode(gen),
object_(object),
index_(index),
index_immediate_(0),
value_(value),
scratch0_(scratch0),
scratch1_(scratch1),
mode_(mode) {}
OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t index,
Register value, Register scratch0, Register scratch1,
RecordWriteMode mode)
: OutOfLineCode(gen),
object_(object),
index_(no_reg),
index_immediate_(index),
value_(value),
scratch0_(scratch0),
scratch1_(scratch1),
@@ -215,24 +228,36 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
if (mode_ > RecordWriteMode::kValueIsPointer) {
__ JumpIfSmi(value_, exit());
}
if (mode_ > RecordWriteMode::kValueIsMap) {
__ CheckPageFlag(value_, scratch0_,
MemoryChunk::kPointersToHereAreInterestingMask, eq,
exit());
}
RememberedSetAction const remembered_set_action =
mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
: OMIT_REMEMBERED_SET;
SaveFPRegsMode const save_fp_mode =
frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
// TODO(turbofan): Once we get frame elision working, we need to save
// and restore lr properly here if the frame was elided.
if (!frame()->needs_frame()) {
// We need to save and restore lr if the frame was elided.
__ Push(lr);
}
RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
EMIT_REMEMBERED_SET, save_fp_mode);
__ add(scratch1_, object_, index_);
remembered_set_action, save_fp_mode);
if (index_.is(no_reg)) {
__ add(scratch1_, object_, Operand(index_immediate_));
} else {
DCHECK_EQ(0, index_immediate_);
__ add(scratch1_, object_, Operand(index_));
}
__ CallStub(&stub);
if (!frame()->needs_frame()) {
__ Pop(lr);
}
}
private:
Register const object_;
Register const index_;
int32_t const index_immediate_; // Valid if index_.is(no_reg).
Register const value_;
Register const scratch0_;
Register const scratch1_;
@@ -449,11 +474,6 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
frame_access_state()->ClearSPDelta();
break;
}
case kArchLazyBailout: {
EnsureSpaceForLazyDeopt();
RecordCallPosition(instr);
break;
}
case kArchPrepareCallCFunction: {
int const num_parameters = MiscField::decode(instr->opcode());
__ PrepareCallCFunction(num_parameters, kScratchReg);
@@ -514,6 +534,13 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ mov(i.OutputRegister(), fp);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchParentFramePointer:
if (frame_access_state()->frame()->needs_frame()) {
__ ldr(i.OutputRegister(), MemOperand(fp, 0));
} else {
__ mov(i.OutputRegister(), fp);
}
break;
case kArchTruncateDoubleToI:
__ TruncateDoubleToI(i.OutputRegister(), i.InputFloat64Register(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
@@ -522,19 +549,43 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
RecordWriteMode mode =
static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
Register object = i.InputRegister(0);
Register index = i.InputRegister(1);
Register value = i.InputRegister(2);
Register scratch0 = i.TempRegister(0);
Register scratch1 = i.TempRegister(1);
auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
OutOfLineRecordWrite* ool;
AddressingMode addressing_mode =
AddressingModeField::decode(instr->opcode());
if (addressing_mode == kMode_Offset_RI) {
int32_t index = i.InputInt32(1);
ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
scratch0, scratch1, mode);
__ str(value, MemOperand(object, index));
} else {
DCHECK_EQ(kMode_Offset_RR, addressing_mode);
Register index(i.InputRegister(1));
ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
scratch0, scratch1, mode);
__ str(value, MemOperand(object, index));
}
__ CheckPageFlag(object, scratch0,
MemoryChunk::kPointersFromHereAreInterestingMask, ne,
ool->entry());
__ bind(ool->exit());
break;
}
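
The mode_ comparisons wired into the record-write path above (JumpIfSmi, CheckPageFlag, and the remembered-set choice) form a cheap filter in front of the RecordWriteStub call. Just the decision logic, modeled in plain C++ (the page test is reduced to a boolean):

#include <cassert>

enum class RecordWriteModeSketch { kValueIsMap, kValueIsPointer, kValueIsAny };

struct ValueSketch {
  bool is_smi;               // Smis are not heap pointers
  bool in_interesting_page;  // ~ kPointersToHereAreInterestingMask
};

// True when the out-of-line stub call must actually run.
bool NeedsBarrier(RecordWriteModeSketch mode, ValueSketch v) {
  if (mode > RecordWriteModeSketch::kValueIsPointer && v.is_smi)
    return false;  // ~ JumpIfSmi(value_, exit())
  if (mode > RecordWriteModeSketch::kValueIsMap && !v.in_interesting_page)
    return false;  // ~ CheckPageFlag(..., exit())
  return true;
}

int main() {
  assert(!NeedsBarrier(RecordWriteModeSketch::kValueIsAny, {true, true}));
  assert(!NeedsBarrier(RecordWriteModeSketch::kValueIsAny, {false, false}));
  // Map writes skip both filters and always reach the stub.
  assert(NeedsBarrier(RecordWriteModeSketch::kValueIsMap, {false, false}));
  return 0;
}
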
case kArchStackSlot: {
FrameOffset offset =
frame_access_state()->GetFrameOffset(i.InputInt32(0));
Register base;
if (offset.from_stack_pointer()) {
base = sp;
} else {
base = fp;
}
__ add(i.OutputRegister(0), base, Operand(offset.offset()));
break;
}
case kArmAdd:
__ add(i.OutputRegister(), i.InputRegister(0), i.InputOperand2(1),
i.OutputSBit());
@@ -622,6 +673,13 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmSbfx: {
CpuFeatureScope scope(masm(), ARMv7);
__ sbfx(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
i.InputInt8(2));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmSxtb:
__ sxtb(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1));
DCHECK_EQ(LeaveCC, i.OutputSBit());
@@ -658,6 +716,12 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
i.InputInt32(2));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArmRbit: {
CpuFeatureScope scope(masm(), ARMv7);
__ rbit(i.OutputRegister(), i.InputRegister(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmClz:
__ clz(i.OutputRegister(), i.InputRegister(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
@@ -831,6 +895,20 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtF32S32: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vmov(scratch, i.InputRegister(0));
__ vcvt_f32_s32(i.OutputFloat32Register(), scratch);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtF32U32: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vmov(scratch, i.InputRegister(0));
__ vcvt_f32_u32(i.OutputFloat32Register(), scratch);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtF64S32: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vmov(scratch, i.InputRegister(0));
@@ -845,6 +923,20 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtS32F32: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vcvt_s32_f32(scratch, i.InputFloat32Register(0));
__ vmov(i.OutputRegister(), scratch);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtU32F32: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vcvt_u32_f32(scratch, i.InputFloat32Register(0));
__ vmov(i.OutputRegister(), scratch);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
}
case kArmVcvtS32F64: {
SwVfpRegister scratch = kScratchDoubleReg.low();
__ vcvt_s32_f64(scratch, i.InputFloat64Register(0));
@@ -1098,8 +1190,6 @@ void CodeGenerator::AssemblePrologue() {
// remaining stack slots.
if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
osr_pc_offset_ = __ pc_offset();
// TODO(titzer): cannot address target function == local #-1
__ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
}

Some files were not shown because too many files changed in this diff
