Browse Source

deps: upgrade to V8 4.8.271.17

Pick up V8 4.8 branch-head. This branch brings in @@isConcatSpreadable,
@@toPrimitive and ToLength ES6 changes. For full details see:
http://v8project.blogspot.de/2015/11/v8-release-48.html

https://github.com/v8/v8/commit/fa163e2

Ref: https://github.com/nodejs/node/pull/4399
PR-URL: https://github.com/nodejs/node/pull/4785
Reviewed-By: bnoordhuis - Ben Noordhuis <info@bnoordhuis.nl>
process-exit-stdio-flushing
Ali Ijaz Sheikh 9 years ago
parent
commit
ef4170ea03
  1. 23
      deps/v8/.gitignore
  2. 2
      deps/v8/AUTHORS
  3. 432
      deps/v8/BUILD.gn
  4. 1496
      deps/v8/ChangeLog
  5. 61
      deps/v8/DEPS
  6. 15
      deps/v8/WATCHLISTS
  7. 16
      deps/v8/build/all.gyp
  8. 77
      deps/v8/build/config/win/msvs_dependencies.isolate
  9. 2
      deps/v8/build/features.gypi
  10. 1
      deps/v8/build/get_landmines.py
  11. 2
      deps/v8/build/gyp_environment.py
  12. 29
      deps/v8/build/gyp_v8
  13. 24
      deps/v8/build/isolate.gypi
  14. 40
      deps/v8/build/standalone.gypi
  15. 4
      deps/v8/build/toolchain.gypi
  16. 268
      deps/v8/build/vs_toolchain.py
  17. 6
      deps/v8/docs/becoming_v8_committer.md
  18. 7
      deps/v8/docs/merging_and_patching.md
  19. 9
      deps/v8/docs/runtime_functions.md
  20. 6
      deps/v8/docs/source.md
  21. 6
      deps/v8/docs/v8_c_plus_plus_styleand_sops.md
  22. 14
      deps/v8/docs/v8_committers_responsibility.md
  23. 6
      deps/v8/include/v8-version.h
  24. 166
      deps/v8/include/v8.h
  25. 17
      deps/v8/include/v8config.h
  26. 8
      deps/v8/samples/shell.cc
  27. 60
      deps/v8/src/accessors.cc
  28. 3
      deps/v8/src/accessors.h
  29. 38
      deps/v8/src/address-map.cc
  30. 184
      deps/v8/src/address-map.h
  31. 5
      deps/v8/src/allocation-site-scopes.h
  32. 3
      deps/v8/src/allocation.h
  33. 130
      deps/v8/src/api-natives.cc
  34. 9
      deps/v8/src/api-natives.h
  35. 342
      deps/v8/src/api.cc
  36. 11
      deps/v8/src/api.h
  37. 10
      deps/v8/src/arguments.h
  38. 47
      deps/v8/src/arm/assembler-arm-inl.h
  39. 65
      deps/v8/src/arm/assembler-arm.cc
  40. 254
      deps/v8/src/arm/assembler-arm.h
  41. 296
      deps/v8/src/arm/builtins-arm.cc
  42. 186
      deps/v8/src/arm/code-stubs-arm.cc
  43. 3
      deps/v8/src/arm/code-stubs-arm.h
  44. 3
      deps/v8/src/arm/codegen-arm.h
  45. 11
      deps/v8/src/arm/constants-arm.cc
  46. 3
      deps/v8/src/arm/constants-arm.h
  47. 19
      deps/v8/src/arm/deoptimizer-arm.cc
  48. 2
      deps/v8/src/arm/disasm-arm.cc
  49. 3
      deps/v8/src/arm/frames-arm.h
  50. 46
      deps/v8/src/arm/interface-descriptors-arm.cc
  51. 4
      deps/v8/src/arm/interface-descriptors-arm.h
  52. 28
      deps/v8/src/arm/macro-assembler-arm.cc
  53. 32
      deps/v8/src/arm/macro-assembler-arm.h
  54. 3
      deps/v8/src/arm/simulator-arm.cc
  55. 6
      deps/v8/src/arm/simulator-arm.h
  56. 9
      deps/v8/src/arm64/assembler-arm64-inl.h
  57. 40
      deps/v8/src/arm64/assembler-arm64.cc
  58. 283
      deps/v8/src/arm64/assembler-arm64.h
  59. 236
      deps/v8/src/arm64/builtins-arm64.cc
  60. 189
      deps/v8/src/arm64/code-stubs-arm64.cc
  61. 3
      deps/v8/src/arm64/code-stubs-arm64.h
  62. 3
      deps/v8/src/arm64/codegen-arm64.h
  63. 60
      deps/v8/src/arm64/constants-arm64.h
  64. 3
      deps/v8/src/arm64/decoder-arm64-inl.h
  65. 3
      deps/v8/src/arm64/decoder-arm64.h
  66. 9
      deps/v8/src/arm64/deoptimizer-arm64.cc
  67. 164
      deps/v8/src/arm64/disasm-arm64.cc
  68. 13
      deps/v8/src/arm64/disasm-arm64.h
  69. 3
      deps/v8/src/arm64/frames-arm64.h
  70. 3
      deps/v8/src/arm64/instructions-arm64.h
  71. 3
      deps/v8/src/arm64/instrument-arm64.h
  72. 47
      deps/v8/src/arm64/interface-descriptors-arm64.cc
  73. 4
      deps/v8/src/arm64/interface-descriptors-arm64.h
  74. 3
      deps/v8/src/arm64/macro-assembler-arm64-inl.h
  75. 46
      deps/v8/src/arm64/macro-assembler-arm64.cc
  76. 7
      deps/v8/src/arm64/macro-assembler-arm64.h
  77. 9
      deps/v8/src/arm64/simulator-arm64.h
  78. 9
      deps/v8/src/arm64/utils-arm64.h
  79. 58
      deps/v8/src/assembler.cc
  80. 21
      deps/v8/src/assembler.h
  81. 3
      deps/v8/src/assert-scope.h
  82. 59
      deps/v8/src/ast-expression-visitor.cc
  83. 11
      deps/v8/src/ast-expression-visitor.h
  84. 9
      deps/v8/src/ast-literal-reindexer.cc
  85. 6
      deps/v8/src/ast-literal-reindexer.h
  86. 33
      deps/v8/src/ast-numbering.cc
  87. 2
      deps/v8/src/ast-value-factory.cc
  88. 4
      deps/v8/src/ast-value-factory.h
  89. 80
      deps/v8/src/ast.cc
  90. 276
      deps/v8/src/ast.h
  91. 4
      deps/v8/src/background-parsing-task.h
  92. 35
      deps/v8/src/bailout-reason.h
  93. 46
      deps/v8/src/base.isolate
  94. 3
      deps/v8/src/base/atomicops.h
  95. 3
      deps/v8/src/base/atomicops_internals_arm64_gcc.h
  96. 6
      deps/v8/src/base/atomicops_internals_arm_gcc.h
  97. 3
      deps/v8/src/base/atomicops_internals_atomicword_compat.h
  98. 3
      deps/v8/src/base/atomicops_internals_mac.h
  99. 3
      deps/v8/src/base/atomicops_internals_mips64_gcc.h
  100. 3
      deps/v8/src/base/atomicops_internals_mips_gcc.h

23
deps/v8/.gitignore

@ -20,6 +20,7 @@
*.xcodeproj *.xcodeproj
#*# #*#
*~ *~
.#*
.cpplint-cache .cpplint-cache
.cproject .cproject
.d8_history .d8_history
@ -42,33 +43,20 @@ shell_g
/build/gyp /build/gyp
/build/ipch/ /build/ipch/
/build/Release /build/Release
/build/win_toolchain.json
/buildtools /buildtools
/hydrogen.cfg /hydrogen.cfg
/obj /obj
/out /out
/perf.data /perf.data
/perf.data.old /perf.data.old
/test/benchmarks/CHECKED_OUT_* /test/benchmarks/data
/test/benchmarks/downloaded_*
/test/benchmarks/kraken
/test/benchmarks/octane
/test/benchmarks/sunspider
/test/mozilla/CHECKED_OUT_VERSION
/test/mozilla/data /test/mozilla/data
/test/mozilla/data.old
/test/mozilla/downloaded_*
/test/promises-aplus/promises-tests /test/promises-aplus/promises-tests
/test/promises-aplus/promises-tests.tar.gz /test/promises-aplus/promises-tests.tar.gz
/test/promises-aplus/sinon /test/promises-aplus/sinon
/test/simdjs/CHECKED_OUT_* /test/simdjs/data
/test/simdjs/ecmascript_simd*
/test/simdjs/data*
/test/test262/data /test/test262/data
/test/test262/data.old
/test/test262/tc39-test262-*
/test/test262-es6/data
/test/test262-es6/data.old
/test/test262-es6/tc39-test262-*
/testing/gmock /testing/gmock
/testing/gtest /testing/gtest
/third_party /third_party
@ -78,6 +66,9 @@ shell_g
/tools/clang /tools/clang
/tools/jsfunfuzz /tools/jsfunfuzz
/tools/jsfunfuzz.zip /tools/jsfunfuzz.zip
/tools/luci-go/linux64/isolate
/tools/luci-go/mac64/isolate
/tools/luci-go/win64/isolate.exe
/tools/oom_dump/oom_dump /tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o /tools/oom_dump/oom_dump.o
/tools/swarming_client /tools/swarming_client

2
deps/v8/AUTHORS

@ -54,6 +54,7 @@ Erich Ocean <erich.ocean@me.com>
Fedor Indutny <fedor@indutny.com> Fedor Indutny <fedor@indutny.com>
Felix Geisendörfer <haimuiba@gmail.com> Felix Geisendörfer <haimuiba@gmail.com>
Filipe David Manana <fdmanana@gmail.com> Filipe David Manana <fdmanana@gmail.com>
Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Geoffrey Garside <ggarside@gmail.com> Geoffrey Garside <ggarside@gmail.com>
Han Choongwoo <cwhan.tunz@gmail.com> Han Choongwoo <cwhan.tunz@gmail.com>
Hirofumi Mako <mkhrfm@gmail.com> Hirofumi Mako <mkhrfm@gmail.com>
@ -81,6 +82,7 @@ Michael Lutz <michi@icosahedron.de>
Michael Smith <mike@w3.org> Michael Smith <mike@w3.org>
Mike Gilbert <floppymaster@gmail.com> Mike Gilbert <floppymaster@gmail.com>
Mike Pennisi <mike@mikepennisi.com> Mike Pennisi <mike@mikepennisi.com>
Milton Chiang <milton.chiang@mediatek.com>
Nicolas Antonius Ernst Leopold Maria Kaiser <nikai@nikai.net> Nicolas Antonius Ernst Leopold Maria Kaiser <nikai@nikai.net>
Paolo Giarrusso <p.giarrusso@gmail.com> Paolo Giarrusso <p.giarrusso@gmail.com>
Patrick Gansterer <paroga@paroga.com> Patrick Gansterer <paroga@paroga.com>

432
deps/v8/BUILD.gn

@ -7,12 +7,22 @@ import("//build/config/arm.gni")
import("//build/config/mips.gni") import("//build/config/mips.gni")
import("//build/config/sanitizers/sanitizers.gni") import("//build/config/sanitizers/sanitizers.gni")
if (is_android) {
import("//build/config/android/rules.gni")
}
# Because standalone V8 builds are not supported, assume this is part of a # Because standalone V8 builds are not supported, assume this is part of a
# Chromium build. # Chromium build.
import("//build/module_args/v8.gni") import("//build_overrides/v8.gni")
import("snapshot_toolchain.gni") import("snapshot_toolchain.gni")
declare_args() {
# Enable the snapshot feature, for fast context creation.
# http://v8project.blogspot.com/2015/09/custom-startup-snapshots.html
v8_use_snapshot = true
}
# TODO(jochen): These will need to be user-settable to support standalone V8 # TODO(jochen): These will need to be user-settable to support standalone V8
# builds. # builds.
v8_deprecation_warnings = false v8_deprecation_warnings = false
@ -24,7 +34,6 @@ v8_enable_verify_heap = false
v8_interpreted_regexp = false v8_interpreted_regexp = false
v8_object_print = false v8_object_print = false
v8_postmortem_support = false v8_postmortem_support = false
v8_use_snapshot = true
v8_random_seed = "314159265" v8_random_seed = "314159265"
v8_toolset_for_d8 = "host" v8_toolset_for_d8 = "host"
@ -71,6 +80,14 @@ config("external_config") {
include_dirs = [ "include" ] include_dirs = [ "include" ]
} }
# This config should only be applied to code that needs to be explicitly
# aware of whether we are using startup data or not.
config("external_startup_data") {
if (v8_use_external_startup_data) {
defines = [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
}
}
config("features") { config("features") {
visibility = [ ":*" ] # Only targets in this file can depend on this. visibility = [ ":*" ] # Only targets in this file can depend on this.
@ -194,35 +211,34 @@ action("js2c") {
inputs = [ "tools/jsmin.py" ] inputs = [ "tools/jsmin.py" ]
sources = [ sources = [
"src/macros.py", "src/js/macros.py",
"src/messages.h", "src/messages.h",
"src/prologue.js", "src/js/prologue.js",
"src/runtime.js", "src/js/runtime.js",
"src/v8natives.js", "src/js/v8natives.js",
"src/symbol.js", "src/js/symbol.js",
"src/array.js", "src/js/array.js",
"src/string.js", "src/js/string.js",
"src/uri.js", "src/js/uri.js",
"src/math.js", "src/js/math.js",
"src/third_party/fdlibm/fdlibm.js", "src/third_party/fdlibm/fdlibm.js",
"src/date.js", "src/js/date.js",
"src/regexp.js", "src/js/regexp.js",
"src/arraybuffer.js", "src/js/arraybuffer.js",
"src/typedarray.js", "src/js/typedarray.js",
"src/iterator-prototype.js", "src/js/iterator-prototype.js",
"src/generator.js", "src/js/generator.js",
"src/object-observe.js", "src/js/object-observe.js",
"src/collection.js", "src/js/collection.js",
"src/weak-collection.js", "src/js/weak-collection.js",
"src/collection-iterator.js", "src/js/collection-iterator.js",
"src/promise.js", "src/js/promise.js",
"src/messages.js", "src/js/messages.js",
"src/json.js", "src/js/json.js",
"src/array-iterator.js", "src/js/array-iterator.js",
"src/string-iterator.js", "src/js/string-iterator.js",
"src/templates.js", "src/js/templates.js",
"src/harmony-array.js", "src/js/spread.js",
"src/harmony-typedarray.js",
"src/debug/mirrors.js", "src/debug/mirrors.js",
"src/debug/debug.js", "src/debug/debug.js",
"src/debug/liveedit.js", "src/debug/liveedit.js",
@ -233,7 +249,7 @@ action("js2c") {
] ]
if (v8_enable_i18n_support) { if (v8_enable_i18n_support) {
sources += [ "src/i18n.js" ] sources += [ "src/js/i18n.js" ]
} }
args = [ args = [
@ -260,9 +276,9 @@ action("js2c_code_stubs") {
inputs = [ "tools/jsmin.py" ] inputs = [ "tools/jsmin.py" ]
sources = [ sources = [
"src/macros.py", "src/js/macros.py",
"src/messages.h", "src/messages.h",
"src/code-stubs.js" "src/js/code-stubs.js"
] ]
outputs = [ outputs = [
@ -294,20 +310,17 @@ action("js2c_experimental") {
inputs = [ "tools/jsmin.py" ] inputs = [ "tools/jsmin.py" ]
sources = [ sources = [
"src/macros.py", "src/js/macros.py",
"src/messages.h", "src/messages.h",
"src/proxy.js", "src/js/proxy.js",
"src/generator.js", "src/js/generator.js",
"src/harmony-atomics.js", "src/js/harmony-atomics.js",
"src/harmony-array-includes.js", "src/js/harmony-array-includes.js",
"src/harmony-concat-spreadable.js", "src/js/harmony-regexp.js",
"src/harmony-tostring.js", "src/js/harmony-reflect.js",
"src/harmony-regexp.js", "src/js/harmony-object-observe.js",
"src/harmony-reflect.js", "src/js/harmony-sharedarraybuffer.js",
"src/harmony-spread.js", "src/js/harmony-simd.js"
"src/harmony-object-observe.js",
"src/harmony-sharedarraybuffer.js",
"src/harmony-simd.js"
] ]
outputs = [ outputs = [
@ -396,7 +409,7 @@ action("d8_js2c") {
inputs = [ inputs = [
"src/d8.js", "src/d8.js",
"src/macros.py", "src/js/macros.py",
] ]
outputs = [ outputs = [
@ -407,6 +420,19 @@ action("d8_js2c") {
rebase_path(inputs, root_build_dir) rebase_path(inputs, root_build_dir)
} }
if (is_android) {
android_assets("v8_external_startup_data_assets") {
if (v8_use_external_startup_data) {
deps = [
"//v8",
]
renaming_sources = v8_external_startup_data_renaming_sources
renaming_destinations = v8_external_startup_data_renaming_destinations
disable_compression = true
}
}
}
if (v8_use_external_startup_data) { if (v8_use_external_startup_data) {
action("natives_blob") { action("natives_blob") {
visibility = [ ":*" ] # Only targets in this file can depend on this. visibility = [ ":*" ] # Only targets in this file can depend on this.
@ -620,6 +646,8 @@ source_set("v8_base") {
"include/v8config.h", "include/v8config.h",
"src/accessors.cc", "src/accessors.cc",
"src/accessors.h", "src/accessors.h",
"src/address-map.cc",
"src/address-map.h",
"src/allocation.cc", "src/allocation.cc",
"src/allocation.h", "src/allocation.h",
"src/allocation-site-scopes.cc", "src/allocation-site-scopes.cc",
@ -684,6 +712,8 @@ source_set("v8_base") {
"src/compilation-statistics.h", "src/compilation-statistics.h",
"src/compiler/access-builder.cc", "src/compiler/access-builder.cc",
"src/compiler/access-builder.h", "src/compiler/access-builder.h",
"src/compiler/access-info.cc",
"src/compiler/access-info.h",
"src/compiler/all-nodes.cc", "src/compiler/all-nodes.cc",
"src/compiler/all-nodes.h", "src/compiler/all-nodes.h",
"src/compiler/ast-graph-builder.cc", "src/compiler/ast-graph-builder.cc",
@ -692,6 +722,10 @@ source_set("v8_base") {
"src/compiler/ast-loop-assignment-analyzer.h", "src/compiler/ast-loop-assignment-analyzer.h",
"src/compiler/basic-block-instrumentor.cc", "src/compiler/basic-block-instrumentor.cc",
"src/compiler/basic-block-instrumentor.h", "src/compiler/basic-block-instrumentor.h",
"src/compiler/binary-operator-reducer.cc",
"src/compiler/binary-operator-reducer.h",
"src/compiler/branch-elimination.cc",
"src/compiler/branch-elimination.h",
"src/compiler/bytecode-graph-builder.cc", "src/compiler/bytecode-graph-builder.cc",
"src/compiler/bytecode-graph-builder.h", "src/compiler/bytecode-graph-builder.h",
"src/compiler/change-lowering.cc", "src/compiler/change-lowering.cc",
@ -755,18 +789,20 @@ source_set("v8_base") {
"src/compiler/js-frame-specialization.h", "src/compiler/js-frame-specialization.h",
"src/compiler/js-generic-lowering.cc", "src/compiler/js-generic-lowering.cc",
"src/compiler/js-generic-lowering.h", "src/compiler/js-generic-lowering.h",
"src/compiler/js-global-object-specialization.cc",
"src/compiler/js-global-object-specialization.h",
"src/compiler/js-graph.cc", "src/compiler/js-graph.cc",
"src/compiler/js-graph.h", "src/compiler/js-graph.h",
"src/compiler/js-inlining.cc", "src/compiler/js-inlining.cc",
"src/compiler/js-inlining.h", "src/compiler/js-inlining.h",
"src/compiler/js-inlining-heuristic.cc",
"src/compiler/js-inlining-heuristic.h",
"src/compiler/js-intrinsic-lowering.cc", "src/compiler/js-intrinsic-lowering.cc",
"src/compiler/js-intrinsic-lowering.h", "src/compiler/js-intrinsic-lowering.h",
"src/compiler/js-native-context-specialization.cc",
"src/compiler/js-native-context-specialization.h",
"src/compiler/js-operator.cc", "src/compiler/js-operator.cc",
"src/compiler/js-operator.h", "src/compiler/js-operator.h",
"src/compiler/js-type-feedback.cc",
"src/compiler/js-type-feedback.h",
"src/compiler/js-type-feedback-lowering.cc",
"src/compiler/js-type-feedback-lowering.h",
"src/compiler/js-typed-lowering.cc", "src/compiler/js-typed-lowering.cc",
"src/compiler/js-typed-lowering.h", "src/compiler/js-typed-lowering.h",
"src/compiler/jump-threading.cc", "src/compiler/jump-threading.cc",
@ -819,8 +855,6 @@ source_set("v8_base") {
"src/compiler/register-allocator.h", "src/compiler/register-allocator.h",
"src/compiler/register-allocator-verifier.cc", "src/compiler/register-allocator-verifier.cc",
"src/compiler/register-allocator-verifier.h", "src/compiler/register-allocator-verifier.h",
"src/compiler/register-configuration.cc",
"src/compiler/register-configuration.h",
"src/compiler/representation-change.h", "src/compiler/representation-change.h",
"src/compiler/schedule.cc", "src/compiler/schedule.cc",
"src/compiler/schedule.h", "src/compiler/schedule.h",
@ -860,6 +894,68 @@ source_set("v8_base") {
"src/conversions.h", "src/conversions.h",
"src/counters.cc", "src/counters.cc",
"src/counters.h", "src/counters.h",
"src/crankshaft/hydrogen-alias-analysis.h",
"src/crankshaft/hydrogen-bce.cc",
"src/crankshaft/hydrogen-bce.h",
"src/crankshaft/hydrogen-bch.cc",
"src/crankshaft/hydrogen-bch.h",
"src/crankshaft/hydrogen-canonicalize.cc",
"src/crankshaft/hydrogen-canonicalize.h",
"src/crankshaft/hydrogen-check-elimination.cc",
"src/crankshaft/hydrogen-check-elimination.h",
"src/crankshaft/hydrogen-dce.cc",
"src/crankshaft/hydrogen-dce.h",
"src/crankshaft/hydrogen-dehoist.cc",
"src/crankshaft/hydrogen-dehoist.h",
"src/crankshaft/hydrogen-environment-liveness.cc",
"src/crankshaft/hydrogen-environment-liveness.h",
"src/crankshaft/hydrogen-escape-analysis.cc",
"src/crankshaft/hydrogen-escape-analysis.h",
"src/crankshaft/hydrogen-flow-engine.h",
"src/crankshaft/hydrogen-gvn.cc",
"src/crankshaft/hydrogen-gvn.h",
"src/crankshaft/hydrogen-infer-representation.cc",
"src/crankshaft/hydrogen-infer-representation.h",
"src/crankshaft/hydrogen-infer-types.cc",
"src/crankshaft/hydrogen-infer-types.h",
"src/crankshaft/hydrogen-instructions.cc",
"src/crankshaft/hydrogen-instructions.h",
"src/crankshaft/hydrogen-load-elimination.cc",
"src/crankshaft/hydrogen-load-elimination.h",
"src/crankshaft/hydrogen-mark-deoptimize.cc",
"src/crankshaft/hydrogen-mark-deoptimize.h",
"src/crankshaft/hydrogen-mark-unreachable.cc",
"src/crankshaft/hydrogen-mark-unreachable.h",
"src/crankshaft/hydrogen-osr.cc",
"src/crankshaft/hydrogen-osr.h",
"src/crankshaft/hydrogen-range-analysis.cc",
"src/crankshaft/hydrogen-range-analysis.h",
"src/crankshaft/hydrogen-redundant-phi.cc",
"src/crankshaft/hydrogen-redundant-phi.h",
"src/crankshaft/hydrogen-removable-simulates.cc",
"src/crankshaft/hydrogen-removable-simulates.h",
"src/crankshaft/hydrogen-representation-changes.cc",
"src/crankshaft/hydrogen-representation-changes.h",
"src/crankshaft/hydrogen-sce.cc",
"src/crankshaft/hydrogen-sce.h",
"src/crankshaft/hydrogen-store-elimination.cc",
"src/crankshaft/hydrogen-store-elimination.h",
"src/crankshaft/hydrogen-types.cc",
"src/crankshaft/hydrogen-types.h",
"src/crankshaft/hydrogen-uint32-analysis.cc",
"src/crankshaft/hydrogen-uint32-analysis.h",
"src/crankshaft/hydrogen.cc",
"src/crankshaft/hydrogen.h",
"src/crankshaft/lithium-allocator-inl.h",
"src/crankshaft/lithium-allocator.cc",
"src/crankshaft/lithium-allocator.h",
"src/crankshaft/lithium-codegen.cc",
"src/crankshaft/lithium-codegen.h",
"src/crankshaft/lithium.cc",
"src/crankshaft/lithium.h",
"src/crankshaft/typing.cc",
"src/crankshaft/typing.h",
"src/crankshaft/unique.h",
"src/date.cc", "src/date.cc",
"src/date.h", "src/date.h",
"src/dateparser-inl.h", "src/dateparser-inl.h",
@ -968,58 +1064,6 @@ source_set("v8_base") {
"src/heap/store-buffer-inl.h", "src/heap/store-buffer-inl.h",
"src/heap/store-buffer.cc", "src/heap/store-buffer.cc",
"src/heap/store-buffer.h", "src/heap/store-buffer.h",
"src/hydrogen-alias-analysis.h",
"src/hydrogen-bce.cc",
"src/hydrogen-bce.h",
"src/hydrogen-bch.cc",
"src/hydrogen-bch.h",
"src/hydrogen-canonicalize.cc",
"src/hydrogen-canonicalize.h",
"src/hydrogen-check-elimination.cc",
"src/hydrogen-check-elimination.h",
"src/hydrogen-dce.cc",
"src/hydrogen-dce.h",
"src/hydrogen-dehoist.cc",
"src/hydrogen-dehoist.h",
"src/hydrogen-environment-liveness.cc",
"src/hydrogen-environment-liveness.h",
"src/hydrogen-escape-analysis.cc",
"src/hydrogen-escape-analysis.h",
"src/hydrogen-flow-engine.h",
"src/hydrogen-instructions.cc",
"src/hydrogen-instructions.h",
"src/hydrogen.cc",
"src/hydrogen.h",
"src/hydrogen-gvn.cc",
"src/hydrogen-gvn.h",
"src/hydrogen-infer-representation.cc",
"src/hydrogen-infer-representation.h",
"src/hydrogen-infer-types.cc",
"src/hydrogen-infer-types.h",
"src/hydrogen-load-elimination.cc",
"src/hydrogen-load-elimination.h",
"src/hydrogen-mark-deoptimize.cc",
"src/hydrogen-mark-deoptimize.h",
"src/hydrogen-mark-unreachable.cc",
"src/hydrogen-mark-unreachable.h",
"src/hydrogen-osr.cc",
"src/hydrogen-osr.h",
"src/hydrogen-range-analysis.cc",
"src/hydrogen-range-analysis.h",
"src/hydrogen-redundant-phi.cc",
"src/hydrogen-redundant-phi.h",
"src/hydrogen-removable-simulates.cc",
"src/hydrogen-removable-simulates.h",
"src/hydrogen-representation-changes.cc",
"src/hydrogen-representation-changes.h",
"src/hydrogen-sce.cc",
"src/hydrogen-sce.h",
"src/hydrogen-store-elimination.cc",
"src/hydrogen-store-elimination.h",
"src/hydrogen-types.cc",
"src/hydrogen-types.h",
"src/hydrogen-uint32-analysis.cc",
"src/hydrogen-uint32-analysis.h",
"src/i18n.cc", "src/i18n.cc",
"src/i18n.h", "src/i18n.h",
"src/icu_util.cc", "src/icu_util.cc",
@ -1051,6 +1095,9 @@ source_set("v8_base") {
"src/interpreter/bytecode-array-iterator.h", "src/interpreter/bytecode-array-iterator.h",
"src/interpreter/bytecode-generator.cc", "src/interpreter/bytecode-generator.cc",
"src/interpreter/bytecode-generator.h", "src/interpreter/bytecode-generator.h",
"src/interpreter/bytecode-traits.h",
"src/interpreter/control-flow-builders.cc",
"src/interpreter/control-flow-builders.h",
"src/interpreter/interpreter.cc", "src/interpreter/interpreter.cc",
"src/interpreter/interpreter.h", "src/interpreter/interpreter.h",
"src/isolate-inl.h", "src/isolate-inl.h",
@ -1058,24 +1105,18 @@ source_set("v8_base") {
"src/isolate.h", "src/isolate.h",
"src/json-parser.h", "src/json-parser.h",
"src/json-stringifier.h", "src/json-stringifier.h",
"src/key-accumulator.h",
"src/key-accumulator.cc",
"src/layout-descriptor-inl.h", "src/layout-descriptor-inl.h",
"src/layout-descriptor.cc", "src/layout-descriptor.cc",
"src/layout-descriptor.h", "src/layout-descriptor.h",
"src/list-inl.h", "src/list-inl.h",
"src/list.h", "src/list.h",
"src/lithium-allocator-inl.h",
"src/lithium-allocator.cc",
"src/lithium-allocator.h",
"src/lithium-codegen.cc",
"src/lithium-codegen.h",
"src/lithium.cc",
"src/lithium.h",
"src/log-inl.h", "src/log-inl.h",
"src/log-utils.cc", "src/log-utils.cc",
"src/log-utils.h", "src/log-utils.h",
"src/log.cc", "src/log.cc",
"src/log.h", "src/log.h",
"src/lookup-inl.h",
"src/lookup.cc", "src/lookup.cc",
"src/lookup.h", "src/lookup.h",
"src/macro-assembler.h", "src/macro-assembler.h",
@ -1093,9 +1134,11 @@ source_set("v8_base") {
"src/optimizing-compile-dispatcher.h", "src/optimizing-compile-dispatcher.h",
"src/ostreams.cc", "src/ostreams.cc",
"src/ostreams.h", "src/ostreams.h",
"src/pattern-rewriter.cc", "src/parameter-initializer-rewriter.cc",
"src/parameter-initializer-rewriter.h",
"src/parser.cc", "src/parser.cc",
"src/parser.h", "src/parser.h",
"src/pattern-rewriter.cc",
"src/pending-compilation-error-handler.cc", "src/pending-compilation-error-handler.cc",
"src/pending-compilation-error-handler.h", "src/pending-compilation-error-handler.h",
"src/preparse-data-format.h", "src/preparse-data-format.h",
@ -1122,8 +1165,12 @@ source_set("v8_base") {
"src/profiler/profile-generator.h", "src/profiler/profile-generator.h",
"src/profiler/sampler.cc", "src/profiler/sampler.cc",
"src/profiler/sampler.h", "src/profiler/sampler.h",
"src/profiler/strings-storage.cc",
"src/profiler/strings-storage.h",
"src/profiler/unbound-queue-inl.h", "src/profiler/unbound-queue-inl.h",
"src/profiler/unbound-queue.h", "src/profiler/unbound-queue.h",
"src/property-descriptor.cc",
"src/property-descriptor.h",
"src/property-details.h", "src/property-details.h",
"src/property.cc", "src/property.cc",
"src/property.h", "src/property.h",
@ -1145,6 +1192,8 @@ source_set("v8_base") {
"src/regexp/regexp-macro-assembler.h", "src/regexp/regexp-macro-assembler.h",
"src/regexp/regexp-stack.cc", "src/regexp/regexp-stack.cc",
"src/regexp/regexp-stack.h", "src/regexp/regexp-stack.h",
"src/register-configuration.cc",
"src/register-configuration.h",
"src/runtime-profiler.cc", "src/runtime-profiler.cc",
"src/runtime-profiler.h", "src/runtime-profiler.h",
"src/runtime/runtime-array.cc", "src/runtime/runtime-array.cc",
@ -1211,8 +1260,6 @@ source_set("v8_base") {
"src/string-search.h", "src/string-search.h",
"src/string-stream.cc", "src/string-stream.cc",
"src/string-stream.h", "src/string-stream.h",
"src/strings-storage.cc",
"src/strings-storage.h",
"src/strtod.cc", "src/strtod.cc",
"src/strtod.h", "src/strtod.h",
"src/token.cc", "src/token.cc",
@ -1220,6 +1267,8 @@ source_set("v8_base") {
"src/transitions-inl.h", "src/transitions-inl.h",
"src/transitions.cc", "src/transitions.cc",
"src/transitions.h", "src/transitions.h",
"src/type-cache.cc",
"src/type-cache.h",
"src/type-feedback-vector-inl.h", "src/type-feedback-vector-inl.h",
"src/type-feedback-vector.cc", "src/type-feedback-vector.cc",
"src/type-feedback-vector.h", "src/type-feedback-vector.h",
@ -1232,8 +1281,6 @@ source_set("v8_base") {
"src/typing-asm.h", "src/typing-asm.h",
"src/typing-reset.cc", "src/typing-reset.cc",
"src/typing-reset.h", "src/typing-reset.h",
"src/typing.cc",
"src/typing.h",
"src/unicode-inl.h", "src/unicode-inl.h",
"src/unicode.cc", "src/unicode.cc",
"src/unicode.h", "src/unicode.h",
@ -1241,7 +1288,6 @@ source_set("v8_base") {
"src/unicode-cache.h", "src/unicode-cache.h",
"src/unicode-decoder.cc", "src/unicode-decoder.cc",
"src/unicode-decoder.h", "src/unicode-decoder.h",
"src/unique.h",
"src/utils.cc", "src/utils.cc",
"src/utils.h", "src/utils.h",
"src/v8.cc", "src/v8.cc",
@ -1255,7 +1301,6 @@ source_set("v8_base") {
"src/version.h", "src/version.h",
"src/vm-state-inl.h", "src/vm-state-inl.h",
"src/vm-state.h", "src/vm-state.h",
"src/zone-type-cache.h",
"src/zone.cc", "src/zone.cc",
"src/zone.h", "src/zone.h",
"src/zone-allocator.h", "src/zone-allocator.h",
@ -1266,6 +1311,17 @@ source_set("v8_base") {
if (v8_target_arch == "x86") { if (v8_target_arch == "x86") {
sources += [ sources += [
"src/crankshaft/ia32/lithium-codegen-ia32.cc",
"src/crankshaft/ia32/lithium-codegen-ia32.h",
"src/crankshaft/ia32/lithium-gap-resolver-ia32.cc",
"src/crankshaft/ia32/lithium-gap-resolver-ia32.h",
"src/crankshaft/ia32/lithium-ia32.cc",
"src/crankshaft/ia32/lithium-ia32.h",
"src/compiler/ia32/code-generator-ia32.cc",
"src/compiler/ia32/instruction-codes-ia32.h",
"src/compiler/ia32/instruction-selector-ia32.cc",
"src/debug/ia32/debug-ia32.cc",
"src/full-codegen/ia32/full-codegen-ia32.cc",
"src/ia32/assembler-ia32-inl.h", "src/ia32/assembler-ia32-inl.h",
"src/ia32/assembler-ia32.cc", "src/ia32/assembler-ia32.cc",
"src/ia32/assembler-ia32.h", "src/ia32/assembler-ia32.h",
@ -1280,19 +1336,8 @@ source_set("v8_base") {
"src/ia32/frames-ia32.cc", "src/ia32/frames-ia32.cc",
"src/ia32/frames-ia32.h", "src/ia32/frames-ia32.h",
"src/ia32/interface-descriptors-ia32.cc", "src/ia32/interface-descriptors-ia32.cc",
"src/ia32/lithium-codegen-ia32.cc",
"src/ia32/lithium-codegen-ia32.h",
"src/ia32/lithium-gap-resolver-ia32.cc",
"src/ia32/lithium-gap-resolver-ia32.h",
"src/ia32/lithium-ia32.cc",
"src/ia32/lithium-ia32.h",
"src/ia32/macro-assembler-ia32.cc", "src/ia32/macro-assembler-ia32.cc",
"src/ia32/macro-assembler-ia32.h", "src/ia32/macro-assembler-ia32.h",
"src/compiler/ia32/code-generator-ia32.cc",
"src/compiler/ia32/instruction-codes-ia32.h",
"src/compiler/ia32/instruction-selector-ia32.cc",
"src/debug/ia32/debug-ia32.cc",
"src/full-codegen/ia32/full-codegen-ia32.cc",
"src/ic/ia32/access-compiler-ia32.cc", "src/ic/ia32/access-compiler-ia32.cc",
"src/ic/ia32/handler-compiler-ia32.cc", "src/ic/ia32/handler-compiler-ia32.cc",
"src/ic/ia32/ic-ia32.cc", "src/ic/ia32/ic-ia32.cc",
@ -1303,6 +1348,24 @@ source_set("v8_base") {
] ]
} else if (v8_target_arch == "x64") { } else if (v8_target_arch == "x64") {
sources += [ sources += [
"src/compiler/x64/code-generator-x64.cc",
"src/compiler/x64/instruction-codes-x64.h",
"src/compiler/x64/instruction-selector-x64.cc",
"src/crankshaft/x64/lithium-codegen-x64.cc",
"src/crankshaft/x64/lithium-codegen-x64.h",
"src/crankshaft/x64/lithium-gap-resolver-x64.cc",
"src/crankshaft/x64/lithium-gap-resolver-x64.h",
"src/crankshaft/x64/lithium-x64.cc",
"src/crankshaft/x64/lithium-x64.h",
"src/debug/x64/debug-x64.cc",
"src/full-codegen/x64/full-codegen-x64.cc",
"src/ic/x64/access-compiler-x64.cc",
"src/ic/x64/handler-compiler-x64.cc",
"src/ic/x64/ic-x64.cc",
"src/ic/x64/ic-compiler-x64.cc",
"src/ic/x64/stub-cache-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.h",
"src/x64/assembler-x64-inl.h", "src/x64/assembler-x64-inl.h",
"src/x64/assembler-x64.cc", "src/x64/assembler-x64.cc",
"src/x64/assembler-x64.h", "src/x64/assembler-x64.h",
@ -1317,26 +1380,8 @@ source_set("v8_base") {
"src/x64/frames-x64.cc", "src/x64/frames-x64.cc",
"src/x64/frames-x64.h", "src/x64/frames-x64.h",
"src/x64/interface-descriptors-x64.cc", "src/x64/interface-descriptors-x64.cc",
"src/x64/lithium-codegen-x64.cc",
"src/x64/lithium-codegen-x64.h",
"src/x64/lithium-gap-resolver-x64.cc",
"src/x64/lithium-gap-resolver-x64.h",
"src/x64/lithium-x64.cc",
"src/x64/lithium-x64.h",
"src/x64/macro-assembler-x64.cc", "src/x64/macro-assembler-x64.cc",
"src/x64/macro-assembler-x64.h", "src/x64/macro-assembler-x64.h",
"src/compiler/x64/code-generator-x64.cc",
"src/compiler/x64/instruction-codes-x64.h",
"src/compiler/x64/instruction-selector-x64.cc",
"src/debug/x64/debug-x64.cc",
"src/full-codegen/x64/full-codegen-x64.cc",
"src/ic/x64/access-compiler-x64.cc",
"src/ic/x64/handler-compiler-x64.cc",
"src/ic/x64/ic-x64.cc",
"src/ic/x64/ic-compiler-x64.cc",
"src/ic/x64/stub-cache-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.h",
] ]
} else if (v8_target_arch == "arm") { } else if (v8_target_arch == "arm") {
sources += [ sources += [
@ -1357,12 +1402,6 @@ source_set("v8_base") {
"src/arm/frames-arm.h", "src/arm/frames-arm.h",
"src/arm/interface-descriptors-arm.cc", "src/arm/interface-descriptors-arm.cc",
"src/arm/interface-descriptors-arm.h", "src/arm/interface-descriptors-arm.h",
"src/arm/lithium-arm.cc",
"src/arm/lithium-arm.h",
"src/arm/lithium-codegen-arm.cc",
"src/arm/lithium-codegen-arm.h",
"src/arm/lithium-gap-resolver-arm.cc",
"src/arm/lithium-gap-resolver-arm.h",
"src/arm/macro-assembler-arm.cc", "src/arm/macro-assembler-arm.cc",
"src/arm/macro-assembler-arm.h", "src/arm/macro-assembler-arm.h",
"src/arm/simulator-arm.cc", "src/arm/simulator-arm.cc",
@ -1370,6 +1409,12 @@ source_set("v8_base") {
"src/compiler/arm/code-generator-arm.cc", "src/compiler/arm/code-generator-arm.cc",
"src/compiler/arm/instruction-codes-arm.h", "src/compiler/arm/instruction-codes-arm.h",
"src/compiler/arm/instruction-selector-arm.cc", "src/compiler/arm/instruction-selector-arm.cc",
"src/crankshaft/arm/lithium-arm.cc",
"src/crankshaft/arm/lithium-arm.h",
"src/crankshaft/arm/lithium-codegen-arm.cc",
"src/crankshaft/arm/lithium-codegen-arm.h",
"src/crankshaft/arm/lithium-gap-resolver-arm.cc",
"src/crankshaft/arm/lithium-gap-resolver-arm.h",
"src/debug/arm/debug-arm.cc", "src/debug/arm/debug-arm.cc",
"src/full-codegen/arm/full-codegen-arm.cc", "src/full-codegen/arm/full-codegen-arm.cc",
"src/ic/arm/access-compiler-arm.cc", "src/ic/arm/access-compiler-arm.cc",
@ -1395,9 +1440,6 @@ source_set("v8_base") {
"src/arm64/decoder-arm64.cc", "src/arm64/decoder-arm64.cc",
"src/arm64/decoder-arm64.h", "src/arm64/decoder-arm64.h",
"src/arm64/decoder-arm64-inl.h", "src/arm64/decoder-arm64-inl.h",
"src/arm64/delayed-masm-arm64.cc",
"src/arm64/delayed-masm-arm64.h",
"src/arm64/delayed-masm-arm64-inl.h",
"src/arm64/deoptimizer-arm64.cc", "src/arm64/deoptimizer-arm64.cc",
"src/arm64/disasm-arm64.cc", "src/arm64/disasm-arm64.cc",
"src/arm64/disasm-arm64.h", "src/arm64/disasm-arm64.h",
@ -1409,12 +1451,6 @@ source_set("v8_base") {
"src/arm64/instrument-arm64.h", "src/arm64/instrument-arm64.h",
"src/arm64/interface-descriptors-arm64.cc", "src/arm64/interface-descriptors-arm64.cc",
"src/arm64/interface-descriptors-arm64.h", "src/arm64/interface-descriptors-arm64.h",
"src/arm64/lithium-arm64.cc",
"src/arm64/lithium-arm64.h",
"src/arm64/lithium-codegen-arm64.cc",
"src/arm64/lithium-codegen-arm64.h",
"src/arm64/lithium-gap-resolver-arm64.cc",
"src/arm64/lithium-gap-resolver-arm64.h",
"src/arm64/macro-assembler-arm64.cc", "src/arm64/macro-assembler-arm64.cc",
"src/arm64/macro-assembler-arm64.h", "src/arm64/macro-assembler-arm64.h",
"src/arm64/macro-assembler-arm64-inl.h", "src/arm64/macro-assembler-arm64-inl.h",
@ -1425,6 +1461,15 @@ source_set("v8_base") {
"src/compiler/arm64/code-generator-arm64.cc", "src/compiler/arm64/code-generator-arm64.cc",
"src/compiler/arm64/instruction-codes-arm64.h", "src/compiler/arm64/instruction-codes-arm64.h",
"src/compiler/arm64/instruction-selector-arm64.cc", "src/compiler/arm64/instruction-selector-arm64.cc",
"src/crankshaft/arm64/delayed-masm-arm64.cc",
"src/crankshaft/arm64/delayed-masm-arm64.h",
"src/crankshaft/arm64/delayed-masm-arm64-inl.h",
"src/crankshaft/arm64/lithium-arm64.cc",
"src/crankshaft/arm64/lithium-arm64.h",
"src/crankshaft/arm64/lithium-codegen-arm64.cc",
"src/crankshaft/arm64/lithium-codegen-arm64.h",
"src/crankshaft/arm64/lithium-gap-resolver-arm64.cc",
"src/crankshaft/arm64/lithium-gap-resolver-arm64.h",
"src/debug/arm64/debug-arm64.cc", "src/debug/arm64/debug-arm64.cc",
"src/full-codegen/arm64/full-codegen-arm64.cc", "src/full-codegen/arm64/full-codegen-arm64.cc",
"src/ic/arm64/access-compiler-arm64.cc", "src/ic/arm64/access-compiler-arm64.cc",
@ -1437,6 +1482,22 @@ source_set("v8_base") {
] ]
} else if (v8_target_arch == "mipsel") { } else if (v8_target_arch == "mipsel") {
sources += [ sources += [
"src/compiler/mips/code-generator-mips.cc",
"src/compiler/mips/instruction-codes-mips.h",
"src/compiler/mips/instruction-selector-mips.cc",
"src/crankshaft/mips/lithium-codegen-mips.cc",
"src/crankshaft/mips/lithium-codegen-mips.h",
"src/crankshaft/mips/lithium-gap-resolver-mips.cc",
"src/crankshaft/mips/lithium-gap-resolver-mips.h",
"src/crankshaft/mips/lithium-mips.cc",
"src/crankshaft/mips/lithium-mips.h",
"src/debug/mips/debug-mips.cc",
"src/full-codegen/mips/full-codegen-mips.cc",
"src/ic/mips/access-compiler-mips.cc",
"src/ic/mips/handler-compiler-mips.cc",
"src/ic/mips/ic-mips.cc",
"src/ic/mips/ic-compiler-mips.cc",
"src/ic/mips/stub-cache-mips.cc",
"src/mips/assembler-mips.cc", "src/mips/assembler-mips.cc",
"src/mips/assembler-mips.h", "src/mips/assembler-mips.h",
"src/mips/assembler-mips-inl.h", "src/mips/assembler-mips-inl.h",
@ -1453,31 +1514,31 @@ source_set("v8_base") {
"src/mips/frames-mips.cc", "src/mips/frames-mips.cc",
"src/mips/frames-mips.h", "src/mips/frames-mips.h",
"src/mips/interface-descriptors-mips.cc", "src/mips/interface-descriptors-mips.cc",
"src/mips/lithium-codegen-mips.cc",
"src/mips/lithium-codegen-mips.h",
"src/mips/lithium-gap-resolver-mips.cc",
"src/mips/lithium-gap-resolver-mips.h",
"src/mips/lithium-mips.cc",
"src/mips/lithium-mips.h",
"src/mips/macro-assembler-mips.cc", "src/mips/macro-assembler-mips.cc",
"src/mips/macro-assembler-mips.h", "src/mips/macro-assembler-mips.h",
"src/mips/simulator-mips.cc", "src/mips/simulator-mips.cc",
"src/mips/simulator-mips.h", "src/mips/simulator-mips.h",
"src/compiler/mips/code-generator-mips.cc",
"src/compiler/mips/instruction-codes-mips.h",
"src/compiler/mips/instruction-selector-mips.cc",
"src/debug/mips/debug-mips.cc",
"src/full-codegen/mips/full-codegen-mips.cc",
"src/ic/mips/access-compiler-mips.cc",
"src/ic/mips/handler-compiler-mips.cc",
"src/ic/mips/ic-mips.cc",
"src/ic/mips/ic-compiler-mips.cc",
"src/ic/mips/stub-cache-mips.cc",
"src/regexp/mips/regexp-macro-assembler-mips.cc", "src/regexp/mips/regexp-macro-assembler-mips.cc",
"src/regexp/mips/regexp-macro-assembler-mips.h", "src/regexp/mips/regexp-macro-assembler-mips.h",
] ]
} else if (v8_target_arch == "mips64el") { } else if (v8_target_arch == "mips64el") {
sources += [ sources += [
"compiler/mips64/code-generator-mips64.cc",
"compiler/mips64/instruction-codes-mips64.h",
"compiler/mips64/instruction-selector-mips64.cc",
"src/crankshaft/mips64/lithium-codegen-mips64.cc",
"src/crankshaft/mips64/lithium-codegen-mips64.h",
"src/crankshaft/mips64/lithium-gap-resolver-mips64.cc",
"src/crankshaft/mips64/lithium-gap-resolver-mips64.h",
"src/crankshaft/mips64/lithium-mips64.cc",
"src/crankshaft/mips64/lithium-mips64.h",
"src/debug/mips64/debug-mips64.cc",
"src/full-codegen/mips64/full-codegen-mips64.cc",
"src/ic/mips64/access-compiler-mips64.cc",
"src/ic/mips64/handler-compiler-mips64.cc",
"src/ic/mips64/ic-mips64.cc",
"src/ic/mips64/ic-compiler-mips64.cc",
"src/ic/mips64/stub-cache-mips64.cc",
"src/mips64/assembler-mips64.cc", "src/mips64/assembler-mips64.cc",
"src/mips64/assembler-mips64.h", "src/mips64/assembler-mips64.h",
"src/mips64/assembler-mips64-inl.h", "src/mips64/assembler-mips64-inl.h",
@ -1494,23 +1555,10 @@ source_set("v8_base") {
"src/mips64/frames-mips64.cc", "src/mips64/frames-mips64.cc",
"src/mips64/frames-mips64.h", "src/mips64/frames-mips64.h",
"src/mips64/interface-descriptors-mips64.cc", "src/mips64/interface-descriptors-mips64.cc",
"src/mips64/lithium-codegen-mips64.cc",
"src/mips64/lithium-codegen-mips64.h",
"src/mips64/lithium-gap-resolver-mips64.cc",
"src/mips64/lithium-gap-resolver-mips64.h",
"src/mips64/lithium-mips64.cc",
"src/mips64/lithium-mips64.h",
"src/mips64/macro-assembler-mips64.cc", "src/mips64/macro-assembler-mips64.cc",
"src/mips64/macro-assembler-mips64.h", "src/mips64/macro-assembler-mips64.h",
"src/mips64/simulator-mips64.cc", "src/mips64/simulator-mips64.cc",
"src/mips64/simulator-mips64.h", "src/mips64/simulator-mips64.h",
"src/debug/mips64/debug-mips64.cc",
"src/full-codegen/mips64/full-codegen-mips64.cc",
"src/ic/mips64/access-compiler-mips64.cc",
"src/ic/mips64/handler-compiler-mips64.cc",
"src/ic/mips64/ic-mips64.cc",
"src/ic/mips64/ic-compiler-mips64.cc",
"src/ic/mips64/stub-cache-mips64.cc",
"src/regexp/mips64/regexp-macro-assembler-mips64.cc", "src/regexp/mips64/regexp-macro-assembler-mips64.cc",
"src/regexp/mips64/regexp-macro-assembler-mips64.h", "src/regexp/mips64/regexp-macro-assembler-mips64.h",
] ]

1496
deps/v8/ChangeLog

File diff suppressed because it is too large

61
deps/v8/DEPS

@ -8,25 +8,32 @@ vars = {
deps = { deps = {
"v8/build/gyp": "v8/build/gyp":
Var("git_url") + "/external/gyp.git" + "@" + "01528c7244837168a1c80f06ff60fa5a9793c824", Var("git_url") + "/external/gyp.git" + "@" + "2c1e6cced23554ce84806e570acea637f6473afc",
"v8/third_party/icu": "v8/third_party/icu":
Var("git_url") + "/chromium/deps/icu.git" + "@" + "423fc7e1107fb08ccf007c4aeb76dcab8b2747c1", Var("git_url") + "/chromium/deps/icu.git" + "@" + "42c58d4e49f2250039f0e98d43e0b76e8f5ca024",
"v8/buildtools": "v8/buildtools":
Var("git_url") + "/chromium/buildtools.git" + "@" + "e7111440c07a883b82ffbbe6d26c744dfc6c9673", Var("git_url") + "/chromium/buildtools.git" + "@" + "4a95614772d9bcbd8bc197e1d9bd034e088fc740",
"v8/tools/swarming_client": "v8/tools/swarming_client":
Var('git_url') + '/external/swarming.client.git' + '@' + "6e5d2b21f0ac98396cd736097a985346feed1328", Var('git_url') + '/external/swarming.client.git' + '@' + "8fce79620b04bbe5415ace1103db27505bdc4c06",
"v8/testing/gtest": "v8/testing/gtest":
Var("git_url") + "/external/googletest.git" + "@" + "9855a87157778d39b95eccfb201a9dc90f6d61c6", Var("git_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
"v8/testing/gmock": "v8/testing/gmock":
Var("git_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be", Var("git_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be",
"v8/test/benchmarks/data":
Var("git_url") + "/v8/deps/third_party/benchmarks.git" + "@" + "05d7188267b4560491ff9155c5ee13e207ecd65f",
"v8/test/mozilla/data":
Var("git_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
"v8/test/simdjs/data": Var("git_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "c8ef63c728283debc25891123eb00482fee4b8cd",
"v8/test/test262/data":
Var("git_url") + "/external/github.com/tc39/test262.git" + "@" + "ea222fb7d09e334c321b987656315ad4056ded96",
"v8/tools/clang": "v8/tools/clang":
Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "0150e39a3112dbc7e4c7a3ab25276b8d7781f3b6", Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "66f5328417331216569e8beb244fd887f62e8997",
} }
deps_os = { deps_os = {
"android": { "android": {
"v8/third_party/android_tools": "v8/third_party/android_tools":
Var("git_url") + "/android_tools.git" + "@" + "4238a28593b7e6178c95431f91ca8c24e45fa7eb", Var("git_url") + "/android_tools.git" + "@" + "54492f99c84cab0826a8e656efeb33a1b1bf5a04",
}, },
"win": { "win": {
"v8/third_party/cygwin": "v8/third_party/cygwin":
@ -93,6 +100,46 @@ hooks = [
"-s", "v8/buildtools/linux64/clang-format.sha1", "-s", "v8/buildtools/linux64/clang-format.sha1",
], ],
}, },
# Pull luci-go binaries (isolate, swarming) using checked-in hashes.
{
'name': 'luci-go_win',
'pattern': '.',
'action': [ 'download_from_google_storage',
'--no_resume',
'--platform=win32',
'--no_auth',
'--bucket', 'chromium-luci',
'-d', 'v8/tools/luci-go/win64',
],
},
{
'name': 'luci-go_mac',
'pattern': '.',
'action': [ 'download_from_google_storage',
'--no_resume',
'--platform=darwin',
'--no_auth',
'--bucket', 'chromium-luci',
'-d', 'v8/tools/luci-go/mac64',
],
},
{
'name': 'luci-go_linux',
'pattern': '.',
'action': [ 'download_from_google_storage',
'--no_resume',
'--platform=linux*',
'--no_auth',
'--bucket', 'chromium-luci',
'-d', 'v8/tools/luci-go/linux64',
],
},
{
# Update the Windows toolchain if necessary.
'name': 'win_toolchain',
'pattern': '.',
'action': ['python', 'v8/build/vs_toolchain.py', 'update'],
},
# Pull binutils for linux, enabled debug fission for faster linking / # Pull binutils for linux, enabled debug fission for faster linking /
# debugging when used with clang on Ubuntu Precise. # debugging when used with clang on Ubuntu Precise.
# https://code.google.com/p/chromium/issues/detail?id=352046 # https://code.google.com/p/chromium/issues/detail?id=352046

15
deps/v8/WATCHLISTS

@ -42,6 +42,14 @@
'debugger': { 'debugger': {
'filepath': 'src/debug/', 'filepath': 'src/debug/',
}, },
'interpreter': {
'filepath': 'src/interpreter/',
'filepath': 'test/cctest/interpreter/',
'filepath': 'test/unittests/interpreter/',
},
'feature_shipping_status': {
'filepath': 'src/flag-definitions.h',
},
}, },
'WATCHLISTS': { 'WATCHLISTS': {
@ -54,5 +62,12 @@
'debugger': [ 'debugger': [
'yangguo@chromium.org', 'yangguo@chromium.org',
], ],
'interpreter': [
'rmcilroy@chromium.org',
'oth@chromium.org',
],
'feature_shipping_status': [
'hablich@chromium.org',
],
}, },
} }

16
deps/v8/build/all.gyp

@ -19,6 +19,22 @@
'../tools/parser-shell.gyp:parser-shell', '../tools/parser-shell.gyp:parser-shell',
], ],
}], }],
['test_isolation_mode != "noop"', {
'dependencies': [
'../test/bot_default.gyp:*',
'../test/benchmarks/benchmarks.gyp:*',
'../test/default.gyp:*',
'../test/intl/intl.gyp:*',
'../test/message/message.gyp:*',
'../test/mjsunit/mjsunit.gyp:*',
'../test/mozilla/mozilla.gyp:*',
'../test/optimize_for_size.gyp:*',
'../test/preparser/preparser.gyp:*',
'../test/simdjs/simdjs.gyp:*',
'../test/test262/test262.gyp:*',
'../test/webkit/webkit.gyp:*',
],
}],
] ]
} }
] ]

77
deps/v8/build/config/win/msvs_dependencies.isolate

@ -0,0 +1,77 @@
# Copyright 2015 the V8 project authors. All rights reserved.
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'conditions': [
# Copy the VS runtime DLLs into the isolate so that they
# don't have to be preinstalled on the target machine.
#
# VS2013 runtimes
['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp120d.dll',
'<(PRODUCT_DIR)/x64/msvcr120d.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp120.dll',
'<(PRODUCT_DIR)/x64/msvcr120.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp120d.dll',
'<(PRODUCT_DIR)/msvcr120d.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp120.dll',
'<(PRODUCT_DIR)/msvcr120.dll',
],
},
}],
# VS2015 runtimes
['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp140d.dll',
'<(PRODUCT_DIR)/x64/vccorlib140d.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp140.dll',
'<(PRODUCT_DIR)/x64/vccorlib140.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp140d.dll',
'<(PRODUCT_DIR)/vccorlib140d.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp140.dll',
'<(PRODUCT_DIR)/vccorlib140.dll',
],
},
}],
],
}

2
deps/v8/build/features.gypi

@ -39,6 +39,8 @@
'v8_trace_maps%': 0, 'v8_trace_maps%': 0,
# Enable the snapshot feature, for fast context creation.
# http://v8project.blogspot.com/2015/09/custom-startup-snapshots.html
'v8_use_snapshot%': 'true', 'v8_use_snapshot%': 'true',
'v8_enable_verify_predictable%': 0, 'v8_enable_verify_predictable%': 0,

1
deps/v8/build/get_landmines.py

@ -24,6 +24,7 @@ def main():
print 'Moar clobbering...' print 'Moar clobbering...'
print 'Remove build/android.gypi' print 'Remove build/android.gypi'
print 'Cleanup after windows ninja switch attempt.' print 'Cleanup after windows ninja switch attempt.'
print 'Switching to pinned msvs toolchain.'
return 0 return 0

2
deps/v8/build/gyp_environment.py

@ -10,6 +10,7 @@ make sure settings are consistent between them, all setup should happen here.
import os import os
import sys import sys
import vs_toolchain
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
V8_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)) V8_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
@ -50,3 +51,4 @@ def set_environment():
# Update the environment based on v8.gyp_env # Update the environment based on v8.gyp_env
gyp_env_path = os.path.join(os.path.dirname(V8_ROOT), 'v8.gyp_env') gyp_env_path = os.path.join(os.path.dirname(V8_ROOT), 'v8.gyp_env')
apply_gyp_environment(gyp_env_path) apply_gyp_environment(gyp_env_path)
vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()

29
deps/v8/build/gyp_v8

@ -30,6 +30,7 @@
# This script is wrapper for V8 that adds some support for how GYP # This script is wrapper for V8 that adds some support for how GYP
# is invoked by V8 beyond what can be done in the gclient hooks. # is invoked by V8 beyond what can be done in the gclient hooks.
import argparse
import glob import glob
import gyp_environment import gyp_environment
import os import os
@ -37,6 +38,7 @@ import platform
import shlex import shlex
import subprocess import subprocess
import sys import sys
import vs_toolchain
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
v8_root = os.path.abspath(os.path.join(script_dir, os.pardir)) v8_root = os.path.abspath(os.path.join(script_dir, os.pardir))
@ -49,6 +51,25 @@ sys.path.insert(
1, os.path.abspath(os.path.join(v8_root, 'tools', 'generate_shim_headers'))) 1, os.path.abspath(os.path.join(v8_root, 'tools', 'generate_shim_headers')))
def GetOutputDirectory():
"""Returns the output directory that GYP will use."""
# Handle command line generator flags.
parser = argparse.ArgumentParser()
parser.add_argument('-G', dest='genflags', default=[], action='append')
genflags = parser.parse_known_args()[0].genflags
# Handle generator flags from the environment.
genflags += shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
needle = 'output_dir='
for item in genflags:
if item.startswith(needle):
return item[len(needle):]
return 'out'
def additional_include_files(args=[]): def additional_include_files(args=[]):
""" """
Returns a list of additional (.gypi) files to include, without Returns a list of additional (.gypi) files to include, without
@ -82,6 +103,13 @@ def additional_include_files(args=[]):
def run_gyp(args): def run_gyp(args):
rc = gyp.main(args) rc = gyp.main(args)
vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
if vs2013_runtime_dll_dirs:
x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
vs_toolchain.CopyVsRuntimeDlls(
os.path.join(v8_root, GetOutputDirectory()),
(x86_runtime, x64_runtime))
if rc != 0: if rc != 0:
print 'Error running GYP' print 'Error running GYP'
sys.exit(rc) sys.exit(rc)
@ -130,6 +158,7 @@ if __name__ == '__main__':
# Generate for the architectures supported on the given platform. # Generate for the architectures supported on the given platform.
gyp_args = list(args) gyp_args = list(args)
gyp_args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
gyp_generators = os.environ.get('GYP_GENERATORS', '') gyp_generators = os.environ.get('GYP_GENERATORS', '')
if platform.system() == 'Linux' and gyp_generators != 'ninja': if platform.system() == 'Linux' and gyp_generators != 'ninja':
# Work around for crbug.com/331475. # Work around for crbug.com/331475.

24
deps/v8/build/isolate.gypi

@ -44,6 +44,7 @@
'extension': 'isolate', 'extension': 'isolate',
'inputs': [ 'inputs': [
# Files that are known to be involved in this step. # Files that are known to be involved in this step.
'<(DEPTH)/tools/isolate_driver.py',
'<(DEPTH)/tools/swarming_client/isolate.py', '<(DEPTH)/tools/swarming_client/isolate.py',
'<(DEPTH)/tools/swarming_client/run_isolated.py', '<(DEPTH)/tools/swarming_client/run_isolated.py',
], ],
@ -52,7 +53,7 @@
], ],
'action': [ 'action': [
'python', 'python',
'<(DEPTH)/tools/swarming_client/isolate.py', '<(DEPTH)/tools/isolate_driver.py',
'<(test_isolation_mode)', '<(test_isolation_mode)',
'--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated', '--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
'--isolate', '<(RULE_INPUT_PATH)', '--isolate', '<(RULE_INPUT_PATH)',
@ -66,8 +67,29 @@
'--path-variable', 'DEPTH', '<(DEPTH)', '--path-variable', 'DEPTH', '<(DEPTH)',
'--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)', '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR)',
'--config-variable', 'CONFIGURATION_NAME=<(CONFIGURATION_NAME)',
'--config-variable', 'OS=<(OS)', '--config-variable', 'OS=<(OS)',
'--config-variable', 'asan=<(asan)',
'--config-variable', 'cfi_vptr=<(cfi_vptr)',
'--config-variable', 'icu_use_data_file_flag=0',
'--config-variable', 'msan=<(msan)',
'--config-variable', 'tsan=<(tsan)',
'--config-variable', 'component=<(component)',
'--config-variable', 'target_arch=<(target_arch)',
'--config-variable', 'use_custom_libcxx=<(use_custom_libcxx)',
'--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)', '--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
'--config-variable', 'v8_use_snapshot=<(v8_use_snapshot)',
],
'conditions': [
['OS=="win"', {
'action': [
'--config-variable', 'msvs_version=2013',
],
}, {
'action': [
'--config-variable', 'msvs_version=0',
],
}],
], ],
}, },
], ],

40
deps/v8/build/standalone.gypi

@ -97,6 +97,10 @@
'cfi_blacklist%': '<(base_dir)/tools/cfi/blacklist.txt', 'cfi_blacklist%': '<(base_dir)/tools/cfi/blacklist.txt',
# Set to 1 to enable fast builds.
# TODO(machenbach): Only configured for windows.
'fastbuild%': 0,
# goma settings. # goma settings.
# 1 to use goma. # 1 to use goma.
# If no gomadir is set, it uses the default gomadir. # If no gomadir is set, it uses the default gomadir.
@ -127,7 +131,7 @@
# TODO(machenbach): Remove the conditions as more configurations are # TODO(machenbach): Remove the conditions as more configurations are
# supported. # supported.
['OS=="linux"', { ['OS=="linux" or OS=="win"', {
'test_isolation_mode%': 'check', 'test_isolation_mode%': 'check',
}, { }, {
'test_isolation_mode%': 'noop', 'test_isolation_mode%': 'noop',
@ -155,6 +159,7 @@
'cfi_diag%': '<(cfi_diag)', 'cfi_diag%': '<(cfi_diag)',
'cfi_blacklist%': '<(cfi_blacklist)', 'cfi_blacklist%': '<(cfi_blacklist)',
'test_isolation_mode%': '<(test_isolation_mode)', 'test_isolation_mode%': '<(test_isolation_mode)',
'fastbuild%': '<(fastbuild)',
# Add a simple extras solely for the purpose of the cctests # Add a simple extras solely for the purpose of the cctests
'v8_extra_library_files': ['../test/cctest/test-extra.js'], 'v8_extra_library_files': ['../test/cctest/test-extra.js'],
@ -206,12 +211,8 @@
['OS=="win" and use_goma==1', { ['OS=="win" and use_goma==1', {
# goma doesn't support pch yet. # goma doesn't support pch yet.
'chromium_win_pch': 0, 'chromium_win_pch': 0,
# goma doesn't support PDB yet, so win_z7=1 or fastbuild=1. # goma doesn't support PDB yet.
'conditions': [ 'fastbuild%': 1,
['win_z7==0 and fastbuild==0', {
'fastbuild': 1,
}],
],
}], }],
['((v8_target_arch=="ia32" or v8_target_arch=="x64" or v8_target_arch=="x87") and \ ['((v8_target_arch=="ia32" or v8_target_arch=="x64" or v8_target_arch=="x87") and \
(OS=="linux" or OS=="mac")) or (v8_target_arch=="ppc64" and OS=="linux")', { (OS=="linux" or OS=="mac")) or (v8_target_arch=="ppc64" and OS=="linux")', {
@ -433,6 +434,23 @@
}], }],
], ],
}], }],
['fastbuild!=0', {
'conditions': [
['OS=="win" and fastbuild==1', {
'msvs_settings': {
'VCLinkerTool': {
# This tells the linker to generate .pdbs, so that
# we can get meaningful stack traces.
'GenerateDebugInformation': 'true',
},
'VCCLCompilerTool': {
# No debug info to be generated by compiler.
'DebugInformationFormat': '0',
},
},
}],
],
}], # fastbuild!=0
], ],
'target_conditions': [ 'target_conditions': [
['v8_code == 0', { ['v8_code == 0', {
@ -492,8 +510,8 @@
'target_defaults': { 'target_defaults': {
'conditions': [ 'conditions': [
# Common options for AddressSanitizer, LeakSanitizer, # Common options for AddressSanitizer, LeakSanitizer,
# ThreadSanitizer and MemorySanitizer. # ThreadSanitizer, MemorySanitizer and CFI builds.
['asan==1 or lsan==1 or tsan==1 or msan==1', { ['asan==1 or lsan==1 or tsan==1 or msan==1 or cfi_vptr==1', {
'target_conditions': [ 'target_conditions': [
['_toolset=="target"', { ['_toolset=="target"', {
'cflags': [ 'cflags': [
@ -1275,7 +1293,7 @@
['_toolset=="target"', { ['_toolset=="target"', {
'cflags': [ 'cflags': [
'-fno-sanitize-trap=cfi', '-fno-sanitize-trap=cfi',
'-fsanitize-recover=cfi', '-fno-sanitize-recover=cfi',
], ],
'cflags_cc!': [ 'cflags_cc!': [
'-fno-rtti', '-fno-rtti',
@ -1285,7 +1303,7 @@
], ],
'ldflags': [ 'ldflags': [
'-fno-sanitize-trap=cfi', '-fno-sanitize-trap=cfi',
'-fsanitize-recover=cfi', '-fno-sanitize-recover=cfi',
], ],
}], }],
], ],

4
deps/v8/build/toolchain.gypi

@ -638,8 +638,8 @@
'_MIPS_ARCH_LOONGSON', '_MIPS_ARCH_LOONGSON',
'FPU_MODE_FP32', 'FPU_MODE_FP32',
], ],
'cflags!': ['-mfp64', '-mfp32', '-mfpxx'], 'cflags!': ['-mfp64', '-mfpxx'],
'cflags': ['-mips3', '-Wa,-mips3'], 'cflags': ['-mips3', '-Wa,-mips3', '-mfp32'],
}], }],
], ],
}, { }, {

268
deps/v8/build/vs_toolchain.py

@ -0,0 +1,268 @@
#!/usr/bin/env python
# Copyright 2015 the V8 project authors. All rights reserved.
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import pipes
import shutil
import subprocess
import sys
import vs_toolchain
script_dir = os.path.dirname(os.path.realpath(__file__))
chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(1, os.path.join(chrome_src, 'tools'))
sys.path.insert(0, os.path.join(chrome_src, 'build', 'gyp', 'pylib'))
json_data_file = os.path.join(script_dir, 'win_toolchain.json')
import gyp
def SetEnvironmentAndGetRuntimeDllDirs():
"""Sets up os.environ to use the depot_tools VS toolchain with gyp, and
returns the location of the VS runtime DLLs so they can be copied into
the output directory after gyp generation.
"""
vs2013_runtime_dll_dirs = None
depot_tools_win_toolchain = \
bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
# When running on a non-Windows host, only do this if the SDK has explicitly
# been downloaded before (in which case json_data_file will exist).
if ((sys.platform in ('win32', 'cygwin') or os.path.exists(json_data_file))
and depot_tools_win_toolchain):
if not os.path.exists(json_data_file):
Update()
with open(json_data_file, 'r') as tempf:
toolchain_data = json.load(tempf)
toolchain = toolchain_data['path']
version = toolchain_data['version']
win_sdk = toolchain_data.get('win_sdk')
if not win_sdk:
win_sdk = toolchain_data['win8sdk']
wdk = toolchain_data['wdk']
# TODO(scottmg): The order unfortunately matters in these. They should be
# split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
# below). http://crbug.com/345992
vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']
os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
os.environ['GYP_MSVS_VERSION'] = version
# We need to make sure windows_sdk_path is set to the automated
# toolchain values in GYP_DEFINES, but don't want to override any
# otheroptions.express
# values there.
gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
gyp_defines_dict['windows_sdk_path'] = win_sdk
os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
for k, v in gyp_defines_dict.iteritems())
os.environ['WINDOWSSDKDIR'] = win_sdk
os.environ['WDK_DIR'] = wdk
# Include the VS runtime in the PATH in case it's not machine-installed.
runtime_path = ';'.join(vs2013_runtime_dll_dirs)
os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
return vs2013_runtime_dll_dirs
def _VersionNumber():
"""Gets the standard version number ('120', '140', etc.) based on
GYP_MSVS_VERSION."""
if os.environ['GYP_MSVS_VERSION'] == '2013':
return '120'
elif os.environ['GYP_MSVS_VERSION'] == '2015':
return '140'
else:
raise ValueError('Unexpected GYP_MSVS_VERSION')
def _CopyRuntimeImpl(target, source):
"""Copy |source| to |target| if it doesn't already exist or if it
needs to be updated.
"""
if (os.path.isdir(os.path.dirname(target)) and
(not os.path.isfile(target) or
os.stat(target).st_mtime != os.stat(source).st_mtime)):
print 'Copying %s to %s...' % (source, target)
if os.path.exists(target):
os.unlink(target)
shutil.copy2(source, target)
def _CopyRuntime2013(target_dir, source_dir, dll_pattern):
"""Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
exist, but the target directory does exist."""
for file_part in ('p', 'r'):
dll = dll_pattern % file_part
target = os.path.join(target_dir, dll)
source = os.path.join(source_dir, dll)
_CopyRuntimeImpl(target, source)
def _CopyRuntime2015(target_dir, source_dir, dll_pattern):
"""Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
exist, but the target directory does exist."""
for file_part in ('msvcp', 'vccorlib'):
dll = dll_pattern % file_part
target = os.path.join(target_dir, dll)
source = os.path.join(source_dir, dll)
_CopyRuntimeImpl(target, source)
def CopyVsRuntimeDlls(output_dir, runtime_dirs):
"""Copies the VS runtime DLLs from the given |runtime_dirs| to the output
directory so that even if not system-installed, built binaries are likely to
be able to run.
This needs to be run after gyp has been run so that the expected target
output directories are already created.
"""
x86, x64 = runtime_dirs
out_debug = os.path.join(output_dir, 'Debug')
out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
out_release = os.path.join(output_dir, 'Release')
out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
out_release_x64 = os.path.join(output_dir, 'Release_x64')
if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
os.makedirs(out_debug_nacl64)
if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
os.makedirs(out_release_nacl64)
if os.environ.get('GYP_MSVS_VERSION') == '2015':
_CopyRuntime2015(out_debug, x86, '%s140d.dll')
_CopyRuntime2015(out_release, x86, '%s140.dll')
_CopyRuntime2015(out_debug_x64, x64, '%s140d.dll')
_CopyRuntime2015(out_release_x64, x64, '%s140.dll')
_CopyRuntime2015(out_debug_nacl64, x64, '%s140d.dll')
_CopyRuntime2015(out_release_nacl64, x64, '%s140.dll')
else:
# VS2013 is the default.
_CopyRuntime2013(out_debug, x86, 'msvc%s120d.dll')
_CopyRuntime2013(out_release, x86, 'msvc%s120.dll')
_CopyRuntime2013(out_debug_x64, x64, 'msvc%s120d.dll')
_CopyRuntime2013(out_release_x64, x64, 'msvc%s120.dll')
_CopyRuntime2013(out_debug_nacl64, x64, 'msvc%s120d.dll')
_CopyRuntime2013(out_release_nacl64, x64, 'msvc%s120.dll')
# Copy the PGO runtime library to the release directories.
if os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
'VC', 'bin')
pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
pgo_runtime_dll = 'pgort' + _VersionNumber() + '.dll'
source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll)
if os.path.exists(source_x86):
_CopyRuntimeImpl(os.path.join(out_release, pgo_runtime_dll), source_x86)
source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll)
if os.path.exists(source_x64):
_CopyRuntimeImpl(os.path.join(out_release_x64, pgo_runtime_dll),
source_x64)
def CopyDlls(target_dir, configuration, target_cpu):
"""Copy the VS runtime DLLs into the requested directory as needed.
configuration is one of 'Debug' or 'Release'.
target_cpu is one of 'x86' or 'x64'.
The debug configuration gets both the debug and release DLLs; the
release config only the latter.
"""
vs2013_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
if not vs2013_runtime_dll_dirs:
return
x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
_CopyRuntime2013(
target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + '.dll')
if configuration == 'Debug':
_CopyRuntime2013(
target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + 'd.dll')
def _GetDesiredVsToolchainHashes():
"""Load a list of SHA1s corresponding to the toolchains that we want installed
to build with."""
if os.environ.get('GYP_MSVS_VERSION') == '2015':
return ['49ae4b60d898182fc3f521c2fcda82c453915011']
else:
# Default to VS2013.
return ['ee7d718ec60c2dc5d255bbe325909c2021a7efef']
def Update(force=False):
  """Requests an update of the toolchain to the specific hashes we have at
  this revision. The update outputs a .json of the various configuration
  information required to pass to gyp which we use in |GetToolchainDir()|.
  """
  if force != False and force != '--force':
    sys.stderr.write('Unknown parameter "%s"\n' % force)
    return 1
  # An existing toolchain description implies a previous install; refresh it.
  if force == '--force' or os.path.exists(json_data_file):
    force = True

  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if ((sys.platform in ('win32', 'cygwin') or force) and
      depot_tools_win_toolchain):
    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    # Ask depot_tools to fetch/unpack the hermetic toolchain and emit the
    # toolchain description JSON consumed by GetToolchainDir().
    toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path,
                     'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    if force:
      toolchain_args.append('--force')
    subprocess.check_call(toolchain_args)

  return 0
def GetToolchainDir():
  """Gets location information about the current toolchain (must have been
  previously updated by 'update'). This is used for the GN build."""
  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()

  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
  if 'WINDOWSSDKDIR' not in os.environ:
    default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\8.1'
    if os.path.isdir(default_sdk_path):
      os.environ['WINDOWSSDKDIR'] = default_sdk_path

  # Emit the GN-consumable key/value description of the toolchain.
  sys.stdout.write('''vs_path = "%s"
sdk_path = "%s"
vs_version = "%s"
wdk_dir = "%s"
runtime_dirs = "%s"
''' % (
      os.environ['GYP_MSVS_OVERRIDE_PATH'],
      os.environ['WINDOWSSDKDIR'],
      os.environ['GYP_MSVS_VERSION'],
      os.environ.get('WDK_DIR', ''),
      ';'.join(runtime_dll_dirs or ['None'])) + '\n')
def main():
  # Dispatch table mapping sub-command names to their implementations.
  commands = {
      'update': Update,
      'get_toolchain_dir': GetToolchainDir,
      'copy_dlls': CopyDlls,
  }
  if len(sys.argv) < 2 or sys.argv[1] not in commands:
    sys.stderr.write('Expected one of: %s\n' % ', '.join(commands))
    return 1
  # Forward any remaining argv entries as positional arguments.
  return commands[sys.argv[1]](*sys.argv[2:])
# Allow use both as a library (e.g. GetToolchainDir/CopyDlls from gyp/GN
# wrappers) and as a standalone command-line script.
if __name__ == '__main__':
  sys.exit(main())

6
deps/v8/docs/becoming_v8_committer.md

@ -1,6 +1,8 @@
# Becoming a V8 committer
## What is a committer? ## What is a committer?
Technically, a committer is someone who has write access to the V8 SVN repository. A committer can submit his or her own patches or patches from others. Technically, a committer is someone who has write access to the V8 Git repository. A committer can submit his or her own patches or patches from others.
This privilege is granted with some expectation of responsibility: committers are people who care about the V8 project and want to help meet its goals. A committer is not just someone who can make changes, but someone who has demonstrated his or her ability to collaborate with the team, get the most knowledgeable people to review code, contribute high-quality code, and follow through to fix issues (in code or tests). This privilege is granted with some expectation of responsibility: committers are people who care about the V8 project and want to help meet its goals. A committer is not just someone who can make changes, but someone who has demonstrated his or her ability to collaborate with the team, get the most knowledgeable people to review code, contribute high-quality code, and follow through to fix issues (in code or tests).
@ -35,4 +37,4 @@ You don't really need to do much to maintain committer status: just keep being a
In the unhappy event that a committer continues to disregard good citizenship (or actively disrupts the project), we may need to revoke that person's status. The process is the same as for nominating a new committer: someone suggests the revocation with a good reason, two people second the motion, and a vote may be called if consensus cannot be reached. I hope that's simple enough, and that we never have to test it in practice. In the unhappy event that a committer continues to disregard good citizenship (or actively disrupts the project), we may need to revoke that person's status. The process is the same as for nominating a new committer: someone suggests the revocation with a good reason, two people second the motion, and a vote may be called if consensus cannot be reached. I hope that's simple enough, and that we never have to test it in practice.
(Source: inspired by http://dev.chromium.org/getting-involved/become-a-committer ) (Source: inspired by http://dev.chromium.org/getting-involved/become-a-committer )

7
deps/v8/docs/merging_and_patching.md

@ -4,7 +4,8 @@ If you have a patch to the master branch (e.g. an important bug fix) that needs
For the examples, a branched 2.4 version of V8 will be used. Substitute "2.4" with your version number. For the examples, a branched 2.4 version of V8 will be used. Substitute "2.4" with your version number.
**An associated issue is mandatory if a patch is merged. This helps with keeping track of merges.** **An associated issue on Chromium's or V8's issue tracker is mandatory if a patch is merged. This helps with keeping track of merges.
You can use [a template](https://code.google.com/p/v8/issues/entry?template=Merge%20request) to create an issue.**
# Merge process outlined # Merge process outlined
@ -60,7 +61,7 @@ Version 2.4.9.10: Issue xxx: The parser doesn't parse.
## I get an error during merge that is related to tagging. What should I do? ## I get an error during merge that is related to tagging. What should I do?
When two people are merging at the same time a race-condition can happen in the merge scripts. If this is the case, contact machenbach@chromium.org and hablich@chromium.org. When two people are merging at the same time a race-condition can happen in the merge scripts. If this is the case, contact machenbach@chromium.org and hablich@chromium.org.
## Is there a TL;DR;? ## Is there a TL;DR;?
1. Create issue 1. [Create issue on issue tracker](https://code.google.com/p/v8/issues/entry?template=Merge%20request)
1. Add Merge-Request-{Branch} to the issue 1. Add Merge-Request-{Branch} to the issue
1. Wait until somebody will add Merge-Approved-{Branch} 1. Wait until somebody will add Merge-Approved-{Branch}
1. Merge 1. Merge

9
deps/v8/docs/runtime_functions.md

@ -1,14 +1,7 @@
# Introduction # Runtime functions
Much of the JavaScript library is implemented in JavaScript code itself, Much of the JavaScript library is implemented in JavaScript code itself,
using a minimal set of C++ runtime functions callable from JavaScript. using a minimal set of C++ runtime functions callable from JavaScript.
Some of these are called using names that start with %, and using the flag Some of these are called using names that start with %, and using the flag
"--allow-natives-syntax". Others are only called by code generated by the "--allow-natives-syntax". Others are only called by code generated by the
code generators, and are not visible in JS, even using the % syntax. code generators, and are not visible in JS, even using the % syntax.
<a href='Hidden comment:
= Details =
Here are the V8 runtime functions, their JS names, if they are visible,
and their documentation.
<wiki:comment>

6
deps/v8/docs/source.md

@ -1,4 +1,6 @@
**Quick links:** [browse](http://code.google.com/p/v8/source/browse) | [browse bleeding edge](http://code.google.com/p/v8/source/browse/branches/bleeding_edge) | [changes](https://chromium.googlesource.com/v8/v8.git). # Source
**Quick links:** [browse](https://chromium.googlesource.com/v8/v8/) | [browse bleeding edge](https://chromium.googlesource.com/v8/v8/+/master) | [changes](https://chromium.googlesource.com/v8/v8/+log/master).
## Command-Line Access ## Command-Line Access
@ -36,4 +38,4 @@ V8 public API (basically the files under include/ directory) may change over tim
## GUI and IDE Access ## GUI and IDE Access
This project's Subversion repository may be accessed using many different client programs and plug-ins. See your client's documentation for more information. This project's Subversion repository may be accessed using many different client programs and plug-ins. See your client's documentation for more information.

6
deps/v8/docs/v8_c_plus_plus_styleand_sops.md

@ -1,7 +1,3 @@
# Introduction # V8 C++ Style Guide
In general, V8 should conform to Google's/Chrome's C++ Style Guide for new code that is written. Your V8 code should conform to them as much as possible. There will always be cases where Google/Chrome Style Guide conformity or Google/Chrome best practices are extremely cumbersome or underspecified for our use cases. We document these exceptions here. In general, V8 should conform to Google's/Chrome's C++ Style Guide for new code that is written. Your V8 code should conform to them as much as possible. There will always be cases where Google/Chrome Style Guide conformity or Google/Chrome best practices are extremely cumbersome or underspecified for our use cases. We document these exceptions here.
# Details
Coming Soon

14
deps/v8/docs/v8_committers_responsibility.md

@ -1,3 +1,5 @@
# V8 committer's responsibility
## Basic commit guidelines ## Basic commit guidelines
When you're committing to the V8 repositories, ensure that you follow those guidelines: When you're committing to the V8 repositories, ensure that you follow those guidelines:
@ -25,17 +27,13 @@ At the same time, we want to encourage many people to participate in the review
So, here are some guidelines to help clarify the process: So, here are some guidelines to help clarify the process:
1. When a patch author requests more than one reviewer, they should make clear in the review request email what they expect the responsibility of each reviewer to be. For example, you could write this in the email: 1. When a patch author requests more than one reviewer, they should make clear in the review request email what they expect the responsibility of each reviewer to be. For example, you could write this in the email:
``` * larry: bitmap changes
* sergey: process hacks
a. larry: bitmap changes * everybody else: FYI
b. sergey: process hacks
c. everybody else: FYI
```
1. In this case, you might be on the review list because you've asked to be in the loop for multiprocess changes, but you wouldn't be the primary reviewer and the author and other reviewers wouldn't be expecting you to review all the diffs in detail. 1. In this case, you might be on the review list because you've asked to be in the loop for multiprocess changes, but you wouldn't be the primary reviewer and the author and other reviewers wouldn't be expecting you to review all the diffs in detail.
1. If you get a review that includes many other people, and the author didn't do (1), please ask them what part you're responsible for if you don't want to review the whole thing in detail. 1. If you get a review that includes many other people, and the author didn't do (1), please ask them what part you're responsible for if you don't want to review the whole thing in detail.
1. The author should wait for approval from everybody on the reviewer list before checking in. 1. The author should wait for approval from everybody on the reviewer list before checking in.
1. People who are on a review without clear review responsibility (i.e. drive-by reviews) should be super responsive and not hold up the review. The patch author should feel free to ping them mercilessly if they are. 1. People who are on a review without clear review responsibility (i.e. drive-by reviews) should be super responsive and not hold up the review. The patch author should feel free to ping them mercilessly if they are.
1. If you're an "FYI" person on a review and you didn't actually review in detail (or at all), but don't have a problem with the patch, note this. You could say something like "rubber stamp" or "ACK" instead of "LGTM." This way the real reviewers know not to trust that you did their work for them, but the author of the patch knows they don't have to wait for further feedback from you. Hopefully we can still keep everybody in the loop but have clear ownership and detailed reviews. It might even speed up some changes since you can quickly "ACK" changes you don't care about, and the author knows they don't have to wait for feedback from you. 1. If you're an "FYI" person on a review and you didn't actually review in detail (or at all), but don't have a problem with the patch, note this. You could say something like "rubber stamp" or "ACK" instead of "LGTM." This way the real reviewers know not to trust that you did their work for them, but the author of the patch knows they don't have to wait for further feedback from you. Hopefully we can still keep everybody in the loop but have clear ownership and detailed reviews. It might even speed up some changes since you can quickly "ACK" changes you don't care about, and the author knows they don't have to wait for feedback from you.
(Adapted from: http://dev.chromium.org/developers/committers-responsibility ) (Adapted from: http://dev.chromium.org/developers/committers-responsibility )

6
deps/v8/include/v8-version.h

@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build // NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts. // system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 4 #define V8_MAJOR_VERSION 4
#define V8_MINOR_VERSION 7 #define V8_MINOR_VERSION 8
#define V8_BUILD_NUMBER 80 #define V8_BUILD_NUMBER 271
#define V8_PATCH_LEVEL 32 #define V8_PATCH_LEVEL 17
// Use 1 for candidates and 0 otherwise. // Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.) // (Boolean macro values are not supported by all preprocessors.)

166
deps/v8/include/v8.h

@ -103,6 +103,7 @@ class String;
class StringObject; class StringObject;
class Symbol; class Symbol;
class SymbolObject; class SymbolObject;
class Private;
class Uint32; class Uint32;
class Utils; class Utils;
class Value; class Value;
@ -146,7 +147,7 @@ template<typename T> class CustomArguments;
class PropertyCallbackArguments; class PropertyCallbackArguments;
class FunctionCallbackArguments; class FunctionCallbackArguments;
class GlobalHandles; class GlobalHandles;
} } // namespace internal
/** /**
@ -311,6 +312,7 @@ class Local {
friend class String; friend class String;
friend class Object; friend class Object;
friend class Context; friend class Context;
friend class Private;
template<class F> friend class internal::CustomArguments; template<class F> friend class internal::CustomArguments;
friend Local<Primitive> Undefined(Isolate* isolate); friend Local<Primitive> Undefined(Isolate* isolate);
friend Local<Primitive> Null(Isolate* isolate); friend Local<Primitive> Null(Isolate* isolate);
@ -603,6 +605,13 @@ template <class T> class PersistentBase {
*/ */
V8_INLINE void MarkPartiallyDependent(); V8_INLINE void MarkPartiallyDependent();
/**
* Marks the reference to this object as active. The scavenge garbage
* collection should not reclaim the objects marked as active.
 * This bit is cleared after each garbage collection pass.
*/
V8_INLINE void MarkActive();
V8_INLINE bool IsIndependent() const; V8_INLINE bool IsIndependent() const;
/** Checks if the handle holds the only reference to an object. */ /** Checks if the handle holds the only reference to an object. */
@ -966,8 +975,8 @@ class V8_EXPORT SealHandleScope {
void operator delete(void*, size_t); void operator delete(void*, size_t);
internal::Isolate* isolate_; internal::Isolate* isolate_;
int prev_level_;
internal::Object** prev_limit_; internal::Object** prev_limit_;
int prev_sealed_level_;
}; };
@ -2465,6 +2474,7 @@ class V8_EXPORT Symbol : public Name {
static Local<Symbol> GetIterator(Isolate* isolate); static Local<Symbol> GetIterator(Isolate* isolate);
static Local<Symbol> GetUnscopables(Isolate* isolate); static Local<Symbol> GetUnscopables(Isolate* isolate);
static Local<Symbol> GetToStringTag(Isolate* isolate); static Local<Symbol> GetToStringTag(Isolate* isolate);
static Local<Symbol> GetIsConcatSpreadable(Isolate* isolate);
V8_INLINE static Symbol* Cast(v8::Value* obj); V8_INLINE static Symbol* Cast(v8::Value* obj);
@ -2474,6 +2484,34 @@ class V8_EXPORT Symbol : public Name {
}; };
/**
* A private symbol
*
* This is an experimental feature. Use at your own risk.
*/
class V8_EXPORT Private : public Data {
public:
// Returns the print name string of the private symbol, or undefined if none.
Local<Value> Name() const;
// Create a private symbol. If name is not empty, it will be the description.
static Local<Private> New(Isolate* isolate,
Local<String> name = Local<String>());
// Retrieve a global private symbol. If a symbol with this name has not
// been retrieved in the same isolate before, it is created.
// Note that private symbols created this way are never collected, so
// they should only be used for statically fixed properties.
// Also, there is only one global name space for the names used as keys.
// To minimize the potential for clashes, use qualified names as keys,
// e.g., "Class#property".
static Local<Private> ForApi(Isolate* isolate, Local<String> name);
private:
Private();
};
/** /**
* A JavaScript number value (ECMA-262, 4.3.20) * A JavaScript number value (ECMA-262, 4.3.20)
*/ */
@ -2701,6 +2739,18 @@ class V8_EXPORT Object : public Value {
PropertyAttribute attribute = None, PropertyAttribute attribute = None,
AccessControl settings = DEFAULT); AccessControl settings = DEFAULT);
/**
* Functionality for private properties.
* This is an experimental feature, use at your own risk.
* Note: Private properties are not inherited. Do not rely on this, since it
* may change.
*/
Maybe<bool> HasPrivate(Local<Context> context, Local<Private> key);
Maybe<bool> SetPrivate(Local<Context> context, Local<Private> key,
Local<Value> value);
Maybe<bool> DeletePrivate(Local<Context> context, Local<Private> key);
MaybeLocal<Value> GetPrivate(Local<Context> context, Local<Private> key);
/** /**
* Returns an array containing the names of the enumerable properties * Returns an array containing the names of the enumerable properties
* of this object, including properties from prototype objects. The * of this object, including properties from prototype objects. The
@ -2869,16 +2919,12 @@ class V8_EXPORT Object : public Value {
*/ */
int GetIdentityHash(); int GetIdentityHash();
/** V8_DEPRECATE_SOON("Use v8::Object::SetPrivate instead.",
* Access hidden properties on JavaScript objects. These properties are bool SetHiddenValue(Local<String> key, Local<Value> value));
* hidden from the executing JavaScript and only accessible through the V8 V8_DEPRECATE_SOON("Use v8::Object::GetHidden instead.",
* C++ API. Hidden properties introduced by V8 internally (for example the Local<Value> GetHiddenValue(Local<String> key));
* identity hash) are prefixed with "v8::". V8_DEPRECATE_SOON("Use v8::Object::DeletePrivate instead.",
*/ bool DeleteHiddenValue(Local<String> key));
// TODO(dcarney): convert these to take a isolate and optionally bailout?
bool SetHiddenValue(Local<String> key, Local<Value> value);
Local<Value> GetHiddenValue(Local<String> key);
bool DeleteHiddenValue(Local<String> key);
/** /**
* Clone this object with a fast but shallow copy. Values will point * Clone this object with a fast but shallow copy. Values will point
@ -3955,7 +4001,9 @@ class V8_EXPORT RegExp : public Object {
kNone = 0, kNone = 0,
kGlobal = 1, kGlobal = 1,
kIgnoreCase = 2, kIgnoreCase = 2,
kMultiline = 4 kMultiline = 4,
kSticky = 8,
kUnicode = 16
}; };
/** /**
@ -4007,6 +4055,15 @@ class V8_EXPORT External : public Value {
}; };
#define V8_INTRINSICS_LIST(F) F(ArrayProto_values, array_values_iterator)
enum Intrinsic {
#define V8_DECL_INTRINSIC(name, iname) k##name,
V8_INTRINSICS_LIST(V8_DECL_INTRINSIC)
#undef V8_DECL_INTRINSIC
};
// --- Templates --- // --- Templates ---
@ -4027,13 +4084,6 @@ class V8_EXPORT Template : public Data {
PropertyAttribute attribute = None, PropertyAttribute attribute = None,
AccessControl settings = DEFAULT); AccessControl settings = DEFAULT);
#ifdef V8_JS_ACCESSORS
void SetAccessorProperty(Local<Name> name,
Local<Function> getter = Local<Function>(),
Local<Function> setter = Local<Function>(),
PropertyAttribute attribute = None);
#endif // V8_JS_ACCESSORS
/** /**
* Whenever the property with the given name is accessed on objects * Whenever the property with the given name is accessed on objects
* created from this Template the getter and setter callbacks * created from this Template the getter and setter callbacks
@ -4076,6 +4126,13 @@ class V8_EXPORT Template : public Data {
Local<AccessorSignature> signature = Local<AccessorSignature>(), Local<AccessorSignature> signature = Local<AccessorSignature>(),
AccessControl settings = DEFAULT); AccessControl settings = DEFAULT);
/**
* During template instantiation, sets the value with the intrinsic property
* from the correct context.
*/
void SetIntrinsicDataProperty(Local<Name> name, Intrinsic intrinsic,
PropertyAttribute attribute = None);
private: private:
Template(); Template();
@ -4234,6 +4291,14 @@ enum AccessType {
}; };
/**
* Returns true if the given context should be allowed to access the given
* object.
*/
typedef bool (*AccessCheckCallback)(Local<Context> accessing_context,
Local<Object> accessed_object);
/** /**
* Returns true if cross-context access should be allowed to the named * Returns true if cross-context access should be allowed to the named
* property with the given key on the host object. * property with the given key on the host object.
@ -4642,16 +4707,21 @@ class V8_EXPORT ObjectTemplate : public Template {
void MarkAsUndetectable(); void MarkAsUndetectable();
/** /**
* Sets access check callbacks on the object template and enables * Sets access check callback on the object template and enables access
* access checks. * checks.
* *
* When accessing properties on instances of this object template, * When accessing properties on instances of this object template,
* the access check callback will be called to determine whether or * the access check callback will be called to determine whether or
* not to allow cross-context access to the properties. * not to allow cross-context access to the properties.
*/ */
void SetAccessCheckCallbacks(NamedSecurityCallback named_handler, void SetAccessCheckCallback(AccessCheckCallback callback,
IndexedSecurityCallback indexed_handler, Local<Value> data = Local<Value>());
Local<Value> data = Local<Value>());
V8_DEPRECATE_SOON(
"Use SetAccessCheckCallback instead",
void SetAccessCheckCallbacks(NamedSecurityCallback named_handler,
IndexedSecurityCallback indexed_handler,
Local<Value> data = Local<Value>()));
/** /**
* Gets the number of internal fields for objects generated from * Gets the number of internal fields for objects generated from
@ -5006,6 +5076,7 @@ class V8_EXPORT HeapStatistics {
size_t total_available_size() { return total_available_size_; } size_t total_available_size() { return total_available_size_; }
size_t used_heap_size() { return used_heap_size_; } size_t used_heap_size() { return used_heap_size_; }
size_t heap_size_limit() { return heap_size_limit_; } size_t heap_size_limit() { return heap_size_limit_; }
size_t does_zap_garbage() { return does_zap_garbage_; }
private: private:
size_t total_heap_size_; size_t total_heap_size_;
@ -5014,6 +5085,7 @@ class V8_EXPORT HeapStatistics {
size_t total_available_size_; size_t total_available_size_;
size_t used_heap_size_; size_t used_heap_size_;
size_t heap_size_limit_; size_t heap_size_limit_;
bool does_zap_garbage_;
friend class V8; friend class V8;
friend class Isolate; friend class Isolate;
@ -5351,6 +5423,9 @@ class V8_EXPORT Isolate {
kSlotsBufferOverflow = 5, kSlotsBufferOverflow = 5,
kObjectObserve = 6, kObjectObserve = 6,
kForcedGC = 7, kForcedGC = 7,
kSloppyMode = 8,
kStrictMode = 9,
kStrongMode = 10,
kUseCounterFeatureCount // This enum value must be last. kUseCounterFeatureCount // This enum value must be last.
}; };
@ -5521,7 +5596,10 @@ class V8_EXPORT Isolate {
/** Returns true if this isolate has a current context. */ /** Returns true if this isolate has a current context. */
bool InContext(); bool InContext();
/** Returns the context that is on the top of the stack. */ /**
* Returns the context of the currently running JavaScript, or the context
* on the top of the stack if no JavaScript is running.
*/
Local<Context> GetCurrentContext(); Local<Context> GetCurrentContext();
/** /**
@ -5529,9 +5607,12 @@ class V8_EXPORT Isolate {
* context of the top-most JavaScript frame. If there are no * context of the top-most JavaScript frame. If there are no
* JavaScript frames an empty handle is returned. * JavaScript frames an empty handle is returned.
*/ */
Local<Context> GetCallingContext(); V8_DEPRECATE_SOON(
"Calling context concept is not compatible with tail calls, and will be "
"removed.",
Local<Context> GetCallingContext());
/** Returns the last entered context. */ /** Returns the last context entered through V8's C++ API. */
Local<Context> GetEnteredContext(); Local<Context> GetEnteredContext();
/** /**
@ -5789,6 +5870,18 @@ class V8_EXPORT Isolate {
*/ */
int ContextDisposedNotification(bool dependant_context = true); int ContextDisposedNotification(bool dependant_context = true);
/**
* Optional notification that the isolate switched to the foreground.
* V8 uses these notifications to guide heuristics.
*/
void IsolateInForegroundNotification();
/**
* Optional notification that the isolate switched to the background.
* V8 uses these notifications to guide heuristics.
*/
void IsolateInBackgroundNotification();
/** /**
* Allows the host application to provide the address of a function that is * Allows the host application to provide the address of a function that is
* notified each time code is added, moved or removed. * notified each time code is added, moved or removed.
@ -5918,6 +6011,13 @@ class V8_EXPORT Isolate {
*/ */
void VisitHandlesForPartialDependence(PersistentHandleVisitor* visitor); void VisitHandlesForPartialDependence(PersistentHandleVisitor* visitor);
/**
* Iterates through all the persistent handles in the current isolate's heap
* that have class_ids and are weak to be marked as inactive if there is no
* pending activity for the handle.
*/
void VisitWeakHandles(PersistentHandleVisitor* visitor);
private: private:
template <class K, class V, class Traits> template <class K, class V, class Traits>
friend class PersistentValueMapBase; friend class PersistentValueMapBase;
@ -7000,6 +7100,7 @@ class Internals {
static const int kNodeStateIsNearDeathValue = 4; static const int kNodeStateIsNearDeathValue = 4;
static const int kNodeIsIndependentShift = 3; static const int kNodeIsIndependentShift = 3;
static const int kNodeIsPartiallyDependentShift = 4; static const int kNodeIsPartiallyDependentShift = 4;
static const int kNodeIsActiveShift = 4;
static const int kJSObjectType = 0xb7; static const int kJSObjectType = 0xb7;
static const int kFirstNonstringType = 0x80; static const int kFirstNonstringType = 0x80;
@ -7326,6 +7427,15 @@ void PersistentBase<T>::MarkPartiallyDependent() {
} }
template <class T>
void PersistentBase<T>::MarkActive() {
typedef internal::Internals I;
if (this->IsEmpty()) return;
I::UpdateNodeFlag(reinterpret_cast<internal::Object**>(this->val_), true,
I::kNodeIsActiveShift);
}
template <class T> template <class T>
void PersistentBase<T>::SetWrapperClassId(uint16_t class_id) { void PersistentBase<T>::SetWrapperClassId(uint16_t class_id) {
typedef internal::Internals I; typedef internal::Internals I;

17
deps/v8/include/v8config.h

@ -174,6 +174,7 @@
// supported // supported
// V8_HAS_ATTRIBUTE_DEPRECATED - __attribute__((deprecated)) supported // V8_HAS_ATTRIBUTE_DEPRECATED - __attribute__((deprecated)) supported
// V8_HAS_ATTRIBUTE_NOINLINE - __attribute__((noinline)) supported // V8_HAS_ATTRIBUTE_NOINLINE - __attribute__((noinline)) supported
// V8_HAS_ATTRIBUTE_NORETURN - __attribute__((noreturn)) supported
// V8_HAS_ATTRIBUTE_UNUSED - __attribute__((unused)) supported // V8_HAS_ATTRIBUTE_UNUSED - __attribute__((unused)) supported
// V8_HAS_ATTRIBUTE_VISIBILITY - __attribute__((visibility)) supported // V8_HAS_ATTRIBUTE_VISIBILITY - __attribute__((visibility)) supported
// V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT - __attribute__((warn_unused_result)) // V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT - __attribute__((warn_unused_result))
@ -190,6 +191,7 @@
// V8_HAS_DECLSPEC_DEPRECATED - __declspec(deprecated) supported // V8_HAS_DECLSPEC_DEPRECATED - __declspec(deprecated) supported
// V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported // V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported
// V8_HAS_DECLSPEC_SELECTANY - __declspec(selectany) supported // V8_HAS_DECLSPEC_SELECTANY - __declspec(selectany) supported
// V8_HAS_DECLSPEC_NORETURN - __declspec(noreturn) supported
// V8_HAS___FORCEINLINE - __forceinline supported // V8_HAS___FORCEINLINE - __forceinline supported
// //
// Note that testing for compilers and/or features must be done using #if // Note that testing for compilers and/or features must be done using #if
@ -212,6 +214,7 @@
# define V8_HAS_ATTRIBUTE_ALWAYS_INLINE (__has_attribute(always_inline)) # define V8_HAS_ATTRIBUTE_ALWAYS_INLINE (__has_attribute(always_inline))
# define V8_HAS_ATTRIBUTE_DEPRECATED (__has_attribute(deprecated)) # define V8_HAS_ATTRIBUTE_DEPRECATED (__has_attribute(deprecated))
# define V8_HAS_ATTRIBUTE_NOINLINE (__has_attribute(noinline)) # define V8_HAS_ATTRIBUTE_NOINLINE (__has_attribute(noinline))
# define V8_HAS_ATTRIBUTE_NORETURN (__has_attribute(noreturn))
# define V8_HAS_ATTRIBUTE_UNUSED (__has_attribute(unused)) # define V8_HAS_ATTRIBUTE_UNUSED (__has_attribute(unused))
# define V8_HAS_ATTRIBUTE_VISIBILITY (__has_attribute(visibility)) # define V8_HAS_ATTRIBUTE_VISIBILITY (__has_attribute(visibility))
# define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \ # define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \
@ -253,6 +256,7 @@
# define V8_HAS_ATTRIBUTE_DEPRECATED (V8_GNUC_PREREQ(3, 4, 0)) # define V8_HAS_ATTRIBUTE_DEPRECATED (V8_GNUC_PREREQ(3, 4, 0))
# define V8_HAS_ATTRIBUTE_DEPRECATED_MESSAGE (V8_GNUC_PREREQ(4, 5, 0)) # define V8_HAS_ATTRIBUTE_DEPRECATED_MESSAGE (V8_GNUC_PREREQ(4, 5, 0))
# define V8_HAS_ATTRIBUTE_NOINLINE (V8_GNUC_PREREQ(3, 4, 0)) # define V8_HAS_ATTRIBUTE_NOINLINE (V8_GNUC_PREREQ(3, 4, 0))
# define V8_HAS_ATTRIBUTE_NORETURN (V8_GNUC_PREREQ(2, 5, 0))
# define V8_HAS_ATTRIBUTE_UNUSED (V8_GNUC_PREREQ(2, 95, 0)) # define V8_HAS_ATTRIBUTE_UNUSED (V8_GNUC_PREREQ(2, 95, 0))
# define V8_HAS_ATTRIBUTE_VISIBILITY (V8_GNUC_PREREQ(4, 3, 0)) # define V8_HAS_ATTRIBUTE_VISIBILITY (V8_GNUC_PREREQ(4, 3, 0))
# define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \ # define V8_HAS_ATTRIBUTE_WARN_UNUSED_RESULT \
@ -285,6 +289,7 @@
# define V8_HAS_DECLSPEC_DEPRECATED 1 # define V8_HAS_DECLSPEC_DEPRECATED 1
# define V8_HAS_DECLSPEC_NOINLINE 1 # define V8_HAS_DECLSPEC_NOINLINE 1
# define V8_HAS_DECLSPEC_SELECTANY 1 # define V8_HAS_DECLSPEC_SELECTANY 1
# define V8_HAS_DECLSPEC_NORETURN 1
# define V8_HAS___FORCEINLINE 1 # define V8_HAS___FORCEINLINE 1
@ -319,6 +324,18 @@
#endif #endif
// A macro used to tell the compiler that a particular function never returns.
// Use like:
// V8_NORETURN void MyAbort() { abort(); }
#if V8_HAS_ATTRIBUTE_NORETURN
# define V8_NORETURN __attribute__((noreturn))
#elif V8_HAS_DECLSPEC_NORETURN
# define V8_NORETURN __declspec(noreturn)
#else
# define V8_NORETURN /* NOT SUPPORTED */
#endif
// A macro (V8_DEPRECATED) to mark classes or functions as deprecated. // A macro (V8_DEPRECATED) to mark classes or functions as deprecated.
#if defined(V8_DEPRECATION_WARNINGS) && V8_HAS_ATTRIBUTE_DEPRECATED_MESSAGE #if defined(V8_DEPRECATION_WARNINGS) && V8_HAS_ATTRIBUTE_DEPRECATED_MESSAGE
#define V8_DEPRECATED(message, declarator) \ #define V8_DEPRECATED(message, declarator) \

8
deps/v8/samples/shell.cc

@ -412,9 +412,11 @@ void ReportException(v8::Isolate* isolate, v8::TryCatch* try_catch) {
fprintf(stderr, "^"); fprintf(stderr, "^");
} }
fprintf(stderr, "\n"); fprintf(stderr, "\n");
v8::String::Utf8Value stack_trace( v8::Local<v8::Value> stack_trace_string;
try_catch->StackTrace(context).ToLocalChecked()); if (try_catch->StackTrace(context).ToLocal(&stack_trace_string) &&
if (stack_trace.length() > 0) { stack_trace_string->IsString() &&
v8::Local<v8::String>::Cast(stack_trace_string)->Length() > 0) {
v8::String::Utf8Value stack_trace(stack_trace_string);
const char* stack_trace_string = ToCString(stack_trace); const char* stack_trace_string = ToCString(stack_trace);
fprintf(stderr, "%s\n", stack_trace_string); fprintf(stderr, "%s\n", stack_trace_string);
} }

60
deps/v8/src/accessors.cc

@ -198,18 +198,6 @@ void Accessors::ArrayLengthGetter(
} }
// Tries to non-observably convert |value| to a valid array length.
// Returns false if it fails.
static bool FastAsArrayLength(Isolate* isolate, Handle<Object> value,
uint32_t* length) {
if (value->ToArrayLength(length)) return true;
// We don't support AsArrayLength, so use AsArrayIndex for now. This just
// misses out on kMaxUInt32.
if (value->IsString()) return String::cast(*value)->AsArrayIndex(length);
return false;
}
void Accessors::ArrayLengthSetter( void Accessors::ArrayLengthSetter(
v8::Local<v8::Name> name, v8::Local<v8::Name> name,
v8::Local<v8::Value> val, v8::Local<v8::Value> val,
@ -222,26 +210,9 @@ void Accessors::ArrayLengthSetter(
Handle<Object> length_obj = Utils::OpenHandle(*val); Handle<Object> length_obj = Utils::OpenHandle(*val);
uint32_t length = 0; uint32_t length = 0;
if (!FastAsArrayLength(isolate, length_obj, &length)) { if (!JSArray::AnythingToArrayLength(isolate, length_obj, &length)) {
Handle<Object> uint32_v; isolate->OptionalRescheduleException(false);
if (!Object::ToUint32(isolate, length_obj).ToHandle(&uint32_v)) { return;
isolate->OptionalRescheduleException(false);
return;
}
Handle<Object> number_v;
if (!Object::ToNumber(length_obj).ToHandle(&number_v)) {
isolate->OptionalRescheduleException(false);
return;
}
if (uint32_v->Number() != number_v->Number()) {
Handle<Object> exception = isolate->factory()->NewRangeError(
MessageTemplate::kInvalidArrayLength);
return isolate->ScheduleThrow(*exception);
}
CHECK(uint32_v->ToArrayLength(&length));
} }
if (JSArray::ObservableSetLength(array, length).is_null()) { if (JSArray::ObservableSetLength(array, length).is_null()) {
@ -260,7 +231,6 @@ Handle<AccessorInfo> Accessors::ArrayLengthInfo(
} }
// //
// Accessors::StringLength // Accessors::StringLength
// //
@ -1074,7 +1044,12 @@ void Accessors::FunctionNameGetter(
HandleScope scope(isolate); HandleScope scope(isolate);
Handle<JSFunction> function = Handle<JSFunction> function =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder())); Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Object> result(function->shared()->name(), isolate); Handle<Object> result;
if (function->shared()->name_should_print_as_anonymous()) {
result = isolate->factory()->anonymous_string();
} else {
result = handle(function->shared()->name(), isolate);
}
info.GetReturnValue().Set(Utils::ToLocal(result)); info.GetReturnValue().Set(Utils::ToLocal(result));
} }
@ -1200,20 +1175,7 @@ Handle<Object> GetFunctionArguments(Isolate* isolate,
return ArgumentsForInlinedFunction(frame, function, function_index); return ArgumentsForInlinedFunction(frame, function, function_index);
} }
if (!frame->is_optimized()) { // Find the frame that holds the actual arguments passed to the function.
// If there is an arguments variable in the stack, we return that.
Handle<ScopeInfo> scope_info(function->shared()->scope_info());
int index = scope_info->StackSlotIndex(
isolate->heap()->arguments_string());
if (index >= 0) {
Handle<Object> arguments(frame->GetExpression(index), isolate);
if (!arguments->IsArgumentsMarker()) return arguments;
}
}
// If there is no arguments variable in the stack or we have an
// optimized frame, we find the frame that holds the actual arguments
// passed to the function.
it.AdvanceToArgumentsFrame(); it.AdvanceToArgumentsFrame();
frame = it.frame(); frame = it.frame();
@ -1359,7 +1321,7 @@ MaybeHandle<JSFunction> FindCaller(Isolate* isolate,
// If caller is a built-in function and caller's caller is also built-in, // If caller is a built-in function and caller's caller is also built-in,
// use that instead. // use that instead.
JSFunction* potential_caller = caller; JSFunction* potential_caller = caller;
while (potential_caller != NULL && potential_caller->IsBuiltin()) { while (potential_caller != NULL && potential_caller->shared()->IsBuiltin()) {
caller = potential_caller; caller = potential_caller;
potential_caller = it.next(); potential_caller = it.next();
} }

3
deps/v8/src/accessors.h

@ -106,6 +106,7 @@ class Accessors : public AllStatic {
Handle<ExecutableAccessorInfo> accessor); Handle<ExecutableAccessorInfo> accessor);
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ACCESSORS_H_ #endif // V8_ACCESSORS_H_

38
deps/v8/src/address-map.cc

@ -0,0 +1,38 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/address-map.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
namespace v8 {
namespace internal {
// Builds (or reuses) the isolate-wide map from constant strong-root heap
// objects to their index in the root list.  The map is cached on the
// isolate, so only the first RootIndexMap constructed for an isolate pays
// the construction cost; ownership of the HashMap stays with the isolate.
RootIndexMap::RootIndexMap(Isolate* isolate) {
  map_ = isolate->root_index_map();
  if (map_ != NULL) return;  // Already built for this isolate; reuse it.
  map_ = new HashMap(HashMap::PointersMatch);
  for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
    Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
    Object* root = isolate->heap()->root(root_index);
    // Omit root entries that can be written after initialization. They must
    // not be referenced through the root list in the snapshot.
    if (root->IsHeapObject() &&
        isolate->heap()->RootCanBeTreatedAsConstant(root_index)) {
      HeapObject* heap_object = HeapObject::cast(root);
      HashMap::Entry* entry = LookupEntry(map_, heap_object, false);
      if (entry != NULL) {
        // Some are initialized to a previous value in the root list.
        // Keep the first (smallest) index for such duplicates.
        DCHECK_LT(GetValue(entry), i);
      } else {
        SetValue(LookupEntry(map_, heap_object, true), i);
      }
    }
  }
  // Cache on the isolate so subsequent RootIndexMap instances share it.
  isolate->set_root_index_map(map_);
}
} // namespace internal
} // namespace v8

184
deps/v8/src/address-map.h

@ -0,0 +1,184 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ADDRESS_MAP_H_
#define V8_ADDRESS_MAP_H_
#include "src/assert-scope.h"
#include "src/hashmap.h"
#include "src/objects.h"
namespace v8 {
namespace internal {
// Shared helpers for hash maps keyed by a HeapObject's address, with a
// uint32_t payload encoded directly in the entry's value pointer.
class AddressMapBase {
 protected:
  // Stores |v| in the entry by encoding it into the value pointer.
  static void SetValue(HashMap::Entry* entry, uint32_t v) {
    entry->value = reinterpret_cast<void*>(v);
  }

  // Decodes the uint32_t previously stored with SetValue().
  static uint32_t GetValue(HashMap::Entry* entry) {
    return static_cast<uint32_t>(reinterpret_cast<intptr_t>(entry->value));
  }

  // Finds the entry for |obj|; when |insert| is true, creates it if absent.
  // Returns NULL only when |insert| is false and |obj| has no entry.
  inline static HashMap::Entry* LookupEntry(HashMap* map, HeapObject* obj,
                                            bool insert) {
    // LookupOrInsert already returns the entry, so the previous extra
    // Lookup() after inserting was redundant and hashed the key twice.
    return insert ? map->LookupOrInsert(Key(obj), Hash(obj))
                  : map->Lookup(Key(obj), Hash(obj));
  }

 private:
  static uint32_t Hash(HeapObject* obj) {
    // Cast directly to uint32_t: truncating via static_cast<int32_t> was
    // implementation-defined for out-of-range values; unsigned is modular.
    return static_cast<uint32_t>(reinterpret_cast<intptr_t>(obj->address()));
  }

  static void* Key(HeapObject* obj) {
    return reinterpret_cast<void*>(obj->address());
  }
};
class RootIndexMap : public AddressMapBase {
public:
explicit RootIndexMap(Isolate* isolate);
static const int kInvalidRootIndex = -1;
int Lookup(HeapObject* obj) {
HashMap::Entry* entry = LookupEntry(map_, obj, false);
if (entry) return GetValue(entry);
return kInvalidRootIndex;
}
private:
HashMap* map_;
DISALLOW_COPY_AND_ASSIGN(RootIndexMap);
};
// Compact 32-bit encoding of where an object will live after
// deserialization.  Regular objects encode (space, chunk index, chunk
// offset); large objects encode only their index within LO_SPACE.  The
// four highest bitfield values are reserved for special references.
class BackReference {
 public:
  explicit BackReference(uint32_t bitfield) : bitfield_(bitfield) {}

  // Default-constructed references are invalid.
  BackReference() : bitfield_(kInvalidValue) {}

  // Special reference to the source code string.
  static BackReference SourceReference() { return BackReference(kSourceValue); }

  // Special reference to the global proxy object.
  static BackReference GlobalProxyReference() {
    return BackReference(kGlobalProxyValue);
  }

  // Reference to the |index|-th object in the large-object space.
  static BackReference LargeObjectReference(uint32_t index) {
    return BackReference(SpaceBits::encode(LO_SPACE) |
                         ChunkOffsetBits::encode(index));
  }

  // Placeholder reference.  NOTE(review): presumably patched up by callers
  // later -- the users are not visible from this header.
  static BackReference DummyReference() { return BackReference(kDummyValue); }

  // Reference to an ordinary object.  |chunk_offset| must be object-aligned
  // and is stored right-shifted by kObjectAlignmentBits to save bits.
  static BackReference Reference(AllocationSpace space, uint32_t chunk_index,
                                 uint32_t chunk_offset) {
    DCHECK(IsAligned(chunk_offset, kObjectAlignment));
    DCHECK_NE(LO_SPACE, space);
    return BackReference(
        SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
        ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
  }

  bool is_valid() const { return bitfield_ != kInvalidValue; }
  bool is_source() const { return bitfield_ == kSourceValue; }
  bool is_global_proxy() const { return bitfield_ == kGlobalProxyValue; }

  AllocationSpace space() const {
    DCHECK(is_valid());
    return SpaceBits::decode(bitfield_);
  }

  // Byte offset within the chunk (undoes the alignment shift of Reference()).
  uint32_t chunk_offset() const {
    DCHECK(is_valid());
    return ChunkOffsetBits::decode(bitfield_) << kObjectAlignmentBits;
  }

  // For LO_SPACE references only: the index set by LargeObjectReference().
  uint32_t large_object_index() const {
    DCHECK(is_valid());
    DCHECK(chunk_index() == 0);
    return ChunkOffsetBits::decode(bitfield_);
  }

  uint32_t chunk_index() const {
    DCHECK(is_valid());
    return ChunkIndexBits::decode(bitfield_);
  }

  // The (chunk index, chunk offset) pair without the space tag.
  uint32_t reference() const {
    DCHECK(is_valid());
    return bitfield_ & (ChunkOffsetBits::kMask | ChunkIndexBits::kMask);
  }

  uint32_t bitfield() const { return bitfield_; }

 private:
  // Reserved special values; kInvalidValue must stay the largest so that
  // is_valid() distinguishes it from every real encoding.
  static const uint32_t kInvalidValue = 0xFFFFFFFF;
  static const uint32_t kSourceValue = 0xFFFFFFFE;
  static const uint32_t kGlobalProxyValue = 0xFFFFFFFD;
  static const uint32_t kDummyValue = 0xFFFFFFFC;

  // Bit layout (low to high): [chunk offset >> alignment | chunk index | space].
  static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
  static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;

 public:
  static const int kMaxChunkIndex = (1 << kChunkIndexSize) - 1;

 private:
  class ChunkOffsetBits : public BitField<uint32_t, 0, kChunkOffsetSize> {};
  class ChunkIndexBits
      : public BitField<uint32_t, ChunkOffsetBits::kNext, kChunkIndexSize> {};
  class SpaceBits
      : public BitField<AllocationSpace, ChunkIndexBits::kNext, kSpaceTagSize> {
  };

  uint32_t bitfield_;
};
// Mapping objects to their location after deserialization.
// This is used during building, but not at runtime by V8.
class BackReferenceMap : public AddressMapBase {
 public:
  BackReferenceMap()
      : no_allocation_(), map_(new HashMap(HashMap::PointersMatch)) {}

  ~BackReferenceMap() { delete map_; }

  // Returns the recorded back reference for |obj|, or an invalid
  // BackReference when none has been added yet.
  BackReference Lookup(HeapObject* obj) {
    HashMap::Entry* entry = LookupEntry(map_, obj, false);
    if (entry == NULL) return BackReference();
    return BackReference(GetValue(entry));
  }

  // Records |b| as the location of |obj|.  Each object may be added once.
  void Add(HeapObject* obj, BackReference b) {
    DCHECK(b.is_valid());
    DCHECK_NULL(LookupEntry(map_, obj, false));
    HashMap::Entry* entry = LookupEntry(map_, obj, true);
    SetValue(entry, b.bitfield());
  }

  // Tags |string| as the source code string.
  void AddSourceString(String* string) {
    Add(string, BackReference::SourceReference());
  }

  // Tags |global_proxy| as the global proxy object.
  void AddGlobalProxy(HeapObject* global_proxy) {
    Add(global_proxy, BackReference::GlobalProxyReference());
  }

 private:
  DisallowHeapAllocation no_allocation_;
  HashMap* map_;
  DISALLOW_COPY_AND_ASSIGN(BackReferenceMap);
};
} // namespace internal
} // namespace v8
#endif // V8_ADDRESS_MAP_H_

5
deps/v8/src/allocation-site-scopes.h

@ -36,7 +36,7 @@ class AllocationSiteContext {
void InitializeTraversal(Handle<AllocationSite> site) { void InitializeTraversal(Handle<AllocationSite> site) {
top_ = site; top_ = site;
current_ = Handle<AllocationSite>(*top_, isolate()); current_ = Handle<AllocationSite>::New(*top_, isolate());
} }
private: private:
@ -95,6 +95,7 @@ class AllocationSiteUsageContext : public AllocationSiteContext {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ALLOCATION_SITE_SCOPES_H_ #endif // V8_ALLOCATION_SITE_SCOPES_H_

3
deps/v8/src/allocation.h

@ -88,6 +88,7 @@ class FreeStoreAllocationPolicy {
void* AlignedAlloc(size_t size, size_t alignment); void* AlignedAlloc(size_t size, size_t alignment);
void AlignedFree(void *ptr); void AlignedFree(void *ptr);
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ALLOCATION_H_ #endif // V8_ALLOCATION_H_

130
deps/v8/src/api-natives.cc

@ -37,25 +37,6 @@ MaybeHandle<Object> Instantiate(Isolate* isolate, Handle<Object> data,
} }
MaybeHandle<JSFunction> InstantiateFunctionOrMaybeDont(Isolate* isolate,
Handle<Object> data) {
DCHECK(data->IsFunctionTemplateInfo() || data->IsJSFunction());
if (data->IsFunctionTemplateInfo()) {
// A function template needs to be instantiated.
return InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(data));
#ifdef V8_JS_ACCESSORS
} else if (data->IsJSFunction()) {
// If we already have a proper function, we do not need additional work.
// (This should only happen for JavaScript API accessors.)
return Handle<JSFunction>::cast(data);
#endif // V8_JS_ACCESSORS
} else {
UNREACHABLE();
return MaybeHandle<JSFunction>();
}
}
MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate, MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
Handle<JSObject> object, Handle<JSObject> object,
Handle<Name> name, Handle<Name> name,
@ -63,14 +44,18 @@ MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
Handle<Object> setter, Handle<Object> setter,
PropertyAttributes attributes) { PropertyAttributes attributes) {
if (!getter->IsUndefined()) { if (!getter->IsUndefined()) {
ASSIGN_RETURN_ON_EXCEPTION(isolate, getter, ASSIGN_RETURN_ON_EXCEPTION(
InstantiateFunctionOrMaybeDont(isolate, getter), isolate, getter,
Object); InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(getter)),
Object);
} }
if (!setter->IsUndefined()) { if (!setter->IsUndefined()) {
ASSIGN_RETURN_ON_EXCEPTION(isolate, setter, ASSIGN_RETURN_ON_EXCEPTION(
InstantiateFunctionOrMaybeDont(isolate, setter), isolate, setter,
Object); InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(setter)),
Object);
} }
RETURN_ON_EXCEPTION(isolate, JSObject::DefineAccessor(object, name, getter, RETURN_ON_EXCEPTION(isolate, JSObject::DefineAccessor(object, name, getter,
setter, attributes), setter, attributes),
@ -102,8 +87,10 @@ MaybeHandle<Object> DefineDataProperty(Isolate* isolate,
} }
#endif #endif
return Object::AddDataProperty(&it, value, attributes, STRICT, MAYBE_RETURN_NULL(
Object::CERTAINLY_NOT_STORE_FROM_KEYED); Object::AddDataProperty(&it, value, attributes, Object::THROW_ON_ERROR,
Object::CERTAINLY_NOT_STORE_FROM_KEYED));
return value;
} }
@ -148,6 +135,20 @@ class AccessCheckDisableScope {
}; };
Object* GetIntrinsic(Isolate* isolate, v8::Intrinsic intrinsic) {
Handle<Context> native_context = isolate->native_context();
DCHECK(!native_context.is_null());
switch (intrinsic) {
#define GET_INTRINSIC_VALUE(name, iname) \
case v8::k##name: \
return native_context->iname();
V8_INTRINSICS_LIST(GET_INTRINSIC_VALUE)
#undef GET_INTRINSIC_VALUE
}
return nullptr;
}
MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj, MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
Handle<TemplateInfo> data) { Handle<TemplateInfo> data) {
auto property_list = handle(data->property_list(), isolate); auto property_list = handle(data->property_list(), isolate);
@ -162,23 +163,40 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
int i = 0; int i = 0;
for (int c = 0; c < data->number_of_properties(); c++) { for (int c = 0; c < data->number_of_properties(); c++) {
auto name = handle(Name::cast(properties.get(i++)), isolate); auto name = handle(Name::cast(properties.get(i++)), isolate);
PropertyDetails details(Smi::cast(properties.get(i++))); auto bit = handle(properties.get(i++), isolate);
PropertyAttributes attributes = details.attributes(); if (bit->IsSmi()) {
PropertyKind kind = details.kind(); PropertyDetails details(Smi::cast(*bit));
PropertyAttributes attributes = details.attributes();
PropertyKind kind = details.kind();
if (kind == kData) {
auto prop_data = handle(properties.get(i++), isolate);
RETURN_ON_EXCEPTION(isolate, DefineDataProperty(isolate, obj, name,
prop_data, attributes),
JSObject);
} else {
auto getter = handle(properties.get(i++), isolate);
auto setter = handle(properties.get(i++), isolate);
RETURN_ON_EXCEPTION(isolate,
DefineAccessorProperty(isolate, obj, name, getter,
setter, attributes),
JSObject);
}
} else {
// Intrinsic data property --- Get appropriate value from the current
// context.
PropertyDetails details(Smi::cast(properties.get(i++)));
PropertyAttributes attributes = details.attributes();
DCHECK_EQ(kData, details.kind());
if (kind == kData) { v8::Intrinsic intrinsic =
auto prop_data = handle(properties.get(i++), isolate); static_cast<v8::Intrinsic>(Smi::cast(properties.get(i++))->value());
auto prop_data = handle(GetIntrinsic(isolate, intrinsic), isolate);
RETURN_ON_EXCEPTION(isolate, DefineDataProperty(isolate, obj, name, RETURN_ON_EXCEPTION(isolate, DefineDataProperty(isolate, obj, name,
prop_data, attributes), prop_data, attributes),
JSObject); JSObject);
} else {
auto getter = handle(properties.get(i++), isolate);
auto setter = handle(properties.get(i++), isolate);
RETURN_ON_EXCEPTION(isolate,
DefineAccessorProperty(isolate, obj, name, getter,
setter, attributes),
JSObject);
} }
} }
return obj; return obj;
@ -268,9 +286,9 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
JSObject::GetProperty(parent_instance, JSObject::GetProperty(parent_instance,
isolate->factory()->prototype_string()), isolate->factory()->prototype_string()),
JSFunction); JSFunction);
RETURN_ON_EXCEPTION( MAYBE_RETURN(JSObject::SetPrototype(prototype, parent_prototype, false,
isolate, JSObject::SetPrototype(prototype, parent_prototype, false), Object::THROW_ON_ERROR),
JSFunction); MaybeHandle<JSFunction>());
} }
} }
auto function = ApiNatives::CreateApiFunction( auto function = ApiNatives::CreateApiFunction(
@ -377,21 +395,25 @@ void ApiNatives::AddDataProperty(Isolate* isolate, Handle<TemplateInfo> info,
} }
void ApiNatives::AddDataProperty(Isolate* isolate, Handle<TemplateInfo> info,
Handle<Name> name, v8::Intrinsic intrinsic,
PropertyAttributes attributes) {
const int kSize = 4;
auto value = handle(Smi::FromInt(intrinsic), isolate);
auto intrinsic_marker = isolate->factory()->true_value();
PropertyDetails details(attributes, DATA, 0, PropertyCellType::kNoCell);
auto details_handle = handle(details.AsSmi(), isolate);
Handle<Object> data[kSize] = {name, intrinsic_marker, details_handle, value};
AddPropertyToPropertyList(isolate, info, kSize, data);
}
void ApiNatives::AddAccessorProperty(Isolate* isolate, void ApiNatives::AddAccessorProperty(Isolate* isolate,
Handle<TemplateInfo> info, Handle<TemplateInfo> info,
Handle<Name> name, Handle<Object> getter, Handle<Name> name,
Handle<Object> setter, Handle<FunctionTemplateInfo> getter,
Handle<FunctionTemplateInfo> setter,
PropertyAttributes attributes) { PropertyAttributes attributes) {
#ifdef V8_JS_ACCESSORS
DCHECK(getter.is_null() || getter->IsFunctionTemplateInfo() ||
getter->IsJSFunction());
DCHECK(setter.is_null() || setter->IsFunctionTemplateInfo() ||
setter->IsJSFunction());
#else
DCHECK(getter.is_null() || getter->IsFunctionTemplateInfo());
DCHECK(setter.is_null() || setter->IsFunctionTemplateInfo());
#endif // V8_JS_ACCESSORS
const int kSize = 4; const int kSize = 4;
PropertyDetails details(attributes, ACCESSOR, 0, PropertyCellType::kNoCell); PropertyDetails details(attributes, ACCESSOR, 0, PropertyCellType::kNoCell);
auto details_handle = handle(details.AsSmi(), isolate); auto details_handle = handle(details.AsSmi(), isolate);

9
deps/v8/src/api-natives.h

@ -44,9 +44,14 @@ class ApiNatives {
Handle<Name> name, Handle<Object> value, Handle<Name> name, Handle<Object> value,
PropertyAttributes attributes); PropertyAttributes attributes);
static void AddDataProperty(Isolate* isolate, Handle<TemplateInfo> info,
Handle<Name> name, v8::Intrinsic intrinsic,
PropertyAttributes attributes);
static void AddAccessorProperty(Isolate* isolate, Handle<TemplateInfo> info, static void AddAccessorProperty(Isolate* isolate, Handle<TemplateInfo> info,
Handle<Name> name, Handle<Object> getter, Handle<Name> name,
Handle<Object> setter, Handle<FunctionTemplateInfo> getter,
Handle<FunctionTemplateInfo> setter,
PropertyAttributes attributes); PropertyAttributes attributes);
static void AddNativeDataProperty(Isolate* isolate, Handle<TemplateInfo> info, static void AddNativeDataProperty(Isolate* isolate, Handle<TemplateInfo> info,

342
deps/v8/src/api.cc

@ -43,6 +43,7 @@
#include "src/profiler/profile-generator-inl.h" #include "src/profiler/profile-generator-inl.h"
#include "src/profiler/sampler.h" #include "src/profiler/sampler.h"
#include "src/property.h" #include "src/property.h"
#include "src/property-descriptor.h"
#include "src/property-details.h" #include "src/property-details.h"
#include "src/prototype.h" #include "src/prototype.h"
#include "src/runtime/runtime.h" #include "src/runtime/runtime.h"
@ -158,6 +159,7 @@ class CallDepthScope {
do_callback_(do_callback) { do_callback_(do_callback) {
// TODO(dcarney): remove this when blink stops crashing. // TODO(dcarney): remove this when blink stops crashing.
DCHECK(!isolate_->external_caught_exception()); DCHECK(!isolate_->external_caught_exception());
isolate_->IncrementJsCallsFromApiCounter();
isolate_->handle_scope_implementer()->IncrementCallDepth(); isolate_->handle_scope_implementer()->IncrementCallDepth();
if (!context_.IsEmpty()) context_->Enter(); if (!context_.IsEmpty()) context_->Enter();
} }
@ -734,17 +736,17 @@ SealHandleScope::SealHandleScope(Isolate* isolate) {
i::HandleScopeData* current = internal_isolate->handle_scope_data(); i::HandleScopeData* current = internal_isolate->handle_scope_data();
prev_limit_ = current->limit; prev_limit_ = current->limit;
current->limit = current->next; current->limit = current->next;
prev_level_ = current->level; prev_sealed_level_ = current->sealed_level;
current->level = 0; current->sealed_level = current->level;
} }
SealHandleScope::~SealHandleScope() { SealHandleScope::~SealHandleScope() {
i::HandleScopeData* current = isolate_->handle_scope_data(); i::HandleScopeData* current = isolate_->handle_scope_data();
DCHECK_EQ(0, current->level);
current->level = prev_level_;
DCHECK_EQ(current->next, current->limit); DCHECK_EQ(current->next, current->limit);
current->limit = prev_limit_; current->limit = prev_limit_;
DCHECK_EQ(current->level, current->sealed_level);
current->sealed_level = prev_sealed_level_;
} }
@ -955,25 +957,6 @@ void Template::SetAccessorProperty(
} }
#ifdef V8_JS_ACCESSORS
void Template::SetAccessorProperty(v8::Local<v8::Name> name,
v8::Local<Function> getter,
v8::Local<Function> setter,
v8::PropertyAttribute attribute) {
auto templ = Utils::OpenHandle(this);
auto isolate = templ->GetIsolate();
ENTER_V8(isolate);
DCHECK(!name.IsEmpty());
DCHECK(!getter.IsEmpty() || !setter.IsEmpty());
i::HandleScope scope(isolate);
i::ApiNatives::AddAccessorProperty(
isolate, templ, Utils::OpenHandle(*name),
Utils::OpenHandle(*getter, true), Utils::OpenHandle(*setter, true),
static_cast<PropertyAttributes>(attribute));
}
#endif // V8_JS_ACCESSORS
// --- F u n c t i o n T e m p l a t e --- // --- F u n c t i o n T e m p l a t e ---
static void InitializeFunctionTemplate( static void InitializeFunctionTemplate(
i::Handle<i::FunctionTemplateInfo> info) { i::Handle<i::FunctionTemplateInfo> info) {
@ -1352,6 +1335,18 @@ void Template::SetNativeDataProperty(v8::Local<Name> name,
} }
void Template::SetIntrinsicDataProperty(Local<Name> name, Intrinsic intrinsic,
PropertyAttribute attribute) {
auto templ = Utils::OpenHandle(this);
i::Isolate* isolate = templ->GetIsolate();
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::ApiNatives::AddDataProperty(isolate, templ, Utils::OpenHandle(*name),
intrinsic,
static_cast<PropertyAttributes>(attribute));
}
void ObjectTemplate::SetAccessor(v8::Local<String> name, void ObjectTemplate::SetAccessor(v8::Local<String> name,
AccessorGetterCallback getter, AccessorGetterCallback getter,
AccessorSetterCallback setter, AccessorSetterCallback setter,
@ -1440,6 +1435,33 @@ void ObjectTemplate::MarkAsUndetectable() {
} }
void ObjectTemplate::SetAccessCheckCallback(AccessCheckCallback callback,
Local<Value> data) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ENTER_V8(isolate);
i::HandleScope scope(isolate);
auto cons = EnsureConstructor(isolate, this);
EnsureNotInstantiated(cons, "v8::ObjectTemplate::SetAccessCheckCallback");
i::Handle<i::Struct> struct_info =
isolate->factory()->NewStruct(i::ACCESS_CHECK_INFO_TYPE);
i::Handle<i::AccessCheckInfo> info =
i::Handle<i::AccessCheckInfo>::cast(struct_info);
SET_FIELD_WRAPPED(info, set_callback, callback);
SET_FIELD_WRAPPED(info, set_named_callback, nullptr);
SET_FIELD_WRAPPED(info, set_indexed_callback, nullptr);
if (data.IsEmpty()) {
data = v8::Undefined(reinterpret_cast<v8::Isolate*>(isolate));
}
info->set_data(*Utils::OpenHandle(*data));
cons->set_access_check_info(*info);
cons->set_needs_access_check(true);
}
void ObjectTemplate::SetAccessCheckCallbacks( void ObjectTemplate::SetAccessCheckCallbacks(
NamedSecurityCallback named_callback, NamedSecurityCallback named_callback,
IndexedSecurityCallback indexed_callback, Local<Value> data) { IndexedSecurityCallback indexed_callback, Local<Value> data) {
@ -1454,6 +1476,7 @@ void ObjectTemplate::SetAccessCheckCallbacks(
i::Handle<i::AccessCheckInfo> info = i::Handle<i::AccessCheckInfo> info =
i::Handle<i::AccessCheckInfo>::cast(struct_info); i::Handle<i::AccessCheckInfo>::cast(struct_info);
SET_FIELD_WRAPPED(info, set_callback, nullptr);
SET_FIELD_WRAPPED(info, set_named_callback, named_callback); SET_FIELD_WRAPPED(info, set_named_callback, named_callback);
SET_FIELD_WRAPPED(info, set_indexed_callback, indexed_callback); SET_FIELD_WRAPPED(info, set_indexed_callback, indexed_callback);
@ -1968,7 +1991,8 @@ MaybeLocal<Function> ScriptCompiler::CompileFunctionInContext(
Utils::OpenHandle(*v8_context->Global()), 0, Utils::OpenHandle(*v8_context->Global()), 0,
nullptr).ToHandle(&result); nullptr).ToHandle(&result);
RETURN_ON_FAILED_EXECUTION(Function); RETURN_ON_FAILED_EXECUTION(Function);
RETURN_ESCAPED(Utils::ToLocal(i::Handle<i::JSFunction>::cast(result))); RETURN_ESCAPED(
Utils::CallableToLocal(i::Handle<i::JSFunction>::cast(result)));
} }
@ -2686,9 +2710,7 @@ bool Value::IsFalse() const {
} }
bool Value::IsFunction() const { bool Value::IsFunction() const { return Utils::OpenHandle(this)->IsCallable(); }
return Utils::OpenHandle(this)->IsJSFunction();
}
bool Value::IsName() const { bool Value::IsName() const {
@ -3027,8 +3049,7 @@ void v8::Object::CheckCast(Value* that) {
void v8::Function::CheckCast(Value* that) { void v8::Function::CheckCast(Value* that) {
i::Handle<i::Object> obj = Utils::OpenHandle(that); i::Handle<i::Object> obj = Utils::OpenHandle(that);
Utils::ApiCheck(obj->IsJSFunction(), Utils::ApiCheck(obj->IsCallable(), "v8::Function::Cast()",
"v8::Function::Cast()",
"Could not convert to function"); "Could not convert to function");
} }
@ -3494,30 +3515,26 @@ Maybe<bool> v8::Object::DefineOwnProperty(v8::Local<v8::Context> context,
v8::PropertyAttribute attributes) { v8::PropertyAttribute attributes) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::DefineOwnProperty()", PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::DefineOwnProperty()",
bool); bool);
auto self = Utils::OpenHandle(this); i::Handle<i::JSObject> self = Utils::OpenHandle(this);
auto key_obj = Utils::OpenHandle(*key); i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
auto value_obj = Utils::OpenHandle(*value); i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
if (self->IsAccessCheckNeeded() && !isolate->MayAccess(self)) { if (self->IsAccessCheckNeeded() &&
!isolate->MayAccess(handle(isolate->context()), self)) {
isolate->ReportFailedAccessCheck(self); isolate->ReportFailedAccessCheck(self);
return Nothing<bool>(); return Nothing<bool>();
} }
i::Handle<i::FixedArray> desc = isolate->factory()->NewFixedArray(3); i::PropertyDescriptor desc;
desc->set(0, isolate->heap()->ToBoolean(!(attributes & v8::ReadOnly))); desc.set_writable(!(attributes & v8::ReadOnly));
desc->set(1, isolate->heap()->ToBoolean(!(attributes & v8::DontEnum))); desc.set_enumerable(!(attributes & v8::DontEnum));
desc->set(2, isolate->heap()->ToBoolean(!(attributes & v8::DontDelete))); desc.set_configurable(!(attributes & v8::DontDelete));
i::Handle<i::JSArray> desc_array = desc.set_value(value_obj);
isolate->factory()->NewJSArrayWithElements(desc, i::FAST_ELEMENTS, 3); bool success = i::JSReceiver::DefineOwnProperty(isolate, self, key_obj, &desc,
i::Handle<i::Object> args[] = {self, key_obj, value_obj, desc_array}; i::Object::DONT_THROW);
i::Handle<i::Object> undefined = isolate->factory()->undefined_value(); // Even though we said DONT_THROW, there might be accessors that do throw.
i::Handle<i::JSFunction> fun = isolate->object_define_own_property();
i::Handle<i::Object> result;
has_pending_exception =
!i::Execution::Call(isolate, fun, undefined, arraysize(args), args)
.ToHandle(&result);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return Just(result->BooleanValue()); return Just(success);
} }
@ -3526,20 +3543,12 @@ static i::MaybeHandle<i::Object> DefineObjectProperty(
i::Handle<i::JSObject> js_object, i::Handle<i::Object> key, i::Handle<i::JSObject> js_object, i::Handle<i::Object> key,
i::Handle<i::Object> value, PropertyAttributes attrs) { i::Handle<i::Object> value, PropertyAttributes attrs) {
i::Isolate* isolate = js_object->GetIsolate(); i::Isolate* isolate = js_object->GetIsolate();
// Check if the given key is an array index. bool success = false;
uint32_t index = 0; i::LookupIterator it = i::LookupIterator::PropertyOrElement(
if (key->ToArrayIndex(&index)) { isolate, js_object, key, &success, i::LookupIterator::OWN);
return i::JSObject::SetOwnElementIgnoreAttributes(js_object, index, value, if (!success) return i::MaybeHandle<i::Object>();
attrs);
}
i::Handle<i::Name> name;
ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, name,
i::Object::ToName(isolate, key),
i::MaybeHandle<i::Object>());
return i::JSObject::DefinePropertyOrElementIgnoreAttributes(js_object, name, return i::JSObject::DefineOwnPropertyIgnoreAttributes(&it, value, attrs);
value, attrs);
} }
@ -3575,6 +3584,13 @@ bool v8::Object::ForceSet(v8::Local<Value> key, v8::Local<Value> value,
} }
Maybe<bool> v8::Object::SetPrivate(Local<Context> context, Local<Private> key,
Local<Value> value) {
return DefineOwnProperty(context, Local<Name>(reinterpret_cast<Name*>(*key)),
value, DontEnum);
}
MaybeLocal<Value> v8::Object::Get(Local<v8::Context> context, MaybeLocal<Value> v8::Object::Get(Local<v8::Context> context,
Local<Value> key) { Local<Value> key) {
PREPARE_FOR_EXECUTION(context, "v8::Object::Get()", Value); PREPARE_FOR_EXECUTION(context, "v8::Object::Get()", Value);
@ -3611,6 +3627,12 @@ Local<Value> v8::Object::Get(uint32_t index) {
} }
MaybeLocal<Value> v8::Object::GetPrivate(Local<Context> context,
Local<Private> key) {
return Get(context, Local<Value>(reinterpret_cast<Value*>(*key)));
}
Maybe<PropertyAttribute> v8::Object::GetPropertyAttributes( Maybe<PropertyAttribute> v8::Object::GetPropertyAttributes(
Local<Context> context, Local<Value> key) { Local<Context> context, Local<Value> key) {
PREPARE_FOR_EXECUTION_PRIMITIVE( PREPARE_FOR_EXECUTION_PRIMITIVE(
@ -3680,8 +3702,9 @@ Maybe<bool> v8::Object::SetPrototype(Local<Context> context,
// We do not allow exceptions thrown while setting the prototype // We do not allow exceptions thrown while setting the prototype
// to propagate outside. // to propagate outside.
TryCatch try_catch(reinterpret_cast<v8::Isolate*>(isolate)); TryCatch try_catch(reinterpret_cast<v8::Isolate*>(isolate));
auto result = i::JSObject::SetPrototype(self, value_obj, false); auto result = i::JSObject::SetPrototype(self, value_obj, false,
has_pending_exception = result.is_null(); i::Object::THROW_ON_ERROR);
has_pending_exception = result.IsNothing();
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return Just(true); return Just(true);
} }
@ -3705,6 +3728,7 @@ Local<Object> v8::Object::FindInstanceInPrototypeChain(
return Local<Object>(); return Local<Object>();
} }
} }
// IsTemplateFor() ensures that iter.GetCurrent() can't be a Proxy here.
return Utils::ToLocal(i::handle(iter.GetCurrent<i::JSObject>(), isolate)); return Utils::ToLocal(i::handle(iter.GetCurrent<i::JSObject>(), isolate));
} }
@ -3846,6 +3870,12 @@ bool v8::Object::Delete(v8::Local<Value> key) {
} }
Maybe<bool> v8::Object::DeletePrivate(Local<Context> context,
Local<Private> key) {
return Delete(context, Local<Value>(reinterpret_cast<Value*>(*key)));
}
Maybe<bool> v8::Object::Has(Local<Context> context, Local<Value> key) { Maybe<bool> v8::Object::Has(Local<Context> context, Local<Value> key) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::Get()", bool); PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::Get()", bool);
auto self = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
@ -3874,6 +3904,11 @@ bool v8::Object::Has(v8::Local<Value> key) {
} }
Maybe<bool> v8::Object::HasPrivate(Local<Context> context, Local<Private> key) {
return HasOwnProperty(context, Local<Name>(reinterpret_cast<Name*>(*key)));
}
Maybe<bool> v8::Object::Delete(Local<Context> context, uint32_t index) { Maybe<bool> v8::Object::Delete(Local<Context> context, uint32_t index) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::DeleteProperty()", PREPARE_FOR_EXECUTION_PRIMITIVE(context, "v8::Object::DeleteProperty()",
bool); bool);
@ -4072,13 +4107,14 @@ MaybeLocal<Value> v8::Object::GetRealNamedPropertyInPrototypeChain(
Local<Context> context, Local<Name> key) { Local<Context> context, Local<Name> key) {
PREPARE_FOR_EXECUTION( PREPARE_FOR_EXECUTION(
context, "v8::Object::GetRealNamedPropertyInPrototypeChain()", Value); context, "v8::Object::GetRealNamedPropertyInPrototypeChain()", Value);
auto self = Utils::OpenHandle(this); i::Handle<i::JSObject> self = Utils::OpenHandle(this);
auto key_obj = Utils::OpenHandle(*key); i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
i::PrototypeIterator iter(isolate, self); i::PrototypeIterator iter(isolate, self);
if (iter.IsAtEnd()) return MaybeLocal<Value>(); if (iter.IsAtEnd()) return MaybeLocal<Value>();
auto proto = i::PrototypeIterator::GetCurrent(iter); i::Handle<i::JSReceiver> proto =
i::PrototypeIterator::GetCurrent<i::JSReceiver>(iter);
i::LookupIterator it = i::LookupIterator::PropertyOrElement( i::LookupIterator it = i::LookupIterator::PropertyOrElement(
isolate, self, key_obj, i::Handle<i::JSReceiver>::cast(proto), isolate, self, key_obj, proto,
i::LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR); i::LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
Local<Value> result; Local<Value> result;
has_pending_exception = !ToLocal<Value>(i::Object::GetProperty(&it), &result); has_pending_exception = !ToLocal<Value>(i::Object::GetProperty(&it), &result);
@ -4102,22 +4138,20 @@ v8::Object::GetRealNamedPropertyAttributesInPrototypeChain(
PREPARE_FOR_EXECUTION_PRIMITIVE( PREPARE_FOR_EXECUTION_PRIMITIVE(
context, "v8::Object::GetRealNamedPropertyAttributesInPrototypeChain()", context, "v8::Object::GetRealNamedPropertyAttributesInPrototypeChain()",
PropertyAttribute); PropertyAttribute);
auto self = Utils::OpenHandle(this); i::Handle<i::JSObject> self = Utils::OpenHandle(this);
auto key_obj = Utils::OpenHandle(*key); i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
i::PrototypeIterator iter(isolate, self); i::PrototypeIterator iter(isolate, self);
if (iter.IsAtEnd()) return Nothing<PropertyAttribute>(); if (iter.IsAtEnd()) return Nothing<PropertyAttribute>();
auto proto = i::PrototypeIterator::GetCurrent(iter); i::Handle<i::JSReceiver> proto =
i::PrototypeIterator::GetCurrent<i::JSReceiver>(iter);
i::LookupIterator it = i::LookupIterator::PropertyOrElement( i::LookupIterator it = i::LookupIterator::PropertyOrElement(
isolate, self, key_obj, i::Handle<i::JSReceiver>::cast(proto), isolate, self, key_obj, proto,
i::LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR); i::LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
auto result = i::JSReceiver::GetPropertyAttributes(&it); Maybe<PropertyAttributes> result = i::JSReceiver::GetPropertyAttributes(&it);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(PropertyAttribute); RETURN_ON_FAILED_EXECUTION_PRIMITIVE(PropertyAttribute);
if (!it.IsFound()) return Nothing<PropertyAttribute>(); if (!it.IsFound()) return Nothing<PropertyAttribute>();
if (result.FromJust() == ABSENT) { if (result.FromJust() == ABSENT) return Just(None);
return Just(static_cast<PropertyAttribute>(NONE)); return Just(static_cast<PropertyAttribute>(result.FromJust()));
}
return Just<PropertyAttribute>(
static_cast<PropertyAttribute>(result.FromJust()));
} }
@ -4206,13 +4240,16 @@ int v8::Object::GetIdentityHash() {
bool v8::Object::SetHiddenValue(v8::Local<v8::String> key, bool v8::Object::SetHiddenValue(v8::Local<v8::String> key,
v8::Local<v8::Value> value) { v8::Local<v8::Value> value) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (value.IsEmpty()) return DeleteHiddenValue(key);
ENTER_V8(isolate); ENTER_V8(isolate);
i::HandleScope scope(isolate); i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this); i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key); i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
i::Handle<i::String> key_string = i::Handle<i::String> key_string =
isolate->factory()->InternalizeString(key_obj); isolate->factory()->InternalizeString(key_obj);
if (value.IsEmpty()) {
i::JSObject::DeleteHiddenProperty(self, key_string);
return true;
}
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value); i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
i::Handle<i::Object> result = i::Handle<i::Object> result =
i::JSObject::SetHiddenProperty(self, key_string, value_obj); i::JSObject::SetHiddenProperty(self, key_string, value_obj);
@ -4336,8 +4373,8 @@ MaybeLocal<Object> Function::NewInstance(Local<Context> context, int argc,
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**)); STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv); i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
Local<Object> result; Local<Object> result;
has_pending_exception = has_pending_exception = !ToLocal<Object>(
!ToLocal<Object>(i::Execution::New(self, argc, args), &result); i::Execution::New(isolate, self, self, argc, args), &result);
RETURN_ON_FAILED_EXECUTION(Object); RETURN_ON_FAILED_EXECUTION(Object);
RETURN_ESCAPED(result); RETURN_ESCAPED(result);
} }
@ -4375,20 +4412,32 @@ Local<v8::Value> Function::Call(v8::Local<v8::Value> recv, int argc,
void Function::SetName(v8::Local<v8::String> name) { void Function::SetName(v8::Local<v8::String> name) {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) return;
auto func = i::Handle<i::JSFunction>::cast(self);
func->shared()->set_name(*Utils::OpenHandle(*name)); func->shared()->set_name(*Utils::OpenHandle(*name));
} }
Local<Value> Function::GetName() const { Local<Value> Function::GetName() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return ToApiHandle<Primitive>(
self->GetIsolate()->factory()->undefined_value());
}
auto func = i::Handle<i::JSFunction>::cast(self);
return Utils::ToLocal(i::Handle<i::Object>(func->shared()->name(), return Utils::ToLocal(i::Handle<i::Object>(func->shared()->name(),
func->GetIsolate())); func->GetIsolate()));
} }
Local<Value> Function::GetInferredName() const { Local<Value> Function::GetInferredName() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return ToApiHandle<Primitive>(
self->GetIsolate()->factory()->undefined_value());
}
auto func = i::Handle<i::JSFunction>::cast(self);
return Utils::ToLocal(i::Handle<i::Object>(func->shared()->inferred_name(), return Utils::ToLocal(i::Handle<i::Object>(func->shared()->inferred_name(),
func->GetIsolate())); func->GetIsolate()));
} }
@ -4397,7 +4446,11 @@ Local<Value> Function::GetInferredName() const {
Local<Value> Function::GetDisplayName() const { Local<Value> Function::GetDisplayName() const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate(); i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ENTER_V8(isolate); ENTER_V8(isolate);
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return ToApiHandle<Primitive>(isolate->factory()->undefined_value());
}
auto func = i::Handle<i::JSFunction>::cast(self);
i::Handle<i::String> property_name = i::Handle<i::String> property_name =
isolate->factory()->NewStringFromStaticChars("displayName"); isolate->factory()->NewStringFromStaticChars("displayName");
i::Handle<i::Object> value = i::Handle<i::Object> value =
@ -4411,7 +4464,11 @@ Local<Value> Function::GetDisplayName() const {
ScriptOrigin Function::GetScriptOrigin() const { ScriptOrigin Function::GetScriptOrigin() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return v8::ScriptOrigin(Local<Value>());
}
auto func = i::Handle<i::JSFunction>::cast(self);
if (func->shared()->script()->IsScript()) { if (func->shared()->script()->IsScript()) {
i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
return GetScriptOriginForScript(func->GetIsolate(), script); return GetScriptOriginForScript(func->GetIsolate(), script);
@ -4424,7 +4481,11 @@ const int Function::kLineOffsetNotFound = -1;
int Function::GetScriptLineNumber() const { int Function::GetScriptLineNumber() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return kLineOffsetNotFound;
}
auto func = i::Handle<i::JSFunction>::cast(self);
if (func->shared()->script()->IsScript()) { if (func->shared()->script()->IsScript()) {
i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
return i::Script::GetLineNumber(script, func->shared()->start_position()); return i::Script::GetLineNumber(script, func->shared()->start_position());
@ -4434,7 +4495,11 @@ int Function::GetScriptLineNumber() const {
int Function::GetScriptColumnNumber() const { int Function::GetScriptColumnNumber() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return kLineOffsetNotFound;
}
auto func = i::Handle<i::JSFunction>::cast(self);
if (func->shared()->script()->IsScript()) { if (func->shared()->script()->IsScript()) {
i::Handle<i::Script> script(i::Script::cast(func->shared()->script())); i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
return i::Script::GetColumnNumber(script, func->shared()->start_position()); return i::Script::GetColumnNumber(script, func->shared()->start_position());
@ -4444,13 +4509,21 @@ int Function::GetScriptColumnNumber() const {
bool Function::IsBuiltin() const { bool Function::IsBuiltin() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
return func->IsBuiltin(); if (!self->IsJSFunction()) {
return false;
}
auto func = i::Handle<i::JSFunction>::cast(self);
return func->shared()->IsBuiltin();
} }
int Function::ScriptId() const { int Function::ScriptId() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return v8::UnboundScript::kNoScriptId;
}
auto func = i::Handle<i::JSFunction>::cast(self);
if (!func->shared()->script()->IsScript()) { if (!func->shared()->script()->IsScript()) {
return v8::UnboundScript::kNoScriptId; return v8::UnboundScript::kNoScriptId;
} }
@ -4460,16 +4533,19 @@ int Function::ScriptId() const {
Local<v8::Value> Function::GetBoundFunction() const { Local<v8::Value> Function::GetBoundFunction() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this); auto self = Utils::OpenHandle(this);
if (!self->IsJSFunction()) {
return v8::Undefined(reinterpret_cast<v8::Isolate*>(self->GetIsolate()));
}
auto func = i::Handle<i::JSFunction>::cast(self);
if (!func->shared()->bound()) { if (!func->shared()->bound()) {
return v8::Undefined(reinterpret_cast<v8::Isolate*>(func->GetIsolate())); return v8::Undefined(reinterpret_cast<v8::Isolate*>(func->GetIsolate()));
} }
i::Handle<i::FixedArray> bound_args = i::Handle<i::FixedArray>( i::Handle<i::BindingsArray> bound_args = i::Handle<i::BindingsArray>(
i::FixedArray::cast(func->function_bindings())); i::BindingsArray::cast(func->function_bindings()));
i::Handle<i::Object> original( i::Handle<i::Object> original(bound_args->bound_function(),
bound_args->get(i::JSFunction::kBoundFunctionIndex), func->GetIsolate());
func->GetIsolate()); return Utils::CallableToLocal(i::Handle<i::JSFunction>::cast(original));
return Utils::ToLocal(i::Handle<i::JSFunction>::cast(original));
} }
@ -5178,6 +5254,11 @@ Local<Value> Symbol::Name() const {
} }
Local<Value> Private::Name() const {
return reinterpret_cast<const Symbol*>(this)->Name();
}
double Number::Value() const { double Number::Value() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this); i::Handle<i::Object> obj = Utils::OpenHandle(this);
return obj->Number(); return obj->Number();
@ -6047,6 +6128,8 @@ REGEXP_FLAG_ASSERT_EQ(kNone, NONE);
REGEXP_FLAG_ASSERT_EQ(kGlobal, GLOBAL); REGEXP_FLAG_ASSERT_EQ(kGlobal, GLOBAL);
REGEXP_FLAG_ASSERT_EQ(kIgnoreCase, IGNORE_CASE); REGEXP_FLAG_ASSERT_EQ(kIgnoreCase, IGNORE_CASE);
REGEXP_FLAG_ASSERT_EQ(kMultiline, MULTILINE); REGEXP_FLAG_ASSERT_EQ(kMultiline, MULTILINE);
REGEXP_FLAG_ASSERT_EQ(kSticky, STICKY);
REGEXP_FLAG_ASSERT_EQ(kUnicode, UNICODE_ESCAPES);
#undef REGEXP_FLAG_ASSERT_EQ #undef REGEXP_FLAG_ASSERT_EQ
v8::RegExp::Flags v8::RegExp::GetFlags() const { v8::RegExp::Flags v8::RegExp::GetFlags() const {
@ -6750,7 +6833,8 @@ Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
static i::Handle<i::Symbol> SymbolFor(i::Isolate* isolate, static i::Handle<i::Symbol> SymbolFor(i::Isolate* isolate,
i::Handle<i::String> name, i::Handle<i::String> name,
i::Handle<i::String> part) { i::Handle<i::String> part,
bool private_symbol) {
i::Handle<i::JSObject> registry = isolate->GetSymbolRegistry(); i::Handle<i::JSObject> registry = isolate->GetSymbolRegistry();
i::Handle<i::JSObject> symbols = i::Handle<i::JSObject> symbols =
i::Handle<i::JSObject>::cast( i::Handle<i::JSObject>::cast(
@ -6759,7 +6843,10 @@ static i::Handle<i::Symbol> SymbolFor(i::Isolate* isolate,
i::Object::GetPropertyOrElement(symbols, name).ToHandleChecked(); i::Object::GetPropertyOrElement(symbols, name).ToHandleChecked();
if (!symbol->IsSymbol()) { if (!symbol->IsSymbol()) {
DCHECK(symbol->IsUndefined()); DCHECK(symbol->IsUndefined());
symbol = isolate->factory()->NewSymbol(); if (private_symbol)
symbol = isolate->factory()->NewPrivateSymbol();
else
symbol = isolate->factory()->NewSymbol();
i::Handle<i::Symbol>::cast(symbol)->set_name(*name); i::Handle<i::Symbol>::cast(symbol)->set_name(*name);
i::JSObject::SetProperty(symbols, name, symbol, i::STRICT).Assert(); i::JSObject::SetProperty(symbols, name, symbol, i::STRICT).Assert();
} }
@ -6771,7 +6858,7 @@ Local<Symbol> v8::Symbol::For(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name); i::Handle<i::String> i_name = Utils::OpenHandle(*name);
i::Handle<i::String> part = i_isolate->factory()->for_string(); i::Handle<i::String> part = i_isolate->factory()->for_string();
return Utils::ToLocal(SymbolFor(i_isolate, i_name, part)); return Utils::ToLocal(SymbolFor(i_isolate, i_name, part, false));
} }
@ -6779,7 +6866,7 @@ Local<Symbol> v8::Symbol::ForApi(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name); i::Handle<i::String> i_name = Utils::OpenHandle(*name);
i::Handle<i::String> part = i_isolate->factory()->for_api_string(); i::Handle<i::String> part = i_isolate->factory()->for_api_string();
return Utils::ToLocal(SymbolFor(i_isolate, i_name, part)); return Utils::ToLocal(SymbolFor(i_isolate, i_name, part, false));
} }
@ -6801,6 +6888,33 @@ Local<Symbol> v8::Symbol::GetToStringTag(Isolate* isolate) {
} }
Local<Symbol> v8::Symbol::GetIsConcatSpreadable(Isolate* isolate) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
return Utils::ToLocal(i_isolate->factory()->is_concat_spreadable_symbol());
}
Local<Private> v8::Private::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
LOG_API(i_isolate, "Private::New()");
ENTER_V8(i_isolate);
i::Handle<i::Symbol> symbol = i_isolate->factory()->NewPrivateSymbol();
if (!name.IsEmpty()) symbol->set_name(*Utils::OpenHandle(*name));
Local<Symbol> result = Utils::ToLocal(symbol);
return v8::Local<Private>(reinterpret_cast<Private*>(*result));
}
Local<Private> v8::Private::ForApi(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
i::Handle<i::String> i_name = Utils::OpenHandle(*name);
i::Handle<i::String> part = i_isolate->factory()->private_api_string();
Local<Symbol> result =
Utils::ToLocal(SymbolFor(i_isolate, i_name, part, true));
return v8::Local<Private>(reinterpret_cast<Private*>(*result));
}
Local<Number> v8::Number::New(Isolate* isolate, double value) { Local<Number> v8::Number::New(Isolate* isolate, double value) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(isolate);
if (std::isnan(value)) { if (std::isnan(value)) {
@ -7175,6 +7289,7 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
heap_statistics->total_available_size_ = heap->Available(); heap_statistics->total_available_size_ = heap->Available();
heap_statistics->used_heap_size_ = heap->SizeOfObjects(); heap_statistics->used_heap_size_ = heap->SizeOfObjects();
heap_statistics->heap_size_limit_ = heap->MaxReserved(); heap_statistics->heap_size_limit_ = heap->MaxReserved();
heap_statistics->does_zap_garbage_ = heap->ShouldZapGarbage();
} }
@ -7369,6 +7484,18 @@ int Isolate::ContextDisposedNotification(bool dependant_context) {
} }
void Isolate::IsolateInForegroundNotification() {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
return isolate->heap()->SetOptimizeForLatency();
}
void Isolate::IsolateInBackgroundNotification() {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
return isolate->heap()->SetOptimizeForMemoryUsage();
}
void Isolate::SetJitCodeEventHandler(JitCodeEventOptions options, void Isolate::SetJitCodeEventHandler(JitCodeEventOptions options,
JitCodeEventHandler event_handler) { JitCodeEventHandler event_handler) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
@ -7472,10 +7599,10 @@ class VisitorAdapter : public i::ObjectVisitor {
public: public:
explicit VisitorAdapter(PersistentHandleVisitor* visitor) explicit VisitorAdapter(PersistentHandleVisitor* visitor)
: visitor_(visitor) {} : visitor_(visitor) {}
virtual void VisitPointers(i::Object** start, i::Object** end) { void VisitPointers(i::Object** start, i::Object** end) override {
UNREACHABLE(); UNREACHABLE();
} }
virtual void VisitEmbedderReference(i::Object** p, uint16_t class_id) { void VisitEmbedderReference(i::Object** p, uint16_t class_id) override {
Value* value = ToApi<Value>(i::Handle<i::Object>(p)); Value* value = ToApi<Value>(i::Handle<i::Object>(p));
visitor_->VisitPersistentHandle( visitor_->VisitPersistentHandle(
reinterpret_cast<Persistent<Value>*>(&value), class_id); reinterpret_cast<Persistent<Value>*>(&value), class_id);
@ -7504,6 +7631,15 @@ void Isolate::VisitHandlesForPartialDependence(
} }
void Isolate::VisitWeakHandles(PersistentHandleVisitor* visitor) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
i::DisallowHeapAllocation no_allocation;
VisitorAdapter visitor_adapter(visitor);
isolate->global_handles()->IterateWeakRootsInNewSpaceWithClassIds(
&visitor_adapter);
}
String::Utf8Value::Utf8Value(v8::Local<v8::Value> obj) String::Utf8Value::Utf8Value(v8::Local<v8::Value> obj)
: str_(NULL), length_(0) { : str_(NULL), length_(0) {
if (obj.IsEmpty()) return; if (obj.IsEmpty()) return;
@ -7679,7 +7815,7 @@ MaybeLocal<Value> Debug::GetMirror(Local<Context> context,
i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global_object()); i::Handle<i::JSObject> debug(isolate_debug->debug_context()->global_object());
auto name = isolate->factory()->NewStringFromStaticChars("MakeMirror"); auto name = isolate->factory()->NewStringFromStaticChars("MakeMirror");
auto fun_obj = i::Object::GetProperty(debug, name).ToHandleChecked(); auto fun_obj = i::Object::GetProperty(debug, name).ToHandleChecked();
auto v8_fun = Utils::ToLocal(i::Handle<i::JSFunction>::cast(fun_obj)); auto v8_fun = Utils::CallableToLocal(i::Handle<i::JSFunction>::cast(fun_obj));
const int kArgc = 1; const int kArgc = 1;
v8::Local<v8::Value> argv[kArgc] = {obj}; v8::Local<v8::Value> argv[kArgc] = {obj};
Local<Value> result; Local<Value> result;

11
deps/v8/src/api.h

@ -168,7 +168,7 @@ class RegisteredExtension {
V(Symbol, Symbol) \ V(Symbol, Symbol) \
V(Script, JSFunction) \ V(Script, JSFunction) \
V(UnboundScript, SharedFunctionInfo) \ V(UnboundScript, SharedFunctionInfo) \
V(Function, JSFunction) \ V(Function, JSReceiver) \
V(Message, JSMessageObject) \ V(Message, JSMessageObject) \
V(Context, Context) \ V(Context, Context) \
V(External, Object) \ V(External, Object) \
@ -192,8 +192,6 @@ class Utils {
v8::internal::Handle<v8::internal::Context> obj); v8::internal::Handle<v8::internal::Context> obj);
static inline Local<Value> ToLocal( static inline Local<Value> ToLocal(
v8::internal::Handle<v8::internal::Object> obj); v8::internal::Handle<v8::internal::Object> obj);
static inline Local<Function> ToLocal(
v8::internal::Handle<v8::internal::JSFunction> obj);
static inline Local<Name> ToLocal( static inline Local<Name> ToLocal(
v8::internal::Handle<v8::internal::Name> obj); v8::internal::Handle<v8::internal::Name> obj);
static inline Local<String> ToLocal( static inline Local<String> ToLocal(
@ -269,6 +267,8 @@ class Utils {
v8::internal::Handle<v8::internal::JSObject> obj); v8::internal::Handle<v8::internal::JSObject> obj);
static inline Local<NativeWeakMap> NativeWeakMapToLocal( static inline Local<NativeWeakMap> NativeWeakMapToLocal(
v8::internal::Handle<v8::internal::JSWeakMap> obj); v8::internal::Handle<v8::internal::JSWeakMap> obj);
static inline Local<Function> CallableToLocal(
v8::internal::Handle<v8::internal::JSReceiver> obj);
#define DECLARE_OPEN_HANDLE(From, To) \ #define DECLARE_OPEN_HANDLE(From, To) \
static inline v8::internal::Handle<v8::internal::To> \ static inline v8::internal::Handle<v8::internal::To> \
@ -349,7 +349,6 @@ inline bool ToLocal(v8::internal::MaybeHandle<v8::internal::Object> maybe,
MAKE_TO_LOCAL(ToLocal, Context, Context) MAKE_TO_LOCAL(ToLocal, Context, Context)
MAKE_TO_LOCAL(ToLocal, Object, Value) MAKE_TO_LOCAL(ToLocal, Object, Value)
MAKE_TO_LOCAL(ToLocal, JSFunction, Function)
MAKE_TO_LOCAL(ToLocal, Name, Name) MAKE_TO_LOCAL(ToLocal, Name, Name)
MAKE_TO_LOCAL(ToLocal, String, String) MAKE_TO_LOCAL(ToLocal, String, String)
MAKE_TO_LOCAL(ToLocal, Symbol, Symbol) MAKE_TO_LOCAL(ToLocal, Symbol, Symbol)
@ -380,6 +379,7 @@ MAKE_TO_LOCAL(IntegerToLocal, Object, Integer)
MAKE_TO_LOCAL(Uint32ToLocal, Object, Uint32) MAKE_TO_LOCAL(Uint32ToLocal, Object, Uint32)
MAKE_TO_LOCAL(ExternalToLocal, JSObject, External) MAKE_TO_LOCAL(ExternalToLocal, JSObject, External)
MAKE_TO_LOCAL(NativeWeakMapToLocal, JSWeakMap, NativeWeakMap) MAKE_TO_LOCAL(NativeWeakMapToLocal, JSWeakMap, NativeWeakMap)
MAKE_TO_LOCAL(CallableToLocal, JSReceiver, Function)
#undef MAKE_TO_LOCAL_TYPED_ARRAY #undef MAKE_TO_LOCAL_TYPED_ARRAY
#undef MAKE_TO_LOCAL #undef MAKE_TO_LOCAL
@ -642,6 +642,7 @@ class Testing {
static v8::Testing::StressType stress_type_; static v8::Testing::StressType stress_type_;
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_API_H_ #endif // V8_API_H_

10
deps/v8/src/arguments.h

@ -29,10 +29,13 @@ namespace internal {
class Arguments BASE_EMBEDDED { class Arguments BASE_EMBEDDED {
public: public:
Arguments(int length, Object** arguments) Arguments(int length, Object** arguments)
: length_(length), arguments_(arguments) { } : length_(length), arguments_(arguments) {
DCHECK_GE(length_, 0);
}
Object*& operator[] (int index) { Object*& operator[] (int index) {
DCHECK(0 <= index && index < length_); DCHECK_GE(index, 0);
DCHECK_LT(static_cast<uint32_t>(index), static_cast<uint32_t>(length_));
return *(reinterpret_cast<Object**>(reinterpret_cast<intptr_t>(arguments_) - return *(reinterpret_cast<Object**>(reinterpret_cast<intptr_t>(arguments_) -
index * kPointerSize)); index * kPointerSize));
} }
@ -283,6 +286,7 @@ static Type __RT_impl_##Name(Arguments args, Isolate* isolate)
#define RUNTIME_FUNCTION_RETURN_PAIR(Name) \ #define RUNTIME_FUNCTION_RETURN_PAIR(Name) \
RUNTIME_FUNCTION_RETURNS_TYPE(ObjectPair, Name) RUNTIME_FUNCTION_RETURNS_TYPE(ObjectPair, Name)
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARGUMENTS_H_ #endif // V8_ARGUMENTS_H_

47
deps/v8/src/arm/assembler-arm-inl.h

@ -50,53 +50,11 @@ namespace internal {
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); } bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
int Register::NumAllocatableRegisters() { int DoubleRegister::NumRegisters() {
return kMaxNumAllocatableRegisters;
}
int DwVfpRegister::NumRegisters() {
return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16; return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
} }
int DwVfpRegister::NumReservedRegisters() {
return kNumReservedRegisters;
}
int DwVfpRegister::NumAllocatableRegisters() {
return NumRegisters() - kNumReservedRegisters;
}
// static
int DwVfpRegister::NumAllocatableAliasedRegisters() {
return LowDwVfpRegister::kMaxNumLowRegisters - kNumReservedRegisters;
}
int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
DCHECK(!reg.is(kDoubleRegZero));
DCHECK(!reg.is(kScratchDoubleReg));
if (reg.code() > kDoubleRegZero.code()) {
return reg.code() - kNumReservedRegisters;
}
return reg.code();
}
DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
DCHECK(index >= 0 && index < NumAllocatableRegisters());
DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
kNumReservedRegisters - 1);
if (index >= kDoubleRegZero.code()) {
return from_code(index + kNumReservedRegisters);
}
return from_code(index);
}
void RelocInfo::apply(intptr_t delta) { void RelocInfo::apply(intptr_t delta) {
if (RelocInfo::IsInternalReference(rmode_)) { if (RelocInfo::IsInternalReference(rmode_)) {
// absolute code pointer inside code object moves with the code object. // absolute code pointer inside code object moves with the code object.
@ -666,6 +624,7 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool,
} }
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_ASSEMBLER_ARM_INL_H_ #endif // V8_ARM_ASSEMBLER_ARM_INL_H_

65
deps/v8/src/arm/assembler-arm.cc

@ -52,6 +52,14 @@ namespace internal {
// snapshot. // snapshot.
static unsigned CpuFeaturesImpliedByCompiler() { static unsigned CpuFeaturesImpliedByCompiler() {
unsigned answer = 0; unsigned answer = 0;
#ifdef CAN_USE_ARMV8_INSTRUCTIONS
if (FLAG_enable_armv8) {
answer |= 1u << ARMv8;
// ARMv8 always features VFP and NEON.
answer |= 1u << ARMv7 | 1u << VFP3 | 1u << NEON | 1u << VFP32DREGS;
answer |= 1u << SUDIV | 1u << MLS;
}
#endif // CAN_USE_ARMV8_INSTRUCTIONS
#ifdef CAN_USE_ARMV7_INSTRUCTIONS #ifdef CAN_USE_ARMV7_INSTRUCTIONS
if (FLAG_enable_armv7) answer |= 1u << ARMv7; if (FLAG_enable_armv7) answer |= 1u << ARMv7;
#endif // CAN_USE_ARMV7_INSTRUCTIONS #endif // CAN_USE_ARMV7_INSTRUCTIONS
@ -81,6 +89,13 @@ void CpuFeatures::ProbeImpl(bool cross_compile) {
#ifndef __arm__ #ifndef __arm__
// For the simulator build, use whatever the flags specify. // For the simulator build, use whatever the flags specify.
if (FLAG_enable_armv8) {
supported_ |= 1u << ARMv8;
// ARMv8 always features VFP and NEON.
supported_ |= 1u << ARMv7 | 1u << VFP3 | 1u << NEON | 1u << VFP32DREGS;
supported_ |= 1u << SUDIV | 1u << MLS;
if (FLAG_enable_movw_movt) supported_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
}
if (FLAG_enable_armv7) { if (FLAG_enable_armv7) {
supported_ |= 1u << ARMv7; supported_ |= 1u << ARMv7;
if (FLAG_enable_vfp3) supported_ |= 1u << VFP3; if (FLAG_enable_vfp3) supported_ |= 1u << VFP3;
@ -154,7 +169,9 @@ void CpuFeatures::PrintTarget() {
arm_no_probe = " noprobe"; arm_no_probe = " noprobe";
#endif #endif
#if defined CAN_USE_ARMV7_INSTRUCTIONS #if defined CAN_USE_ARMV8_INSTRUCTIONS
arm_arch = "arm v8";
#elif defined CAN_USE_ARMV7_INSTRUCTIONS
arm_arch = "arm v7"; arm_arch = "arm v7";
#else #else
arm_arch = "arm v6"; arm_arch = "arm v6";
@ -192,13 +209,15 @@ void CpuFeatures::PrintTarget() {
void CpuFeatures::PrintFeatures() { void CpuFeatures::PrintFeatures() {
printf( printf(
"ARMv7=%d VFP3=%d VFP32DREGS=%d NEON=%d SUDIV=%d UNALIGNED_ACCESSES=%d " "ARMv8=%d ARMv7=%d VFP3=%d VFP32DREGS=%d NEON=%d SUDIV=%d MLS=%d"
"MOVW_MOVT_IMMEDIATE_LOADS=%d COHERENT_CACHE=%d", "UNALIGNED_ACCESSES=%d MOVW_MOVT_IMMEDIATE_LOADS=%d COHERENT_CACHE=%d",
CpuFeatures::IsSupported(ARMv8),
CpuFeatures::IsSupported(ARMv7), CpuFeatures::IsSupported(ARMv7),
CpuFeatures::IsSupported(VFP3), CpuFeatures::IsSupported(VFP3),
CpuFeatures::IsSupported(VFP32DREGS), CpuFeatures::IsSupported(VFP32DREGS),
CpuFeatures::IsSupported(NEON), CpuFeatures::IsSupported(NEON),
CpuFeatures::IsSupported(SUDIV), CpuFeatures::IsSupported(SUDIV),
CpuFeatures::IsSupported(MLS),
CpuFeatures::IsSupported(UNALIGNED_ACCESSES), CpuFeatures::IsSupported(UNALIGNED_ACCESSES),
CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS), CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS),
CpuFeatures::IsSupported(COHERENT_CACHE)); CpuFeatures::IsSupported(COHERENT_CACHE));
@ -213,18 +232,6 @@ void CpuFeatures::PrintFeatures() {
} }
// -----------------------------------------------------------------------------
// Implementation of DwVfpRegister
const char* DwVfpRegister::AllocationIndexToString(int index) {
DCHECK(index >= 0 && index < NumAllocatableRegisters());
DCHECK(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
kNumReservedRegisters - 1);
if (index >= kDoubleRegZero.code()) index += kNumReservedRegisters;
return VFPRegisters::Name(index, true);
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Implementation of RelocInfo // Implementation of RelocInfo
@ -398,26 +405,26 @@ NeonListOperand::NeonListOperand(DoubleRegister base, int registers_count) {
// str(r, MemOperand(sp, 4, NegPreIndex), al) instruction (aka push(r)) // str(r, MemOperand(sp, 4, NegPreIndex), al) instruction (aka push(r))
// register r is not encoded. // register r is not encoded.
const Instr kPushRegPattern = const Instr kPushRegPattern =
al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; al | B26 | 4 | NegPreIndex | Register::kCode_sp * B16;
// ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
// register r is not encoded. // register r is not encoded.
const Instr kPopRegPattern = const Instr kPopRegPattern =
al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; al | B26 | L | 4 | PostIndex | Register::kCode_sp * B16;
// ldr rd, [pc, #offset] // ldr rd, [pc, #offset]
const Instr kLdrPCImmedMask = 15 * B24 | 7 * B20 | 15 * B16; const Instr kLdrPCImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPCImmedPattern = 5 * B24 | L | kRegister_pc_Code * B16; const Instr kLdrPCImmedPattern = 5 * B24 | L | Register::kCode_pc * B16;
// ldr rd, [pp, #offset] // ldr rd, [pp, #offset]
const Instr kLdrPpImmedMask = 15 * B24 | 7 * B20 | 15 * B16; const Instr kLdrPpImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPpImmedPattern = 5 * B24 | L | kRegister_r8_Code * B16; const Instr kLdrPpImmedPattern = 5 * B24 | L | Register::kCode_r8 * B16;
// ldr rd, [pp, rn] // ldr rd, [pp, rn]
const Instr kLdrPpRegMask = 15 * B24 | 7 * B20 | 15 * B16; const Instr kLdrPpRegMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPpRegPattern = 7 * B24 | L | kRegister_r8_Code * B16; const Instr kLdrPpRegPattern = 7 * B24 | L | Register::kCode_r8 * B16;
// vldr dd, [pc, #offset] // vldr dd, [pc, #offset]
const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8; const Instr kVldrDPCPattern = 13 * B24 | L | Register::kCode_pc * B16 | 11 * B8;
// vldr dd, [pp, #offset] // vldr dd, [pp, #offset]
const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
const Instr kVldrDPpPattern = 13 * B24 | L | kRegister_r8_Code * B16 | 11 * B8; const Instr kVldrDPpPattern = 13 * B24 | L | Register::kCode_r8 * B16 | 11 * B8;
// blxcc rm // blxcc rm
const Instr kBlxRegMask = const Instr kBlxRegMask =
15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
@ -444,13 +451,13 @@ const Instr kAndBicFlip = 0xe * B21;
// A mask for the Rd register for push, pop, ldr, str instructions. // A mask for the Rd register for push, pop, ldr, str instructions.
const Instr kLdrRegFpOffsetPattern = const Instr kLdrRegFpOffsetPattern =
al | B26 | L | Offset | kRegister_fp_Code * B16; al | B26 | L | Offset | Register::kCode_fp * B16;
const Instr kStrRegFpOffsetPattern = const Instr kStrRegFpOffsetPattern =
al | B26 | Offset | kRegister_fp_Code * B16; al | B26 | Offset | Register::kCode_fp * B16;
const Instr kLdrRegFpNegOffsetPattern = const Instr kLdrRegFpNegOffsetPattern =
al | B26 | L | NegOffset | kRegister_fp_Code * B16; al | B26 | L | NegOffset | Register::kCode_fp * B16;
const Instr kStrRegFpNegOffsetPattern = const Instr kStrRegFpNegOffsetPattern =
al | B26 | NegOffset | kRegister_fp_Code * B16; al | B26 | NegOffset | Register::kCode_fp * B16;
const Instr kLdrStrInstrTypeMask = 0xffff0000; const Instr kLdrStrInstrTypeMask = 0xffff0000;
@ -626,21 +633,21 @@ Instr Assembler::SetAddRegisterImmediateOffset(Instr instr, int offset) {
Register Assembler::GetRd(Instr instr) { Register Assembler::GetRd(Instr instr) {
Register reg; Register reg;
reg.code_ = Instruction::RdValue(instr); reg.reg_code = Instruction::RdValue(instr);
return reg; return reg;
} }
Register Assembler::GetRn(Instr instr) { Register Assembler::GetRn(Instr instr) {
Register reg; Register reg;
reg.code_ = Instruction::RnValue(instr); reg.reg_code = Instruction::RnValue(instr);
return reg; return reg;
} }
Register Assembler::GetRm(Instr instr) { Register Assembler::GetRm(Instr instr) {
Register reg; Register reg;
reg.code_ = Instruction::RmValue(instr); reg.reg_code = Instruction::RmValue(instr);
return reg; return reg;
} }

254
deps/v8/src/arm/assembler-arm.h

@ -45,11 +45,35 @@
#include "src/arm/constants-arm.h" #include "src/arm/constants-arm.h"
#include "src/assembler.h" #include "src/assembler.h"
#include "src/compiler.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
// clang-format off
#define GENERAL_REGISTERS(V) \
V(r0) V(r1) V(r2) V(r3) V(r4) V(r5) V(r6) V(r7) \
V(r8) V(r9) V(r10) V(fp) V(ip) V(sp) V(lr) V(pc)
#define ALLOCATABLE_GENERAL_REGISTERS(V) \
V(r0) V(r1) V(r2) V(r3) V(r4) V(r5) V(r6) V(r7) V(r8)
#define DOUBLE_REGISTERS(V) \
V(d0) V(d1) V(d2) V(d3) V(d4) V(d5) V(d6) V(d7) \
V(d8) V(d9) V(d10) V(d11) V(d12) V(d13) V(d14) V(d15) \
V(d16) V(d17) V(d18) V(d19) V(d20) V(d21) V(d22) V(d23) \
V(d24) V(d25) V(d26) V(d27) V(d28) V(d29) V(d30) V(d31)
#define ALLOCATABLE_DOUBLE_REGISTERS(V) \
V(d0) V(d1) V(d2) V(d3) V(d4) V(d5) V(d6) V(d7) \
V(d8) V(d9) V(d10) V(d11) V(d12) V(d13) \
V(d16) V(d17) V(d18) V(d19) V(d20) V(d21) V(d22) V(d23) \
V(d24) V(d25) V(d26) V(d27) V(d28) V(d29) V(d30) V(d31)
#define ALLOCATABLE_NO_VFP32_DOUBLE_REGISTERS(V) \
V(d0) V(d1) V(d2) V(d3) V(d4) V(d5) V(d6) V(d7) \
V(d8) V(d9) V(d10) V(d11) V(d12) V(d13) \
// clang-format on
// CPU Registers. // CPU Registers.
// //
// 1) We would prefer to use an enum, but enum values are assignment- // 1) We would prefer to use an enum, but enum values are assignment-
@ -71,190 +95,123 @@ namespace internal {
// mode. This way we get the compile-time error checking in debug mode // mode. This way we get the compile-time error checking in debug mode
// and best performance in optimized code. // and best performance in optimized code.
// These constants are used in several locations, including static initializers
const int kRegister_no_reg_Code = -1;
const int kRegister_r0_Code = 0;
const int kRegister_r1_Code = 1;
const int kRegister_r2_Code = 2;
const int kRegister_r3_Code = 3;
const int kRegister_r4_Code = 4;
const int kRegister_r5_Code = 5;
const int kRegister_r6_Code = 6;
const int kRegister_r7_Code = 7;
const int kRegister_r8_Code = 8;
const int kRegister_r9_Code = 9;
const int kRegister_r10_Code = 10;
const int kRegister_fp_Code = 11;
const int kRegister_ip_Code = 12;
const int kRegister_sp_Code = 13;
const int kRegister_lr_Code = 14;
const int kRegister_pc_Code = 15;
// Core register
struct Register { struct Register {
static const int kNumRegisters = 16; enum Code {
static const int kMaxNumAllocatableRegisters = #define REGISTER_CODE(R) kCode_##R,
FLAG_enable_embedded_constant_pool ? 8 : 9; GENERAL_REGISTERS(REGISTER_CODE)
static const int kSizeInBytes = 4; #undef REGISTER_CODE
kAfterLast,
inline static int NumAllocatableRegisters(); kCode_no_reg = -1
};
static int ToAllocationIndex(Register reg) {
DCHECK(reg.code() < kMaxNumAllocatableRegisters);
return reg.code();
}
static Register FromAllocationIndex(int index) {
DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
return from_code(index);
}
static const char* AllocationIndexToString(int index) { static const int kNumRegisters = Code::kAfterLast;
DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
const char* const names[] = {
"r0",
"r1",
"r2",
"r3",
"r4",
"r5",
"r6",
"r7",
"r8",
};
if (FLAG_enable_embedded_constant_pool && (index >= 7)) {
return names[index + 1];
}
return names[index];
}
static Register from_code(int code) { static Register from_code(int code) {
Register r = { code }; DCHECK(code >= 0);
DCHECK(code < kNumRegisters);
Register r = {code};
return r; return r;
} }
const char* ToString();
bool is_valid() const { return 0 <= code_ && code_ < kNumRegisters; } bool IsAllocatable() const;
bool is(Register reg) const { return code_ == reg.code_; } bool is_valid() const { return 0 <= reg_code && reg_code < kNumRegisters; }
bool is(Register reg) const { return reg_code == reg.reg_code; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
int bit() const { int bit() const {
DCHECK(is_valid()); DCHECK(is_valid());
return 1 << code_; return 1 << reg_code;
} }
void set_code(int code) { void set_code(int code) {
code_ = code; reg_code = code;
DCHECK(is_valid()); DCHECK(is_valid());
} }
// Unfortunately we can't make this private in a struct. // Unfortunately we can't make this private in a struct.
int code_; int reg_code;
}; };
const Register no_reg = { kRegister_no_reg_Code }; // r7: context register
// r8: constant pool pointer register if FLAG_enable_embedded_constant_pool.
const Register r0 = { kRegister_r0_Code }; // r9: lithium scratch
const Register r1 = { kRegister_r1_Code }; #define DECLARE_REGISTER(R) const Register R = {Register::kCode_##R};
const Register r2 = { kRegister_r2_Code }; GENERAL_REGISTERS(DECLARE_REGISTER)
const Register r3 = { kRegister_r3_Code }; #undef DECLARE_REGISTER
const Register r4 = { kRegister_r4_Code }; const Register no_reg = {Register::kCode_no_reg};
const Register r5 = { kRegister_r5_Code };
const Register r6 = { kRegister_r6_Code };
// Used as context register.
const Register r7 = {kRegister_r7_Code};
// Used as constant pool pointer register if FLAG_enable_embedded_constant_pool.
const Register r8 = { kRegister_r8_Code };
// Used as lithium codegen scratch register.
const Register r9 = { kRegister_r9_Code };
// Used as roots register.
const Register r10 = { kRegister_r10_Code };
const Register fp = { kRegister_fp_Code };
const Register ip = { kRegister_ip_Code };
const Register sp = { kRegister_sp_Code };
const Register lr = { kRegister_lr_Code };
const Register pc = { kRegister_pc_Code };
// Single word VFP register. // Single word VFP register.
struct SwVfpRegister { struct SwVfpRegister {
static const int kSizeInBytes = 4; static const int kSizeInBytes = 4;
bool is_valid() const { return 0 <= code_ && code_ < 32; } bool is_valid() const { return 0 <= reg_code && reg_code < 32; }
bool is(SwVfpRegister reg) const { return code_ == reg.code_; } bool is(SwVfpRegister reg) const { return reg_code == reg.reg_code; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
int bit() const { int bit() const {
DCHECK(is_valid()); DCHECK(is_valid());
return 1 << code_; return 1 << reg_code;
} }
void split_code(int* vm, int* m) const { void split_code(int* vm, int* m) const {
DCHECK(is_valid()); DCHECK(is_valid());
*m = code_ & 0x1; *m = reg_code & 0x1;
*vm = code_ >> 1; *vm = reg_code >> 1;
} }
int code_; int reg_code;
}; };
// Double word VFP register. // Double word VFP register.
struct DwVfpRegister { struct DoubleRegister {
static const int kMaxNumRegisters = 32; enum Code {
#define REGISTER_CODE(R) kCode_##R,
DOUBLE_REGISTERS(REGISTER_CODE)
#undef REGISTER_CODE
kAfterLast,
kCode_no_reg = -1
};
static const int kMaxNumRegisters = Code::kAfterLast;
inline static int NumRegisters();
// A few double registers are reserved: one as a scratch register and one to // A few double registers are reserved: one as a scratch register and one to
// hold 0.0, that does not fit in the immediate field of vmov instructions. // hold 0.0, that does not fit in the immediate field of vmov instructions.
// d14: 0.0 // d14: 0.0
// d15: scratch register. // d15: scratch register.
static const int kNumReservedRegisters = 2;
static const int kMaxNumAllocatableRegisters = kMaxNumRegisters -
kNumReservedRegisters;
static const int kSizeInBytes = 8; static const int kSizeInBytes = 8;
// Note: the number of registers can be different at snapshot and run-time. const char* ToString();
// Any code included in the snapshot must be able to run both with 16 or 32 bool IsAllocatable() const;
// registers. bool is_valid() const { return 0 <= reg_code && reg_code < kMaxNumRegisters; }
inline static int NumRegisters(); bool is(DoubleRegister reg) const { return reg_code == reg.reg_code; }
inline static int NumReservedRegisters();
inline static int NumAllocatableRegisters();
// TODO(turbofan): This is a temporary work-around required because our
// register allocator does not yet support the aliasing of single/double
// registers on ARM.
inline static int NumAllocatableAliasedRegisters();
inline static int ToAllocationIndex(DwVfpRegister reg);
static const char* AllocationIndexToString(int index);
inline static DwVfpRegister FromAllocationIndex(int index);
static DwVfpRegister from_code(int code) {
DwVfpRegister r = { code };
return r;
}
bool is_valid() const {
return 0 <= code_ && code_ < kMaxNumRegisters;
}
bool is(DwVfpRegister reg) const { return code_ == reg.code_; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
int bit() const { int bit() const {
DCHECK(is_valid()); DCHECK(is_valid());
return 1 << code_; return 1 << reg_code;
}
static DoubleRegister from_code(int code) {
DoubleRegister r = {code};
return r;
} }
void split_code(int* vm, int* m) const { void split_code(int* vm, int* m) const {
DCHECK(is_valid()); DCHECK(is_valid());
*m = (code_ & 0x10) >> 4; *m = (reg_code & 0x10) >> 4;
*vm = code_ & 0x0F; *vm = reg_code & 0x0F;
} }
int code_; int reg_code;
}; };
typedef DwVfpRegister DoubleRegister; typedef DoubleRegister DwVfpRegister;
// Double word VFP register d0-15. // Double word VFP register d0-15.
@ -262,7 +219,7 @@ struct LowDwVfpRegister {
public: public:
static const int kMaxNumLowRegisters = 16; static const int kMaxNumLowRegisters = 16;
operator DwVfpRegister() const { operator DwVfpRegister() const {
DwVfpRegister r = { code_ }; DwVfpRegister r = { reg_code };
return r; return r;
} }
static LowDwVfpRegister from_code(int code) { static LowDwVfpRegister from_code(int code) {
@ -271,30 +228,30 @@ struct LowDwVfpRegister {
} }
bool is_valid() const { bool is_valid() const {
return 0 <= code_ && code_ < kMaxNumLowRegisters; return 0 <= reg_code && reg_code < kMaxNumLowRegisters;
} }
bool is(DwVfpRegister reg) const { return code_ == reg.code_; } bool is(DwVfpRegister reg) const { return reg_code == reg.reg_code; }
bool is(LowDwVfpRegister reg) const { return code_ == reg.code_; } bool is(LowDwVfpRegister reg) const { return reg_code == reg.reg_code; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
SwVfpRegister low() const { SwVfpRegister low() const {
SwVfpRegister reg; SwVfpRegister reg;
reg.code_ = code_ * 2; reg.reg_code = reg_code * 2;
DCHECK(reg.is_valid()); DCHECK(reg.is_valid());
return reg; return reg;
} }
SwVfpRegister high() const { SwVfpRegister high() const {
SwVfpRegister reg; SwVfpRegister reg;
reg.code_ = (code_ * 2) + 1; reg.reg_code = (reg_code * 2) + 1;
DCHECK(reg.is_valid()); DCHECK(reg.is_valid());
return reg; return reg;
} }
int code_; int reg_code;
}; };
@ -308,21 +265,21 @@ struct QwNeonRegister {
} }
bool is_valid() const { bool is_valid() const {
return (0 <= code_) && (code_ < kMaxNumRegisters); return (0 <= reg_code) && (reg_code < kMaxNumRegisters);
} }
bool is(QwNeonRegister reg) const { return code_ == reg.code_; } bool is(QwNeonRegister reg) const { return reg_code == reg.reg_code; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
void split_code(int* vm, int* m) const { void split_code(int* vm, int* m) const {
DCHECK(is_valid()); DCHECK(is_valid());
int encoded_code = code_ << 1; int encoded_code = reg_code << 1;
*m = (encoded_code & 0x10) >> 4; *m = (encoded_code & 0x10) >> 4;
*vm = encoded_code & 0x0F; *vm = encoded_code & 0x0F;
} }
int code_; int reg_code;
}; };
@ -427,19 +384,19 @@ const QwNeonRegister q15 = { 15 };
// Coprocessor register // Coprocessor register
struct CRegister { struct CRegister {
bool is_valid() const { return 0 <= code_ && code_ < 16; } bool is_valid() const { return 0 <= reg_code && reg_code < 16; }
bool is(CRegister creg) const { return code_ == creg.code_; } bool is(CRegister creg) const { return reg_code == creg.reg_code; }
int code() const { int code() const {
DCHECK(is_valid()); DCHECK(is_valid());
return code_; return reg_code;
} }
int bit() const { int bit() const {
DCHECK(is_valid()); DCHECK(is_valid());
return 1 << code_; return 1 << reg_code;
} }
// Unfortunately we can't make this private in a struct. // Unfortunately we can't make this private in a struct.
int code_; int reg_code;
}; };
@ -1667,6 +1624,7 @@ class EnsureSpace BASE_EMBEDDED {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_ASSEMBLER_ARM_H_ #endif // V8_ARM_ASSEMBLER_ARM_H_

296
deps/v8/src/arm/builtins-arm.cc

@ -22,11 +22,12 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
BuiltinExtraArguments extra_args) { BuiltinExtraArguments extra_args) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : number of arguments excluding receiver // -- r0 : number of arguments excluding receiver
// -- r1 : called function (only guaranteed when // (only guaranteed when the called function
// extra_args requires it) // is not marked as DontAdaptArguments)
// -- r1 : called function
// -- sp[0] : last argument // -- sp[0] : last argument
// -- ... // -- ...
// -- sp[4 * (argc - 1)] : first argument (argc == r0) // -- sp[4 * (argc - 1)] : first argument
// -- sp[4 * argc] : receiver // -- sp[4 * argc] : receiver
// ----------------------------------- // -----------------------------------
__ AssertFunction(r1); __ AssertFunction(r1);
@ -48,8 +49,17 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
} }
// JumpToExternalReference expects r0 to contain the number of arguments // JumpToExternalReference expects r0 to contain the number of arguments
// including the receiver and the extra arguments. // including the receiver and the extra arguments. But r0 is only valid
// if the called function is marked as DontAdaptArguments, otherwise we
// need to load the argument count from the SharedFunctionInfo.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
__ SmiUntag(r2);
__ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
__ mov(r0, r2, LeaveCC, ne);
__ add(r0, r0, Operand(num_extra_args + 1)); __ add(r0, r0, Operand(num_extra_args + 1));
__ JumpToExternalReference(ExternalReference(id, masm->isolate())); __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
} }
@ -61,8 +71,7 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
__ ldr(result, __ ldr(result,
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ ldr(result, __ ldr(result, FieldMemOperand(result, JSGlobalObject::kNativeContextOffset));
FieldMemOperand(result, GlobalObject::kNativeContextOffset));
// Load the InternalArray function from the native context. // Load the InternalArray function from the native context.
__ ldr(result, __ ldr(result,
MemOperand(result, MemOperand(result,
@ -77,8 +86,7 @@ static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
__ ldr(result, __ ldr(result,
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ ldr(result, __ ldr(result, FieldMemOperand(result, JSGlobalObject::kNativeContextOffset));
FieldMemOperand(result, GlobalObject::kNativeContextOffset));
// Load the Array function from the native context. // Load the Array function from the native context.
__ ldr(result, __ ldr(result,
MemOperand(result, MemOperand(result,
@ -203,39 +211,42 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : number of arguments // -- r0 : number of arguments
// -- r1 : constructor function // -- r1 : constructor function
// -- r3 : original constructor
// -- lr : return address // -- lr : return address
// -- sp[(argc - n - 1) * 4] : arg[n] (zero based) // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
// -- sp[argc * 4] : receiver // -- sp[argc * 4] : receiver
// ----------------------------------- // -----------------------------------
// 1. Load the first argument into r0 and get rid of the rest (including the // 1. Load the first argument into r2 and get rid of the rest (including the
// receiver). // receiver).
{ {
Label no_arguments, done; Label no_arguments, done;
__ sub(r0, r0, Operand(1), SetCC); __ sub(r0, r0, Operand(1), SetCC);
__ b(lo, &no_arguments); __ b(lo, &no_arguments);
__ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex)); __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
__ Drop(2); __ Drop(2);
__ b(&done); __ b(&done);
__ bind(&no_arguments); __ bind(&no_arguments);
__ LoadRoot(r0, Heap::kempty_stringRootIndex); __ LoadRoot(r2, Heap::kempty_stringRootIndex);
__ Drop(1); __ Drop(1);
__ bind(&done); __ bind(&done);
} }
// 2. Make sure r0 is a string. // 2. Make sure r2 is a string.
{ {
Label convert, done_convert; Label convert, done_convert;
__ JumpIfSmi(r0, &convert); __ JumpIfSmi(r2, &convert);
__ CompareObjectType(r0, r2, r2, FIRST_NONSTRING_TYPE); __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
__ b(lo, &done_convert); __ b(lo, &done_convert);
__ bind(&convert); __ bind(&convert);
{ {
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
ToStringStub stub(masm->isolate()); ToStringStub stub(masm->isolate());
__ Push(r1); __ Push(r1, r3);
__ Move(r0, r2);
__ CallStub(&stub); __ CallStub(&stub);
__ Pop(r1); __ Move(r2, r0);
__ Pop(r1, r3);
} }
__ bind(&done_convert); __ bind(&done_convert);
} }
@ -243,13 +254,18 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// 3. Allocate a JSValue wrapper for the string. // 3. Allocate a JSValue wrapper for the string.
{ {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : the first argument // -- r2 : the first argument
// -- r1 : constructor function // -- r1 : constructor function
// -- r3 : original constructor
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
Label allocate, done_allocate; Label allocate, done_allocate, rt_call;
__ Move(r2, r0);
// Fall back to runtime if the original constructor and function differ.
__ cmp(r1, r3);
__ b(ne, &rt_call);
__ Allocate(JSValue::kSize, r0, r3, r4, &allocate, TAG_OBJECT); __ Allocate(JSValue::kSize, r0, r3, r4, &allocate, TAG_OBJECT);
__ bind(&done_allocate); __ bind(&done_allocate);
@ -273,6 +289,18 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ Pop(r1, r2); __ Pop(r1, r2);
} }
__ b(&done_allocate); __ b(&done_allocate);
// Fallback to the runtime to create new object.
__ bind(&rt_call);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r1, r2);
__ Push(r1, r3); // constructor function, original constructor
__ CallRuntime(Runtime::kNewObject, 2);
__ Pop(r1, r2);
}
__ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));
__ Ret();
} }
} }
@ -360,17 +388,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ tst(r2, r2); __ tst(r2, r2);
__ b(ne, &rt_call); __ b(ne, &rt_call);
// Fall back to runtime if the original constructor and function differ. // Verify that the original constructor is a JSFunction.
__ cmp(r1, r3); __ CompareObjectType(r3, r5, r4, JS_FUNCTION_TYPE);
__ b(ne, &rt_call); __ b(ne, &rt_call);
// Load the initial map and verify that it is in fact a map. // Load the initial map and verify that it is in fact a map.
// r1: constructor function // r3: original constructor
__ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); __ ldr(r2, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(r2, &rt_call); __ JumpIfSmi(r2, &rt_call);
__ CompareObjectType(r2, r5, r4, MAP_TYPE); __ CompareObjectType(r2, r5, r4, MAP_TYPE);
__ b(ne, &rt_call); __ b(ne, &rt_call);
// Fall back to runtime if the expected base constructor and base
// constructor differ.
__ ldr(r5, FieldMemOperand(r2, Map::kConstructorOrBackPointerOffset));
__ cmp(r1, r5);
__ b(ne, &rt_call);
// Check that the constructor is not constructing a JSFunction (see // Check that the constructor is not constructing a JSFunction (see
// comments in Runtime_NewObject in runtime.cc). In which case the // comments in Runtime_NewObject in runtime.cc). In which case the
// initial map's instance type would be JS_FUNCTION_TYPE. // initial map's instance type would be JS_FUNCTION_TYPE.
@ -393,9 +427,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ cmp(r3, Operand(Map::kSlackTrackingCounterEnd)); __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
__ b(ne, &allocate); __ b(ne, &allocate);
__ push(r1); __ Push(r1, r2);
__ Push(r2, r1); // r1 = constructor __ push(r2); // r2 = intial map
__ CallRuntime(Runtime::kFinalizeInstanceSize, 1); __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
__ pop(r2); __ pop(r2);
@ -490,8 +524,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r3: original constructor // r3: original constructor
__ bind(&rt_call); __ bind(&rt_call);
__ push(r1); // argument 2/1: constructor function __ push(r1); // constructor function
__ push(r3); // argument 3/2: original constructor __ push(r3); // original constructor
__ CallRuntime(Runtime::kNewObject, 2); __ CallRuntime(Runtime::kNewObject, 2);
__ mov(r4, r0); __ mov(r4, r0);
@ -896,21 +930,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// - Support profiler (specifically profiling_counter). // - Support profiler (specifically profiling_counter).
// - Call ProfileEntryHookStub when isolate has a function_entry_hook. // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
// - Allow simulator stop operations if FLAG_stop_at is set. // - Allow simulator stop operations if FLAG_stop_at is set.
// - Deal with sloppy mode functions which need to replace the
// receiver with the global proxy when called as functions (without an
// explicit receiver object).
// - Code aging of the BytecodeArray object. // - Code aging of the BytecodeArray object.
// - Supporting FLAG_trace.
//
// The following items are also not done here, and will probably be done using
// explicit bytecodes instead:
// - Allocating a new local context if applicable.
// - Setting up a local binding to the this function, which is used in
// derived constructors with super calls.
// - Setting new.target if required.
// - Dealing with REST parameters (only if
// https://codereview.chromium.org/1235153006 doesn't land by then).
// - Dealing with argument objects.
// Perform stack guard check. // Perform stack guard check.
{ {
@ -918,7 +938,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ LoadRoot(ip, Heap::kStackLimitRootIndex); __ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip)); __ cmp(sp, Operand(ip));
__ b(hs, &ok); __ b(hs, &ok);
__ push(kInterpreterBytecodeArrayRegister);
__ CallRuntime(Runtime::kStackGuard, 0); __ CallRuntime(Runtime::kStackGuard, 0);
__ pop(kInterpreterBytecodeArrayRegister);
__ bind(&ok); __ bind(&ok);
} }
@ -966,6 +988,66 @@ void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
} }
static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
Register limit, Register scratch) {
Label loop_header, loop_check;
__ b(al, &loop_check);
__ bind(&loop_header);
__ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
__ push(scratch);
__ bind(&loop_check);
__ cmp(index, limit);
__ b(gt, &loop_header);
}
// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r2 : the address of the first argument to be pushed. Subsequent
// arguments should be consecutive above this, in the same order as
// they are to be pushed onto the stack.
// -- r1 : the target to call (can be any Object).
// -----------------------------------
// Find the address of the last argument.
__ add(r3, r0, Operand(1)); // Add one for receiver.
__ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
__ sub(r3, r2, r3);
// Push the arguments.
Generate_InterpreterPushArgs(masm, r2, r3, r4);
// Call the target.
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : argument count (not including receiver)
// -- r3 : original constructor
// -- r1 : constructor to call
// -- r2 : address of the first argument
// -----------------------------------
// Find the address of the last argument.
__ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
__ sub(r4, r2, r4);
// Push a slot for the receiver to be constructed.
__ push(r0);
// Push the arguments.
Generate_InterpreterPushArgs(masm, r2, r4, r5);
// Call the constructor with r0, r1, and r3 unmodified.
__ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) { void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileLazy); CallRuntimePassFunction(masm, Runtime::kCompileLazy);
GenerateTailCallToReturnedCode(masm); GenerateTailCallToReturnedCode(masm);
@ -1483,70 +1565,82 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// static // static
void Builtins::Generate_CallFunction(MacroAssembler* masm) { void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver) // -- r0 : the number of arguments (not including the receiver)
// -- r1 : the function to call (checked to be a JSFunction) // -- r1 : the function to call (checked to be a JSFunction)
// ----------------------------------- // -----------------------------------
Label convert, convert_global_proxy, convert_to_object, done_convert;
__ AssertFunction(r1); __ AssertFunction(r1);
// TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
// slot is "classConstructor". // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
// Check that the function is not a "classConstructor".
Label class_constructor;
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
__ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
__ b(ne, &class_constructor);
// Enter the context of the function; ToObject has to run in the function // Enter the context of the function; ToObject has to run in the function
// context, and we also need to take the global proxy from the function // context, and we also need to take the global proxy from the function
// context in case of conversion. // context in case of conversion.
// See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
SharedFunctionInfo::kStrictModeByteOffset); SharedFunctionInfo::kStrictModeByteOffset);
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
// We need to convert the receiver for non-native sloppy mode functions. // We need to convert the receiver for non-native sloppy mode functions.
Label done_convert;
__ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset)); __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
__ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
(1 << SharedFunctionInfo::kStrictModeBitWithinByte))); (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
__ b(ne, &done_convert); __ b(ne, &done_convert);
{ {
__ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver) // -- r0 : the number of arguments (not including the receiver)
// -- r1 : the function to call (checked to be a JSFunction) // -- r1 : the function to call (checked to be a JSFunction)
// -- r2 : the shared function info. // -- r2 : the shared function info.
// -- r3 : the receiver
// -- cp : the function context. // -- cp : the function context.
// ----------------------------------- // -----------------------------------
Label convert_receiver; if (mode == ConvertReceiverMode::kNullOrUndefined) {
__ JumpIfSmi(r3, &convert_to_object);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
__ b(hs, &done_convert);
__ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
__ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy. // Patch receiver to global proxy.
__ LoadGlobalProxy(r3); __ LoadGlobalProxy(r3);
} else {
Label convert_to_object, convert_receiver;
__ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ JumpIfSmi(r3, &convert_to_object);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
__ b(hs, &done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
__ bind(&convert_global_proxy);
{
// Patch receiver to global proxy.
__ LoadGlobalProxy(r3);
}
__ b(&convert_receiver);
}
__ bind(&convert_to_object);
{
// Convert receiver using ToObject.
// TODO(bmeurer): Inline the allocation here to avoid building the frame
// in the fast case? (fall back to AllocateInNewSpace?)
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r0);
__ Push(r0, r1);
__ mov(r0, r3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ mov(r3, r0);
__ Pop(r0, r1);
__ SmiUntag(r0);
}
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ bind(&convert_receiver);
} }
__ b(&convert_receiver);
__ bind(&convert_to_object);
{
// Convert receiver using ToObject.
// TODO(bmeurer): Inline the allocation here to avoid building the frame
// in the fast case? (fall back to AllocateInNewSpace?)
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(r0);
__ Push(r0, r1);
__ mov(r0, r3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ mov(r3, r0);
__ Pop(r0, r1);
__ SmiUntag(r0);
}
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ bind(&convert_receiver);
__ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2)); __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
} }
__ bind(&done_convert); __ bind(&done_convert);
@ -1565,11 +1659,18 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm) {
ParameterCount actual(r0); ParameterCount actual(r0);
ParameterCount expected(r2); ParameterCount expected(r2);
__ InvokeCode(r3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); __ InvokeCode(r3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
// The function is a "classConstructor", need to raise an exception.
__ bind(&class_constructor);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowConstructorNonCallableError, 0);
}
} }
// static // static
void Builtins::Generate_Call(MacroAssembler* masm) { void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver) // -- r0 : the number of arguments (not including the receiver)
// -- r1 : the target to call (can be any Object). // -- r1 : the target to call (can be any Object).
@ -1579,8 +1680,8 @@ void Builtins::Generate_Call(MacroAssembler* masm) {
__ JumpIfSmi(r1, &non_callable); __ JumpIfSmi(r1, &non_callable);
__ bind(&non_smi); __ bind(&non_smi);
__ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE); __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, __ Jump(masm->isolate()->builtins()->CallFunction(mode),
eq); RelocInfo::CODE_TARGET, eq);
__ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE)); __ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE));
__ b(ne, &non_function); __ b(ne, &non_function);
@ -1601,7 +1702,9 @@ void Builtins::Generate_Call(MacroAssembler* masm) {
__ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
// Let the "call_as_function_delegate" take care of the rest. // Let the "call_as_function_delegate" take care of the rest.
__ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1); __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
__ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); __ Jump(masm->isolate()->builtins()->CallFunction(
ConvertReceiverMode::kNotNullOrUndefined),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable. // 3. Call to something that is not callable.
__ bind(&non_callable); __ bind(&non_callable);
@ -1696,35 +1799,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
} }
// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// -- r2 : the address of the first argument to be pushed. Subsequent
// arguments should be consecutive above this, in the same order as
// they are to be pushed onto the stack.
// -- r1 : the target to call (can be any Object).
// Find the address of the last argument.
__ add(r3, r0, Operand(1)); // Add one for receiver.
__ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
__ sub(r3, r2, r3);
// Push the arguments.
Label loop_header, loop_check;
__ b(al, &loop_check);
__ bind(&loop_header);
__ ldr(r4, MemOperand(r2, -kPointerSize, PostIndex));
__ push(r4);
__ bind(&loop_check);
__ cmp(r2, r3);
__ b(gt, &loop_header);
// Call the target.
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : actual number of arguments // -- r0 : actual number of arguments

186
deps/v8/src/arm/code-stubs-arm.cc

@ -974,14 +974,21 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// fp: frame pointer (restored after C call) // fp: frame pointer (restored after C call)
// sp: stack pointer (restored as callee's sp after C call) // sp: stack pointer (restored as callee's sp after C call)
// cp: current context (C callee-saved) // cp: current context (C callee-saved)
//
// If argv_in_register():
// r2: pointer to the first argument
ProfileEntryHookStub::MaybeCallEntryHook(masm); ProfileEntryHookStub::MaybeCallEntryHook(masm);
__ mov(r5, Operand(r1)); __ mov(r5, Operand(r1));
// Compute the argv pointer in a callee-saved register. if (argv_in_register()) {
__ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2)); // Move argv into the correct register.
__ sub(r1, r1, Operand(kPointerSize)); __ mov(r1, Operand(r2));
} else {
// Compute the argv pointer in a callee-saved register.
__ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
__ sub(r1, r1, Operand(kPointerSize));
}
// Enter the exit frame that transitions from JavaScript to C++. // Enter the exit frame that transitions from JavaScript to C++.
FrameScope scope(masm, StackFrame::MANUAL); FrameScope scope(masm, StackFrame::MANUAL);
@ -1057,8 +1064,15 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// r0:r1: result // r0:r1: result
// sp: stack pointer // sp: stack pointer
// fp: frame pointer // fp: frame pointer
// Callee-saved register r4 still holds argc. Register argc;
__ LeaveExitFrame(save_doubles(), r4, true); if (argv_in_register()) {
// We don't want to pop arguments so set argc to no_reg.
argc = no_reg;
} else {
// Callee-saved register r4 still holds argc.
argc = r4;
}
__ LeaveExitFrame(save_doubles(), argc, true);
__ mov(pc, lr); __ mov(pc, lr);
// Handling of exception. // Handling of exception.
@ -1587,7 +1601,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
__ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset)); __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
__ cmp(r6, Operand::Zero()); __ cmp(r6, Operand::Zero());
__ ldr(r4, MemOperand(r4, kNormalOffset), eq); __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne); __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
@ -1780,7 +1794,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the arguments boilerplate from the current native context. // Get the arguments boilerplate from the current native context.
__ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
__ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset)); __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
__ ldr(r4, MemOperand( __ ldr(r4, MemOperand(
r4, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX))); r4, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX)));
@ -2365,99 +2379,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
} }
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
// Do not transform the receiver for strict mode functions.
__ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
__ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
kSmiTagSize)));
__ b(ne, cont);
// Do not transform the receiver for native (Compilerhints already in r3).
__ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
__ b(ne, cont);
}
static void EmitSlowCase(MacroAssembler* masm, int argc) {
__ mov(r0, Operand(argc));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
// Wrap the receiver and patch it back onto the stack.
{ FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
__ push(r1);
__ mov(r0, r3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ pop(r1);
}
__ str(r0, MemOperand(sp, argc * kPointerSize));
__ jmp(cont);
}
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// r1 : the function to call
Label slow, wrap, cont;
if (needs_checks) {
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &slow);
// Goto slow case if we do not have a function.
__ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
__ b(ne, &slow);
}
// Fast-case: Invoke the function now.
// r1: pushed function
ParameterCount actual(argc);
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Compute the receiver in sloppy mode.
__ ldr(r3, MemOperand(sp, argc * kPointerSize));
if (needs_checks) {
__ JumpIfSmi(r3, &wrap);
__ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
__ b(lt, &wrap);
} else {
__ jmp(&wrap);
}
__ bind(&cont);
}
__ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm, argc);
}
if (call_as_method) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
}
void CallConstructStub::Generate(MacroAssembler* masm) { void CallConstructStub::Generate(MacroAssembler* masm) {
// r0 : number of arguments // r0 : number of arguments
// r1 : the function to call // r1 : the function to call
@ -2540,9 +2461,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
const int generic_offset = const int generic_offset =
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
Label extra_checks_or_miss, slow_start; Label extra_checks_or_miss, call;
Label slow, wrap, cont;
Label have_js_function;
int argc = arg_count(); int argc = arg_count();
ParameterCount actual(argc); ParameterCount actual(argc);
@ -2579,34 +2498,15 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ str(r3, FieldMemOperand(r2, 0)); __ str(r3, FieldMemOperand(r2, 0));
__ bind(&have_js_function); __ bind(&call);
if (CallAsMethod()) { __ mov(r0, Operand(argc));
EmitContinueIfStrictOrNative(masm, &cont); __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Compute the receiver in sloppy mode.
__ ldr(r3, MemOperand(sp, argc * kPointerSize));
__ JumpIfSmi(r3, &wrap);
__ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
__ b(lt, &wrap);
__ bind(&cont);
}
__ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
__ bind(&slow);
EmitSlowCase(masm, argc);
if (CallAsMethod()) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
__ bind(&extra_checks_or_miss); __ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site; Label uninitialized, miss, not_allocation_site;
__ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex); __ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &slow_start); __ b(eq, &call);
// Verify that r4 contains an AllocationSite // Verify that r4 contains an AllocationSite
__ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset)); __ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
@ -2642,7 +2542,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ ldr(r4, FieldMemOperand(r2, generic_offset)); __ ldr(r4, FieldMemOperand(r2, generic_offset));
__ add(r4, r4, Operand(Smi::FromInt(1))); __ add(r4, r4, Operand(Smi::FromInt(1)));
__ str(r4, FieldMemOperand(r2, generic_offset)); __ str(r4, FieldMemOperand(r2, generic_offset));
__ jmp(&slow_start); __ jmp(&call);
__ bind(&uninitialized); __ bind(&uninitialized);
@ -2681,23 +2581,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Pop(r1); __ Pop(r1);
} }
__ jmp(&have_js_function); __ jmp(&call);
// We are here because tracing is on or we encountered a MISS case we can't // We are here because tracing is on or we encountered a MISS case we can't
// handle here. // handle here.
__ bind(&miss); __ bind(&miss);
GenerateMiss(masm); GenerateMiss(masm);
// the slow case __ jmp(&call);
__ bind(&slow_start);
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &slow);
// Goto slow case if we do not have a function.
__ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
__ b(ne, &slow);
__ jmp(&have_js_function);
} }
@ -2843,7 +2734,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ bind(&slow_case_); __ bind(&slow_case_);
call_helper.BeforeCall(masm); call_helper.BeforeCall(masm);
__ push(code_); __ push(code_);
__ CallRuntime(Runtime::kCharFromCode, 1); __ CallRuntime(Runtime::kStringCharFromCode, 1);
__ Move(result_, r0); __ Move(result_, r0);
call_helper.AfterCall(masm); call_helper.AfterCall(masm);
__ jmp(&exit_); __ jmp(&exit_);
@ -3154,6 +3045,21 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
} }
void ToLengthStub::Generate(MacroAssembler* masm) {
// The ToLength stub takes one argument in r0.
Label not_smi;
__ JumpIfNotSmi(r0, &not_smi);
STATIC_ASSERT(kSmiTag == 0);
__ tst(r0, r0);
__ mov(r0, Operand(0), LeaveCC, lt);
__ Ret();
__ bind(&not_smi);
__ push(r0); // Push argument.
__ TailCallRuntime(Runtime::kToLength, 1, 1);
}
void ToStringStub::Generate(MacroAssembler* masm) { void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in r0. // The ToString stub takes one argument in r0.
Label is_number; Label is_number;

3
deps/v8/src/arm/code-stubs-arm.h

@ -311,6 +311,7 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_CODE_STUBS_ARM_H_ #endif // V8_ARM_CODE_STUBS_ARM_H_

3
deps/v8/src/arm/codegen-arm.h

@ -44,6 +44,7 @@ class MathExpGenerator : public AllStatic {
DISALLOW_COPY_AND_ASSIGN(MathExpGenerator); DISALLOW_COPY_AND_ASSIGN(MathExpGenerator);
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_CODEGEN_ARM_H_ #endif // V8_ARM_CODEGEN_ARM_H_

11
deps/v8/src/arm/constants-arm.cc

@ -51,17 +51,6 @@ const Registers::RegisterAlias Registers::aliases_[] = {
}; };
const char* Registers::Name(int reg) {
const char* result;
if ((0 <= reg) && (reg < kNumRegisters)) {
result = names_[reg];
} else {
result = "noreg";
}
return result;
}
// Support for VFP registers s0 to s31 (d0 to d15) and d16-d31. // Support for VFP registers s0 to s31 (d0 to d15) and d16-d31.
// Note that "sN:sM" is the same as "dN/2" up to d15. // Note that "sN:sM" is the same as "dN/2" up to d15.
// These register names are defined in a way to match the native disassembler // These register names are defined in a way to match the native disassembler

3
deps/v8/src/arm/constants-arm.h

@ -702,6 +702,7 @@ class VFPRegisters {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_CONSTANTS_ARM_H_ #endif // V8_ARM_CONSTANTS_ARM_H_

19
deps/v8/src/arm/deoptimizer-arm.cc

@ -5,6 +5,7 @@
#include "src/codegen.h" #include "src/codegen.h"
#include "src/deoptimizer.h" #include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h" #include "src/full-codegen/full-codegen.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h" #include "src/safepoint-table.h"
namespace v8 { namespace v8 {
@ -93,7 +94,7 @@ void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
} }
input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp())); input_->SetRegister(sp.code(), reinterpret_cast<intptr_t>(frame->sp()));
input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp())); input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); i++) { for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
input_->SetDoubleRegister(i, 0.0); input_->SetDoubleRegister(i, 0.0);
} }
@ -142,8 +143,7 @@ void Deoptimizer::TableEntryGenerator::Generate() {
// Everything but pc, lr and ip which will be saved but not restored. // Everything but pc, lr and ip which will be saved but not restored.
RegList restored_regs = kJSCallerSaved | kCalleeSaved | ip.bit(); RegList restored_regs = kJSCallerSaved | kCalleeSaved | ip.bit();
const int kDoubleRegsSize = const int kDoubleRegsSize = kDoubleSize * DwVfpRegister::kMaxNumRegisters;
kDoubleSize * DwVfpRegister::kMaxNumAllocatableRegisters;
// Save all allocatable VFP registers before messing with them. // Save all allocatable VFP registers before messing with them.
DCHECK(kDoubleRegZero.code() == 14); DCHECK(kDoubleRegZero.code() == 14);
@ -152,11 +152,11 @@ void Deoptimizer::TableEntryGenerator::Generate() {
// Check CPU flags for number of registers, setting the Z condition flag. // Check CPU flags for number of registers, setting the Z condition flag.
__ CheckFor32DRegs(ip); __ CheckFor32DRegs(ip);
// Push registers d0-d13, and possibly d16-d31, on the stack. // Push registers d0-d15, and possibly d16-d31, on the stack.
// If d16-d31 are not pushed, decrease the stack pointer instead. // If d16-d31 are not pushed, decrease the stack pointer instead.
__ vstm(db_w, sp, d16, d31, ne); __ vstm(db_w, sp, d16, d31, ne);
__ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq); __ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq);
__ vstm(db_w, sp, d0, d13); __ vstm(db_w, sp, d0, d15);
// Push all 16 registers (needed to populate FrameDescription::registers_). // Push all 16 registers (needed to populate FrameDescription::registers_).
// TODO(1588) Note that using pc with stm is deprecated, so we should perhaps // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
@ -211,9 +211,12 @@ void Deoptimizer::TableEntryGenerator::Generate() {
// Copy VFP registers to // Copy VFP registers to
// double_registers_[DoubleRegister::kMaxNumAllocatableRegisters] // double_registers_[DoubleRegister::kMaxNumAllocatableRegisters]
int double_regs_offset = FrameDescription::double_registers_offset(); int double_regs_offset = FrameDescription::double_registers_offset();
for (int i = 0; i < DwVfpRegister::kMaxNumAllocatableRegisters; ++i) { const RegisterConfiguration* config =
int dst_offset = i * kDoubleSize + double_regs_offset; RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
int src_offset = i * kDoubleSize + kNumberOfRegisters * kPointerSize; for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
int code = config->GetAllocatableDoubleCode(i);
int dst_offset = code * kDoubleSize + double_regs_offset;
int src_offset = code * kDoubleSize + kNumberOfRegisters * kPointerSize;
__ vldr(d0, sp, src_offset); __ vldr(d0, sp, src_offset);
__ vstr(d0, r1, dst_offset); __ vstr(d0, r1, dst_offset);
} }

2
deps/v8/src/arm/disasm-arm.cc

@ -1923,7 +1923,7 @@ const char* NameConverter::NameOfConstant(byte* addr) const {
const char* NameConverter::NameOfCPURegister(int reg) const { const char* NameConverter::NameOfCPURegister(int reg) const {
return v8::internal::Registers::Name(reg); return v8::internal::Register::from_code(reg).ToString();
} }

3
deps/v8/src/arm/frames-arm.h

@ -128,6 +128,7 @@ class JavaScriptFrameConstants : public AllStatic {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_FRAMES_ARM_H_ #endif // V8_ARM_FRAMES_ARM_H_

46
deps/v8/src/arm/interface-descriptors-arm.cc

@ -80,14 +80,6 @@ const Register GrowArrayElementsDescriptor::ObjectRegister() { return r0; }
const Register GrowArrayElementsDescriptor::KeyRegister() { return r3; } const Register GrowArrayElementsDescriptor::KeyRegister() { return r3; }
void VectorStoreTransitionDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
SlotRegister(), VectorRegister(), MapRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewClosureDescriptor::InitializePlatformSpecific( void FastNewClosureDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = {r2}; Register registers[] = {r2};
@ -109,6 +101,10 @@ void ToNumberDescriptor::InitializePlatformSpecific(
} }
// static
const Register ToLengthDescriptor::ReceiverRegister() { return r0; }
// static // static
const Register ToStringDescriptor::ReceiverRegister() { return r0; } const Register ToStringDescriptor::ReceiverRegister() { return r0; }
@ -230,6 +226,13 @@ void AllocateHeapNumberDescriptor::InitializePlatformSpecific(
} }
void AllocateInNewSpaceDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r0};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ArrayConstructorConstantArgCountDescriptor::InitializePlatformSpecific( void ArrayConstructorConstantArgCountDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// register state // register state
@ -417,16 +420,39 @@ void MathRoundVariantCallFromOptimizedCodeDescriptor::
} }
void PushArgsAndCallDescriptor::InitializePlatformSpecific( void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
r0, // argument count (including receiver) r0, // argument count (not including receiver)
r2, // address of first argument r2, // address of first argument
r1 // the target callable to be call r1 // the target callable to be call
}; };
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
r0, // argument count (not including receiver)
r3, // original constructor
r1, // constructor to call
r2 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterCEntryDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
r0, // argument count (argc)
r2, // address of first argument (argv)
r1 // the runtime function to call
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

4
deps/v8/src/arm/interface-descriptors-arm.h

@ -20,7 +20,7 @@ class PlatformInterfaceDescriptor {
private: private:
TargetAddressStorageMode storage_mode_; TargetAddressStorageMode storage_mode_;
}; };
} } // namespace internal
} // namespace v8::internal } // namespace v8
#endif // V8_ARM_INTERFACE_DESCRIPTORS_ARM_H_ #endif // V8_ARM_INTERFACE_DESCRIPTORS_ARM_H_

28
deps/v8/src/arm/macro-assembler-arm.cc

@ -11,6 +11,7 @@
#include "src/bootstrapper.h" #include "src/bootstrapper.h"
#include "src/codegen.h" #include "src/codegen.h"
#include "src/debug/debug.h" #include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h" #include "src/runtime/runtime.h"
#include "src/arm/macro-assembler-arm.h" #include "src/arm/macro-assembler-arm.h"
@ -23,8 +24,8 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
generating_stub_(false), generating_stub_(false),
has_frame_(false) { has_frame_(false) {
if (isolate() != NULL) { if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), code_object_ =
isolate()); Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
} }
} }
@ -759,7 +760,9 @@ MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
// Number of d-regs not known at snapshot time. // Number of d-regs not known at snapshot time.
DCHECK(!serializer_enabled()); DCHECK(!serializer_enabled());
// General purpose registers are pushed last on the stack. // General purpose registers are pushed last on the stack.
int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; const RegisterConfiguration* config =
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
int doubles_size = config->num_allocatable_double_registers() * kDoubleSize;
int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
return MemOperand(sp, doubles_size + register_offset); return MemOperand(sp, doubles_size + register_offset);
} }
@ -1474,7 +1477,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
int offset = int offset =
Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
ldr(scratch, FieldMemOperand(scratch, offset)); ldr(scratch, FieldMemOperand(scratch, offset));
ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset));
// Check the context is a native context. // Check the context is a native context.
if (emit_debug_code()) { if (emit_debug_code()) {
@ -2503,7 +2506,7 @@ void MacroAssembler::GetBuiltinFunction(Register target,
// Load the builtins object into target register. // Load the builtins object into target register.
ldr(target, ldr(target,
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
ldr(target, FieldMemOperand(target, GlobalObject::kNativeContextOffset)); ldr(target, FieldMemOperand(target, JSGlobalObject::kNativeContextOffset));
// Load the JavaScript builtin function from the builtins object. // Load the JavaScript builtin function from the builtins object.
ldr(target, ContextOperand(target, native_context_index)); ldr(target, ContextOperand(target, native_context_index));
} }
@ -2650,7 +2653,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
void MacroAssembler::LoadGlobalProxy(Register dst) { void MacroAssembler::LoadGlobalProxy(Register dst) {
ldr(dst, GlobalObjectOperand()); ldr(dst, GlobalObjectOperand());
ldr(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); ldr(dst, FieldMemOperand(dst, JSGlobalObject::kGlobalProxyOffset));
} }
@ -2663,7 +2666,7 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
// Load the global or builtins object from the current context. // Load the global or builtins object from the current context.
ldr(scratch, ldr(scratch,
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map. // Check that the function's map is the same as the expected cached map.
ldr(scratch, ldr(scratch,
@ -2687,8 +2690,8 @@ void MacroAssembler::LoadGlobalFunction(int index, Register function) {
ldr(function, ldr(function,
MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
// Load the native context from the global or builtins object. // Load the native context from the global or builtins object.
ldr(function, FieldMemOperand(function, ldr(function,
GlobalObject::kNativeContextOffset)); FieldMemOperand(function, JSGlobalObject::kNativeContextOffset));
// Load the function from the native context. // Load the function from the native context.
ldr(function, MemOperand(function, Context::SlotOffset(index))); ldr(function, MemOperand(function, Context::SlotOffset(index)));
} }
@ -3578,8 +3581,11 @@ Register GetRegisterThatIsNotOneOf(Register reg1,
if (reg5.is_valid()) regs |= reg5.bit(); if (reg5.is_valid()) regs |= reg5.bit();
if (reg6.is_valid()) regs |= reg6.bit(); if (reg6.is_valid()) regs |= reg6.bit();
for (int i = 0; i < Register::NumAllocatableRegisters(); i++) { const RegisterConfiguration* config =
Register candidate = Register::FromAllocationIndex(i); RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
int code = config->GetAllocatableGeneralCode(i);
Register candidate = Register::from_code(code);
if (regs & candidate.bit()) continue; if (regs & candidate.bit()) continue;
return candidate; return candidate;
} }

32
deps/v8/src/arm/macro-assembler-arm.h

@ -14,17 +14,18 @@ namespace v8 {
namespace internal { namespace internal {
// Give alias names to registers for calling conventions. // Give alias names to registers for calling conventions.
const Register kReturnRegister0 = {kRegister_r0_Code}; const Register kReturnRegister0 = {Register::kCode_r0};
const Register kReturnRegister1 = {kRegister_r1_Code}; const Register kReturnRegister1 = {Register::kCode_r1};
const Register kJSFunctionRegister = {kRegister_r1_Code}; const Register kJSFunctionRegister = {Register::kCode_r1};
const Register kContextRegister = {kRegister_r7_Code}; const Register kContextRegister = {Register::kCode_r7};
const Register kInterpreterAccumulatorRegister = {kRegister_r0_Code}; const Register kInterpreterAccumulatorRegister = {Register::kCode_r0};
const Register kInterpreterRegisterFileRegister = {kRegister_r4_Code}; const Register kInterpreterRegisterFileRegister = {Register::kCode_r4};
const Register kInterpreterBytecodeOffsetRegister = {kRegister_r5_Code}; const Register kInterpreterBytecodeOffsetRegister = {Register::kCode_r5};
const Register kInterpreterBytecodeArrayRegister = {kRegister_r6_Code}; const Register kInterpreterBytecodeArrayRegister = {Register::kCode_r6};
const Register kInterpreterDispatchTableRegister = {kRegister_r8_Code}; const Register kInterpreterDispatchTableRegister = {Register::kCode_r8};
const Register kRuntimeCallFunctionRegister = {kRegister_r1_Code}; const Register kJavaScriptCallArgCountRegister = {Register::kCode_r0};
const Register kRuntimeCallArgCountRegister = {kRegister_r0_Code}; const Register kRuntimeCallFunctionRegister = {Register::kCode_r1};
const Register kRuntimeCallArgCountRegister = {Register::kCode_r0};
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// Static helper functions // Static helper functions
@ -36,9 +37,9 @@ inline MemOperand FieldMemOperand(Register object, int offset) {
// Give alias names to registers // Give alias names to registers
const Register cp = { kRegister_r7_Code }; // JavaScript context pointer. const Register cp = {Register::kCode_r7}; // JavaScript context pointer.
const Register pp = { kRegister_r8_Code }; // Constant pool pointer. const Register pp = {Register::kCode_r8}; // Constant pool pointer.
const Register kRootRegister = { kRegister_r10_Code }; // Roots array pointer. const Register kRootRegister = {Register::kCode_r10}; // Roots array pointer.
// Flags used for AllocateHeapNumber // Flags used for AllocateHeapNumber
enum TaggingMode { enum TaggingMode {
@ -1564,6 +1565,7 @@ inline MemOperand GlobalObjectOperand() {
#endif #endif
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_ #endif // V8_ARM_MACRO_ASSEMBLER_ARM_H_

3
deps/v8/src/arm/simulator-arm.cc

@ -298,7 +298,8 @@ void ArmDebugger::Debug() {
if (strcmp(arg1, "all") == 0) { if (strcmp(arg1, "all") == 0) {
for (int i = 0; i < kNumRegisters; i++) { for (int i = 0; i < kNumRegisters; i++) {
value = GetRegisterValue(i); value = GetRegisterValue(i);
PrintF("%3s: 0x%08x %10d", Registers::Name(i), value, value); PrintF("%3s: 0x%08x %10d", Register::from_code(i).ToString(),
value, value);
if ((argc == 3 && strcmp(arg2, "fp") == 0) && if ((argc == 3 && strcmp(arg2, "fp") == 0) &&
i < 8 && i < 8 &&
(i % 2) == 0) { (i % 2) == 0) {

6
deps/v8/src/arm/simulator-arm.h

@ -55,7 +55,8 @@ class SimulatorStack : public v8::internal::AllStatic {
static inline void UnregisterCTryCatch() { } static inline void UnregisterCTryCatch() { }
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#else // !defined(USE_SIMULATOR) #else // !defined(USE_SIMULATOR)
// Running with a simulator. // Running with a simulator.
@ -459,7 +460,8 @@ class SimulatorStack : public v8::internal::AllStatic {
} }
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // !defined(USE_SIMULATOR) #endif // !defined(USE_SIMULATOR)
#endif // V8_ARM_SIMULATOR_ARM_H_ #endif // V8_ARM_SIMULATOR_ARM_H_

9
deps/v8/src/arm64/assembler-arm64-inl.h

@ -41,7 +41,7 @@ void RelocInfo::set_target_address(Address target,
} }
inline unsigned CPURegister::code() const { inline int CPURegister::code() const {
DCHECK(IsValid()); DCHECK(IsValid());
return reg_code; return reg_code;
} }
@ -54,12 +54,12 @@ inline CPURegister::RegisterType CPURegister::type() const {
inline RegList CPURegister::Bit() const { inline RegList CPURegister::Bit() const {
DCHECK(reg_code < (sizeof(RegList) * kBitsPerByte)); DCHECK(static_cast<size_t>(reg_code) < (sizeof(RegList) * kBitsPerByte));
return IsValid() ? 1UL << reg_code : 0; return IsValid() ? 1UL << reg_code : 0;
} }
inline unsigned CPURegister::SizeInBits() const { inline int CPURegister::SizeInBits() const {
DCHECK(IsValid()); DCHECK(IsValid());
return reg_size; return reg_size;
} }
@ -1259,6 +1259,7 @@ void Assembler::ClearRecordedAstId() {
} }
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_

40
deps/v8/src/arm64/assembler-arm64.cc

@ -35,6 +35,7 @@
#include "src/arm64/frames-arm64.h" #include "src/arm64/frames-arm64.h"
#include "src/base/bits.h" #include "src/base/bits.h"
#include "src/base/cpu.h" #include "src/base/cpu.h"
#include "src/register-configuration.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -109,17 +110,17 @@ void CPURegList::RemoveCalleeSaved() {
} }
CPURegList CPURegList::GetCalleeSaved(unsigned size) { CPURegList CPURegList::GetCalleeSaved(int size) {
return CPURegList(CPURegister::kRegister, size, 19, 29); return CPURegList(CPURegister::kRegister, size, 19, 29);
} }
CPURegList CPURegList::GetCalleeSavedFP(unsigned size) { CPURegList CPURegList::GetCalleeSavedFP(int size) {
return CPURegList(CPURegister::kFPRegister, size, 8, 15); return CPURegList(CPURegister::kFPRegister, size, 8, 15);
} }
CPURegList CPURegList::GetCallerSaved(unsigned size) { CPURegList CPURegList::GetCallerSaved(int size) {
// Registers x0-x18 and lr (x30) are caller-saved. // Registers x0-x18 and lr (x30) are caller-saved.
CPURegList list = CPURegList(CPURegister::kRegister, size, 0, 18); CPURegList list = CPURegList(CPURegister::kRegister, size, 0, 18);
list.Combine(lr); list.Combine(lr);
@ -127,7 +128,7 @@ CPURegList CPURegList::GetCallerSaved(unsigned size) {
} }
CPURegList CPURegList::GetCallerSavedFP(unsigned size) { CPURegList CPURegList::GetCallerSavedFP(int size) {
// Registers d0-d7 and d16-d31 are caller-saved. // Registers d0-d7 and d16-d31 are caller-saved.
CPURegList list = CPURegList(CPURegister::kFPRegister, size, 0, 7); CPURegList list = CPURegList(CPURegister::kFPRegister, size, 0, 7);
list.Combine(CPURegList(CPURegister::kFPRegister, size, 16, 31)); list.Combine(CPURegList(CPURegister::kFPRegister, size, 16, 31));
@ -192,8 +193,11 @@ bool RelocInfo::IsInConstantPool() {
Register GetAllocatableRegisterThatIsNotOneOf(Register reg1, Register reg2, Register GetAllocatableRegisterThatIsNotOneOf(Register reg1, Register reg2,
Register reg3, Register reg4) { Register reg3, Register reg4) {
CPURegList regs(reg1, reg2, reg3, reg4); CPURegList regs(reg1, reg2, reg3, reg4);
for (int i = 0; i < Register::NumAllocatableRegisters(); i++) { const RegisterConfiguration* config =
Register candidate = Register::FromAllocationIndex(i); RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
int code = config->GetAllocatableDoubleCode(i);
Register candidate = Register::from_code(code);
if (regs.IncludesAliasOf(candidate)) continue; if (regs.IncludesAliasOf(candidate)) continue;
return candidate; return candidate;
} }
@ -1275,10 +1279,8 @@ void Assembler::rorv(const Register& rd,
// Bitfield operations. // Bitfield operations.
void Assembler::bfm(const Register& rd, void Assembler::bfm(const Register& rd, const Register& rn, int immr,
const Register& rn, int imms) {
unsigned immr,
unsigned imms) {
DCHECK(rd.SizeInBits() == rn.SizeInBits()); DCHECK(rd.SizeInBits() == rn.SizeInBits());
Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset); Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset);
Emit(SF(rd) | BFM | N | Emit(SF(rd) | BFM | N |
@ -1288,10 +1290,8 @@ void Assembler::bfm(const Register& rd,
} }
void Assembler::sbfm(const Register& rd, void Assembler::sbfm(const Register& rd, const Register& rn, int immr,
const Register& rn, int imms) {
unsigned immr,
unsigned imms) {
DCHECK(rd.Is64Bits() || rn.Is32Bits()); DCHECK(rd.Is64Bits() || rn.Is32Bits());
Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset); Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset);
Emit(SF(rd) | SBFM | N | Emit(SF(rd) | SBFM | N |
@ -1301,10 +1301,8 @@ void Assembler::sbfm(const Register& rd,
} }
void Assembler::ubfm(const Register& rd, void Assembler::ubfm(const Register& rd, const Register& rn, int immr,
const Register& rn, int imms) {
unsigned immr,
unsigned imms) {
DCHECK(rd.SizeInBits() == rn.SizeInBits()); DCHECK(rd.SizeInBits() == rn.SizeInBits());
Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset); Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset);
Emit(SF(rd) | UBFM | N | Emit(SF(rd) | UBFM | N |
@ -1314,10 +1312,8 @@ void Assembler::ubfm(const Register& rd,
} }
void Assembler::extr(const Register& rd, void Assembler::extr(const Register& rd, const Register& rn, const Register& rm,
const Register& rn, int lsb) {
const Register& rm,
unsigned lsb) {
DCHECK(rd.SizeInBits() == rn.SizeInBits()); DCHECK(rd.SizeInBits() == rn.SizeInBits());
DCHECK(rd.SizeInBits() == rm.SizeInBits()); DCHECK(rd.SizeInBits() == rm.SizeInBits());
Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset); Instr N = SF(rd) >> (kSFOffset - kBitfieldNOffset);

283
deps/v8/src/arm64/assembler-arm64.h

@ -12,7 +12,6 @@
#include "src/arm64/instructions-arm64.h" #include "src/arm64/instructions-arm64.h"
#include "src/assembler.h" #include "src/assembler.h"
#include "src/compiler.h"
#include "src/globals.h" #include "src/globals.h"
#include "src/utils.h" #include "src/utils.h"
@ -23,12 +22,36 @@ namespace internal {
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Registers. // Registers.
#define REGISTER_CODE_LIST(R) \ // clang-format off
R(0) R(1) R(2) R(3) R(4) R(5) R(6) R(7) \ #define GENERAL_REGISTER_CODE_LIST(R) \
R(8) R(9) R(10) R(11) R(12) R(13) R(14) R(15) \ R(0) R(1) R(2) R(3) R(4) R(5) R(6) R(7) \
R(16) R(17) R(18) R(19) R(20) R(21) R(22) R(23) \ R(8) R(9) R(10) R(11) R(12) R(13) R(14) R(15) \
R(24) R(25) R(26) R(27) R(28) R(29) R(30) R(31) R(16) R(17) R(18) R(19) R(20) R(21) R(22) R(23) \
R(24) R(25) R(26) R(27) R(28) R(29) R(30) R(31)
#define GENERAL_REGISTERS(R) \
R(x0) R(x1) R(x2) R(x3) R(x4) R(x5) R(x6) R(x7) \
R(x8) R(x9) R(x10) R(x11) R(x12) R(x13) R(x14) R(x15) \
R(x16) R(x17) R(x18) R(x19) R(x20) R(x21) R(x22) R(x23) \
R(x24) R(x25) R(x26) R(x27) R(x28) R(x29) R(x30) R(x31)
#define ALLOCATABLE_GENERAL_REGISTERS(R) \
R(x0) R(x1) R(x2) R(x3) R(x4) R(x5) R(x6) R(x7) \
R(x8) R(x9) R(x10) R(x11) R(x12) R(x13) R(x14) R(x15) \
R(x18) R(x19) R(x20) R(x21) R(x22) R(x23) R(x24) R(x27)
#define DOUBLE_REGISTERS(R) \
R(d0) R(d1) R(d2) R(d3) R(d4) R(d5) R(d6) R(d7) \
R(d8) R(d9) R(d10) R(d11) R(d12) R(d13) R(d14) R(d15) \
R(d16) R(d17) R(d18) R(d19) R(d20) R(d21) R(d22) R(d23) \
R(d24) R(d25) R(d26) R(d27) R(d28) R(d29) R(d30) R(d31)
#define ALLOCATABLE_DOUBLE_REGISTERS(R) \
R(d0) R(d1) R(d2) R(d3) R(d4) R(d5) R(d6) R(d7) \
R(d8) R(d9) R(d10) R(d11) R(d12) R(d13) R(d14) R(d16) \
R(d17) R(d18) R(d19) R(d20) R(d21) R(d22) R(d23) R(d24) \
R(d25) R(d26) R(d27) R(d28)
// clang-format on
static const int kRegListSizeInBits = sizeof(RegList) * kBitsPerByte; static const int kRegListSizeInBits = sizeof(RegList) * kBitsPerByte;
@ -40,6 +63,14 @@ struct FPRegister;
struct CPURegister { struct CPURegister {
enum Code {
#define REGISTER_CODE(R) kCode_##R,
GENERAL_REGISTERS(REGISTER_CODE)
#undef REGISTER_CODE
kAfterLast,
kCode_no_reg = -1
};
enum RegisterType { enum RegisterType {
// The kInvalid value is used to detect uninitialized static instances, // The kInvalid value is used to detect uninitialized static instances,
// which are always zero-initialized before any constructors are called. // which are always zero-initialized before any constructors are called.
@ -49,15 +80,15 @@ struct CPURegister {
kNoRegister kNoRegister
}; };
static CPURegister Create(unsigned code, unsigned size, RegisterType type) { static CPURegister Create(int code, int size, RegisterType type) {
CPURegister r = {code, size, type}; CPURegister r = {code, size, type};
return r; return r;
} }
unsigned code() const; int code() const;
RegisterType type() const; RegisterType type() const;
RegList Bit() const; RegList Bit() const;
unsigned SizeInBits() const; int SizeInBits() const;
int SizeInBytes() const; int SizeInBytes() const;
bool Is32Bits() const; bool Is32Bits() const;
bool Is64Bits() const; bool Is64Bits() const;
@ -86,14 +117,14 @@ struct CPURegister {
bool is(const CPURegister& other) const { return Is(other); } bool is(const CPURegister& other) const { return Is(other); }
bool is_valid() const { return IsValid(); } bool is_valid() const { return IsValid(); }
unsigned reg_code; int reg_code;
unsigned reg_size; int reg_size;
RegisterType reg_type; RegisterType reg_type;
}; };
struct Register : public CPURegister { struct Register : public CPURegister {
static Register Create(unsigned code, unsigned size) { static Register Create(int code, int size) {
return Register(CPURegister::Create(code, size, CPURegister::kRegister)); return Register(CPURegister::Create(code, size, CPURegister::kRegister));
} }
@ -117,6 +148,8 @@ struct Register : public CPURegister {
DCHECK(IsValidOrNone()); DCHECK(IsValidOrNone());
} }
const char* ToString();
bool IsAllocatable() const;
bool IsValid() const { bool IsValid() const {
DCHECK(IsRegister() || IsNone()); DCHECK(IsRegister() || IsNone());
return IsValidRegister(); return IsValidRegister();
@ -130,6 +163,7 @@ struct Register : public CPURegister {
// A few of them may be unused for now. // A few of them may be unused for now.
static const int kNumRegisters = kNumberOfRegisters; static const int kNumRegisters = kNumberOfRegisters;
STATIC_ASSERT(kNumRegisters == Code::kAfterLast);
static int NumRegisters() { return kNumRegisters; } static int NumRegisters() { return kNumRegisters; }
// We allow crankshaft to use the following registers: // We allow crankshaft to use the following registers:
@ -146,70 +180,6 @@ struct Register : public CPURegister {
// - "low range" // - "low range"
// - "high range" // - "high range"
// - "context" // - "context"
static const unsigned kAllocatableLowRangeBegin = 0;
static const unsigned kAllocatableLowRangeEnd = 15;
static const unsigned kAllocatableHighRangeBegin = 18;
static const unsigned kAllocatableHighRangeEnd = 24;
static const unsigned kAllocatableContext = 27;
// Gap between low and high ranges.
static const int kAllocatableRangeGapSize =
(kAllocatableHighRangeBegin - kAllocatableLowRangeEnd) - 1;
static const int kMaxNumAllocatableRegisters =
(kAllocatableLowRangeEnd - kAllocatableLowRangeBegin + 1) +
(kAllocatableHighRangeEnd - kAllocatableHighRangeBegin + 1) + 1; // cp
static int NumAllocatableRegisters() { return kMaxNumAllocatableRegisters; }
// Return true if the register is one that crankshaft can allocate.
bool IsAllocatable() const {
return ((reg_code == kAllocatableContext) ||
(reg_code <= kAllocatableLowRangeEnd) ||
((reg_code >= kAllocatableHighRangeBegin) &&
(reg_code <= kAllocatableHighRangeEnd)));
}
static Register FromAllocationIndex(unsigned index) {
DCHECK(index < static_cast<unsigned>(NumAllocatableRegisters()));
// cp is the last allocatable register.
if (index == (static_cast<unsigned>(NumAllocatableRegisters() - 1))) {
return from_code(kAllocatableContext);
}
// Handle low and high ranges.
return (index <= kAllocatableLowRangeEnd)
? from_code(index)
: from_code(index + kAllocatableRangeGapSize);
}
static const char* AllocationIndexToString(int index) {
DCHECK((index >= 0) && (index < NumAllocatableRegisters()));
DCHECK((kAllocatableLowRangeBegin == 0) &&
(kAllocatableLowRangeEnd == 15) &&
(kAllocatableHighRangeBegin == 18) &&
(kAllocatableHighRangeEnd == 24) &&
(kAllocatableContext == 27));
const char* const names[] = {
"x0", "x1", "x2", "x3", "x4",
"x5", "x6", "x7", "x8", "x9",
"x10", "x11", "x12", "x13", "x14",
"x15", "x18", "x19", "x20", "x21",
"x22", "x23", "x24", "x27",
};
return names[index];
}
static int ToAllocationIndex(Register reg) {
DCHECK(reg.IsAllocatable());
unsigned code = reg.code();
if (code == kAllocatableContext) {
return NumAllocatableRegisters() - 1;
}
return (code <= kAllocatableLowRangeEnd)
? code
: code - kAllocatableRangeGapSize;
}
static Register from_code(int code) { static Register from_code(int code) {
// Always return an X register. // Always return an X register.
@ -221,7 +191,15 @@ struct Register : public CPURegister {
struct FPRegister : public CPURegister { struct FPRegister : public CPURegister {
static FPRegister Create(unsigned code, unsigned size) { enum Code {
#define REGISTER_CODE(R) kCode_##R,
DOUBLE_REGISTERS(REGISTER_CODE)
#undef REGISTER_CODE
kAfterLast,
kCode_no_reg = -1
};
static FPRegister Create(int code, int size) {
return FPRegister( return FPRegister(
CPURegister::Create(code, size, CPURegister::kFPRegister)); CPURegister::Create(code, size, CPURegister::kFPRegister));
} }
@ -246,6 +224,8 @@ struct FPRegister : public CPURegister {
DCHECK(IsValidOrNone()); DCHECK(IsValidOrNone());
} }
const char* ToString();
bool IsAllocatable() const;
bool IsValid() const { bool IsValid() const {
DCHECK(IsFPRegister() || IsNone()); DCHECK(IsFPRegister() || IsNone());
return IsValidFPRegister(); return IsValidFPRegister();
@ -256,69 +236,12 @@ struct FPRegister : public CPURegister {
// Start of V8 compatibility section --------------------- // Start of V8 compatibility section ---------------------
static const int kMaxNumRegisters = kNumberOfFPRegisters; static const int kMaxNumRegisters = kNumberOfFPRegisters;
STATIC_ASSERT(kMaxNumRegisters == Code::kAfterLast);
// Crankshaft can use all the FP registers except: // Crankshaft can use all the FP registers except:
// - d15 which is used to keep the 0 double value // - d15 which is used to keep the 0 double value
// - d30 which is used in crankshaft as a double scratch register // - d30 which is used in crankshaft as a double scratch register
// - d31 which is used in the MacroAssembler as a double scratch register // - d31 which is used in the MacroAssembler as a double scratch register
static const unsigned kAllocatableLowRangeBegin = 0;
static const unsigned kAllocatableLowRangeEnd = 14;
static const unsigned kAllocatableHighRangeBegin = 16;
static const unsigned kAllocatableHighRangeEnd = 28;
static const RegList kAllocatableFPRegisters = 0x1fff7fff;
// Gap between low and high ranges.
static const int kAllocatableRangeGapSize =
(kAllocatableHighRangeBegin - kAllocatableLowRangeEnd) - 1;
static const int kMaxNumAllocatableRegisters =
(kAllocatableLowRangeEnd - kAllocatableLowRangeBegin + 1) +
(kAllocatableHighRangeEnd - kAllocatableHighRangeBegin + 1);
static int NumAllocatableRegisters() { return kMaxNumAllocatableRegisters; }
// TODO(turbofan): Proper float32 support.
static int NumAllocatableAliasedRegisters() {
return NumAllocatableRegisters();
}
// Return true if the register is one that crankshaft can allocate.
bool IsAllocatable() const {
return (Bit() & kAllocatableFPRegisters) != 0;
}
static FPRegister FromAllocationIndex(unsigned int index) {
DCHECK(index < static_cast<unsigned>(NumAllocatableRegisters()));
return (index <= kAllocatableLowRangeEnd)
? from_code(index)
: from_code(index + kAllocatableRangeGapSize);
}
static const char* AllocationIndexToString(int index) {
DCHECK((index >= 0) && (index < NumAllocatableRegisters()));
DCHECK((kAllocatableLowRangeBegin == 0) &&
(kAllocatableLowRangeEnd == 14) &&
(kAllocatableHighRangeBegin == 16) &&
(kAllocatableHighRangeEnd == 28));
const char* const names[] = {
"d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
"d8", "d9", "d10", "d11", "d12", "d13", "d14",
"d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
"d24", "d25", "d26", "d27", "d28"
};
return names[index];
}
static int ToAllocationIndex(FPRegister reg) {
DCHECK(reg.IsAllocatable());
unsigned code = reg.code();
return (code <= kAllocatableLowRangeEnd)
? code
: code - kAllocatableRangeGapSize;
}
static FPRegister from_code(int code) { static FPRegister from_code(int code) {
// Always return a D register. // Always return a D register.
return FPRegister::Create(code, kDRegSizeInBits); return FPRegister::Create(code, kDRegSizeInBits);
@ -361,7 +284,7 @@ INITIALIZE_REGISTER(Register, no_reg, 0, 0, CPURegister::kNoRegister);
kWRegSizeInBits, CPURegister::kRegister); \ kWRegSizeInBits, CPURegister::kRegister); \
INITIALIZE_REGISTER(Register, x##N, N, \ INITIALIZE_REGISTER(Register, x##N, N, \
kXRegSizeInBits, CPURegister::kRegister); kXRegSizeInBits, CPURegister::kRegister);
REGISTER_CODE_LIST(DEFINE_REGISTERS) GENERAL_REGISTER_CODE_LIST(DEFINE_REGISTERS)
#undef DEFINE_REGISTERS #undef DEFINE_REGISTERS
INITIALIZE_REGISTER(Register, wcsp, kSPRegInternalCode, kWRegSizeInBits, INITIALIZE_REGISTER(Register, wcsp, kSPRegInternalCode, kWRegSizeInBits,
@ -374,7 +297,7 @@ INITIALIZE_REGISTER(Register, csp, kSPRegInternalCode, kXRegSizeInBits,
kSRegSizeInBits, CPURegister::kFPRegister); \ kSRegSizeInBits, CPURegister::kFPRegister); \
INITIALIZE_REGISTER(FPRegister, d##N, N, \ INITIALIZE_REGISTER(FPRegister, d##N, N, \
kDRegSizeInBits, CPURegister::kFPRegister); kDRegSizeInBits, CPURegister::kFPRegister);
REGISTER_CODE_LIST(DEFINE_FPREGISTERS) GENERAL_REGISTER_CODE_LIST(DEFINE_FPREGISTERS)
#undef DEFINE_FPREGISTERS #undef DEFINE_FPREGISTERS
#undef INITIALIZE_REGISTER #undef INITIALIZE_REGISTER
@ -461,13 +384,13 @@ class CPURegList {
DCHECK(IsValid()); DCHECK(IsValid());
} }
CPURegList(CPURegister::RegisterType type, unsigned size, RegList list) CPURegList(CPURegister::RegisterType type, int size, RegList list)
: list_(list), size_(size), type_(type) { : list_(list), size_(size), type_(type) {
DCHECK(IsValid()); DCHECK(IsValid());
} }
CPURegList(CPURegister::RegisterType type, unsigned size, CPURegList(CPURegister::RegisterType type, int size, int first_reg,
unsigned first_reg, unsigned last_reg) int last_reg)
: size_(size), type_(type) { : size_(size), type_(type) {
DCHECK(((type == CPURegister::kRegister) && DCHECK(((type == CPURegister::kRegister) &&
(last_reg < kNumberOfRegisters)) || (last_reg < kNumberOfRegisters)) ||
@ -524,12 +447,12 @@ class CPURegList {
CPURegister PopHighestIndex(); CPURegister PopHighestIndex();
// AAPCS64 callee-saved registers. // AAPCS64 callee-saved registers.
static CPURegList GetCalleeSaved(unsigned size = kXRegSizeInBits); static CPURegList GetCalleeSaved(int size = kXRegSizeInBits);
static CPURegList GetCalleeSavedFP(unsigned size = kDRegSizeInBits); static CPURegList GetCalleeSavedFP(int size = kDRegSizeInBits);
// AAPCS64 caller-saved registers. Note that this includes lr. // AAPCS64 caller-saved registers. Note that this includes lr.
static CPURegList GetCallerSaved(unsigned size = kXRegSizeInBits); static CPURegList GetCallerSaved(int size = kXRegSizeInBits);
static CPURegList GetCallerSavedFP(unsigned size = kDRegSizeInBits); static CPURegList GetCallerSavedFP(int size = kDRegSizeInBits);
// Registers saved as safepoints. // Registers saved as safepoints.
static CPURegList GetSafepointSavedRegisters(); static CPURegList GetSafepointSavedRegisters();
@ -557,25 +480,25 @@ class CPURegList {
return CountSetBits(list_, kRegListSizeInBits); return CountSetBits(list_, kRegListSizeInBits);
} }
unsigned RegisterSizeInBits() const { int RegisterSizeInBits() const {
DCHECK(IsValid()); DCHECK(IsValid());
return size_; return size_;
} }
unsigned RegisterSizeInBytes() const { int RegisterSizeInBytes() const {
int size_in_bits = RegisterSizeInBits(); int size_in_bits = RegisterSizeInBits();
DCHECK((size_in_bits % kBitsPerByte) == 0); DCHECK((size_in_bits % kBitsPerByte) == 0);
return size_in_bits / kBitsPerByte; return size_in_bits / kBitsPerByte;
} }
unsigned TotalSizeInBytes() const { int TotalSizeInBytes() const {
DCHECK(IsValid()); DCHECK(IsValid());
return RegisterSizeInBytes() * Count(); return RegisterSizeInBytes() * Count();
} }
private: private:
RegList list_; RegList list_;
unsigned size_; int size_;
CPURegister::RegisterType type_; CPURegister::RegisterType type_;
bool IsValid() const { bool IsValid() const {
@ -1197,39 +1120,24 @@ class Assembler : public AssemblerBase {
// Bitfield instructions. // Bitfield instructions.
// Bitfield move. // Bitfield move.
void bfm(const Register& rd, void bfm(const Register& rd, const Register& rn, int immr, int imms);
const Register& rn,
unsigned immr,
unsigned imms);
// Signed bitfield move. // Signed bitfield move.
void sbfm(const Register& rd, void sbfm(const Register& rd, const Register& rn, int immr, int imms);
const Register& rn,
unsigned immr,
unsigned imms);
// Unsigned bitfield move. // Unsigned bitfield move.
void ubfm(const Register& rd, void ubfm(const Register& rd, const Register& rn, int immr, int imms);
const Register& rn,
unsigned immr,
unsigned imms);
// Bfm aliases. // Bfm aliases.
// Bitfield insert. // Bitfield insert.
void bfi(const Register& rd, void bfi(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
bfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1); bfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1);
} }
// Bitfield extract and insert low. // Bitfield extract and insert low.
void bfxil(const Register& rd, void bfxil(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
bfm(rd, rn, lsb, lsb + width - 1); bfm(rd, rn, lsb, lsb + width - 1);
@ -1237,26 +1145,20 @@ class Assembler : public AssemblerBase {
// Sbfm aliases. // Sbfm aliases.
// Arithmetic shift right. // Arithmetic shift right.
void asr(const Register& rd, const Register& rn, unsigned shift) { void asr(const Register& rd, const Register& rn, int shift) {
DCHECK(shift < rd.SizeInBits()); DCHECK(shift < rd.SizeInBits());
sbfm(rd, rn, shift, rd.SizeInBits() - 1); sbfm(rd, rn, shift, rd.SizeInBits() - 1);
} }
// Signed bitfield insert in zero. // Signed bitfield insert in zero.
void sbfiz(const Register& rd, void sbfiz(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
sbfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1); sbfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1);
} }
// Signed bitfield extract. // Signed bitfield extract.
void sbfx(const Register& rd, void sbfx(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
sbfm(rd, rn, lsb, lsb + width - 1); sbfm(rd, rn, lsb, lsb + width - 1);
@ -1279,33 +1181,27 @@ class Assembler : public AssemblerBase {
// Ubfm aliases. // Ubfm aliases.
// Logical shift left. // Logical shift left.
void lsl(const Register& rd, const Register& rn, unsigned shift) { void lsl(const Register& rd, const Register& rn, int shift) {
unsigned reg_size = rd.SizeInBits(); int reg_size = rd.SizeInBits();
DCHECK(shift < reg_size); DCHECK(shift < reg_size);
ubfm(rd, rn, (reg_size - shift) % reg_size, reg_size - shift - 1); ubfm(rd, rn, (reg_size - shift) % reg_size, reg_size - shift - 1);
} }
// Logical shift right. // Logical shift right.
void lsr(const Register& rd, const Register& rn, unsigned shift) { void lsr(const Register& rd, const Register& rn, int shift) {
DCHECK(shift < rd.SizeInBits()); DCHECK(shift < rd.SizeInBits());
ubfm(rd, rn, shift, rd.SizeInBits() - 1); ubfm(rd, rn, shift, rd.SizeInBits() - 1);
} }
// Unsigned bitfield insert in zero. // Unsigned bitfield insert in zero.
void ubfiz(const Register& rd, void ubfiz(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
ubfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1); ubfm(rd, rn, (rd.SizeInBits() - lsb) & (rd.SizeInBits() - 1), width - 1);
} }
// Unsigned bitfield extract. // Unsigned bitfield extract.
void ubfx(const Register& rd, void ubfx(const Register& rd, const Register& rn, int lsb, int width) {
const Register& rn,
unsigned lsb,
unsigned width) {
DCHECK(width >= 1); DCHECK(width >= 1);
DCHECK(lsb + width <= rn.SizeInBits()); DCHECK(lsb + width <= rn.SizeInBits());
ubfm(rd, rn, lsb, lsb + width - 1); ubfm(rd, rn, lsb, lsb + width - 1);
@ -1327,10 +1223,8 @@ class Assembler : public AssemblerBase {
} }
// Extract. // Extract.
void extr(const Register& rd, void extr(const Register& rd, const Register& rn, const Register& rm,
const Register& rn, int lsb);
const Register& rm,
unsigned lsb);
// Conditional select: rd = cond ? rn : rm. // Conditional select: rd = cond ? rn : rm.
void csel(const Register& rd, void csel(const Register& rd,
@ -2296,6 +2190,7 @@ class EnsureSpace BASE_EMBEDDED {
} }
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_ASSEMBLER_ARM64_H_ #endif // V8_ARM64_ASSEMBLER_ARM64_H_

236
deps/v8/src/arm64/builtins-arm64.cc

@ -22,8 +22,7 @@ namespace internal {
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) { static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the native context. // Load the native context.
__ Ldr(result, GlobalObjectMemOperand()); __ Ldr(result, GlobalObjectMemOperand());
__ Ldr(result, __ Ldr(result, FieldMemOperand(result, JSGlobalObject::kNativeContextOffset));
FieldMemOperand(result, GlobalObject::kNativeContextOffset));
// Load the InternalArray function from the native context. // Load the InternalArray function from the native context.
__ Ldr(result, __ Ldr(result,
MemOperand(result, MemOperand(result,
@ -36,8 +35,7 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
Register result) { Register result) {
// Load the native context. // Load the native context.
__ Ldr(result, GlobalObjectMemOperand()); __ Ldr(result, GlobalObjectMemOperand());
__ Ldr(result, __ Ldr(result, FieldMemOperand(result, JSGlobalObject::kNativeContextOffset));
FieldMemOperand(result, GlobalObject::kNativeContextOffset));
// Load the InternalArray function from the native context. // Load the InternalArray function from the native context.
__ Ldr(result, ContextMemOperand(result, __ Ldr(result, ContextMemOperand(result,
Context::INTERNAL_ARRAY_FUNCTION_INDEX)); Context::INTERNAL_ARRAY_FUNCTION_INDEX));
@ -49,11 +47,12 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
BuiltinExtraArguments extra_args) { BuiltinExtraArguments extra_args) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : number of arguments excluding receiver // -- x0 : number of arguments excluding receiver
// -- x1 : called function (only guaranteed when // (only guaranteed when the called function
// extra_args requires it) // is not marked as DontAdaptArguments)
// -- x1 : called function
// -- sp[0] : last argument // -- sp[0] : last argument
// -- ... // -- ...
// -- sp[4 * (argc - 1)] : first argument (argc == x0) // -- sp[4 * (argc - 1)] : first argument
// -- sp[4 * argc] : receiver // -- sp[4 * argc] : receiver
// ----------------------------------- // -----------------------------------
__ AssertFunction(x1); __ AssertFunction(x1);
@ -75,8 +74,16 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
} }
// JumpToExternalReference expects x0 to contain the number of arguments // JumpToExternalReference expects x0 to contain the number of arguments
// including the receiver and the extra arguments. // including the receiver and the extra arguments. But x0 is only valid
// if the called function is marked as DontAdaptArguments, otherwise we
// need to load the argument count from the SharedFunctionInfo.
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(
x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
__ Cmp(x2, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
__ Csel(x0, x0, x2, eq);
__ Add(x0, x0, num_extra_args + 1); __ Add(x0, x0, num_extra_args + 1);
__ JumpToExternalReference(ExternalReference(id, masm->isolate())); __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
} }
@ -200,6 +207,7 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : number of arguments // -- x0 : number of arguments
// -- x1 : constructor function // -- x1 : constructor function
// -- x3 : original constructor
// -- lr : return address // -- lr : return address
// -- sp[(argc - n - 1) * 8] : arg[n] (zero based) // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
// -- sp[argc * 8] : receiver // -- sp[argc * 8] : receiver
@ -225,16 +233,16 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
{ {
Label convert, done_convert; Label convert, done_convert;
__ JumpIfSmi(x2, &convert); __ JumpIfSmi(x2, &convert);
__ JumpIfObjectType(x2, x3, x3, FIRST_NONSTRING_TYPE, &done_convert, lo); __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
__ Bind(&convert); __ Bind(&convert);
{ {
FrameScope scope(masm, StackFrame::INTERNAL); FrameScope scope(masm, StackFrame::INTERNAL);
ToStringStub stub(masm->isolate()); ToStringStub stub(masm->isolate());
__ Push(x1); __ Push(x1, x3);
__ Move(x0, x2); __ Move(x0, x2);
__ CallStub(&stub); __ CallStub(&stub);
__ Move(x2, x0); __ Move(x2, x0);
__ Pop(x1); __ Pop(x1, x3);
} }
__ Bind(&done_convert); __ Bind(&done_convert);
} }
@ -242,12 +250,18 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// 3. Allocate a JSValue wrapper for the string. // 3. Allocate a JSValue wrapper for the string.
{ {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x1 : constructor function
// -- x2 : the first argument // -- x2 : the first argument
// -- x1 : constructor function
// -- x3 : original constructor
// -- lr : return address // -- lr : return address
// ----------------------------------- // -----------------------------------
Label allocate, done_allocate; Label allocate, done_allocate, rt_call;
// Fall back to runtime if the original constructor and function differ.
__ cmp(x1, x3);
__ B(ne, &rt_call);
__ Allocate(JSValue::kSize, x0, x3, x4, &allocate, TAG_OBJECT); __ Allocate(JSValue::kSize, x0, x3, x4, &allocate, TAG_OBJECT);
__ Bind(&done_allocate); __ Bind(&done_allocate);
@ -271,6 +285,17 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ Pop(x2, x1); __ Pop(x2, x1);
} }
__ B(&done_allocate); __ B(&done_allocate);
// Fallback to the runtime to create new object.
__ bind(&rt_call);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1, x2, x1, x3); // constructor function, original constructor
__ CallRuntime(Runtime::kNewObject, 2);
__ Pop(x2, x1);
}
__ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
__ Ret();
} }
} }
@ -327,7 +352,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// -- x0 : number of arguments // -- x0 : number of arguments
// -- x1 : constructor function // -- x1 : constructor function
// -- x2 : allocation site or undefined // -- x2 : allocation site or undefined
// -- x3 : original constructor // -- x3 : original constructor
// -- lr : return address // -- lr : return address
// -- sp[...]: constructor arguments // -- sp[...]: constructor arguments
// ----------------------------------- // -----------------------------------
@ -365,18 +390,25 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ Ldr(x2, MemOperand(x2)); __ Ldr(x2, MemOperand(x2));
__ Cbnz(x2, &rt_call); __ Cbnz(x2, &rt_call);
// Fall back to runtime if the original constructor and function differ. // Verify that the original constructor is a JSFunction.
__ Cmp(constructor, original_constructor); __ JumpIfNotObjectType(original_constructor, x10, x11, JS_FUNCTION_TYPE,
__ B(ne, &rt_call); &rt_call);
// Load the initial map and verify that it is in fact a map. // Load the initial map and verify that it is in fact a map.
Register init_map = x2; Register init_map = x2;
__ Ldr(init_map, __ Ldr(init_map,
FieldMemOperand(constructor, FieldMemOperand(original_constructor,
JSFunction::kPrototypeOrInitialMapOffset)); JSFunction::kPrototypeOrInitialMapOffset));
__ JumpIfSmi(init_map, &rt_call); __ JumpIfSmi(init_map, &rt_call);
__ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call); __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);
// Fall back to runtime if the expected base constructor and base
// constructor differ.
__ Ldr(x10,
FieldMemOperand(init_map, Map::kConstructorOrBackPointerOffset));
__ Cmp(constructor, x10);
__ B(ne, &rt_call);
// Check that the constructor is not constructing a JSFunction (see // Check that the constructor is not constructing a JSFunction (see
// comments in Runtime_NewObject in runtime.cc). In which case the initial // comments in Runtime_NewObject in runtime.cc). In which case the initial
// map's instance type would be JS_FUNCTION_TYPE. // map's instance type would be JS_FUNCTION_TYPE.
@ -399,9 +431,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd)); __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
__ B(ne, &allocate); __ B(ne, &allocate);
// Push the constructor and map to the stack, and the constructor again // Push the constructor and map to the stack, and the map again
// as argument to the runtime call. // as argument to the runtime call.
__ Push(constructor, init_map, constructor); __ Push(constructor, init_map, init_map);
__ CallRuntime(Runtime::kFinalizeInstanceSize, 1); __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
__ Pop(init_map, constructor); __ Pop(init_map, constructor);
__ Mov(constructon_count, Operand(Map::kSlackTrackingCounterEnd - 1)); __ Mov(constructon_count, Operand(Map::kSlackTrackingCounterEnd - 1));
@ -699,7 +731,6 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
ParameterCount actual(x0); ParameterCount actual(x0);
__ InvokeFunction(x1, actual, CALL_FUNCTION, NullCallWrapper()); __ InvokeFunction(x1, actual, CALL_FUNCTION, NullCallWrapper());
// Restore the context from the frame. // Restore the context from the frame.
// x0: result // x0: result
// jssp[0]: number of arguments (smi-tagged) // jssp[0]: number of arguments (smi-tagged)
@ -924,28 +955,16 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// - Support profiler (specifically profiling_counter). // - Support profiler (specifically profiling_counter).
// - Call ProfileEntryHookStub when isolate has a function_entry_hook. // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
// - Allow simulator stop operations if FLAG_stop_at is set. // - Allow simulator stop operations if FLAG_stop_at is set.
// - Deal with sloppy mode functions which need to replace the
// receiver with the global proxy when called as functions (without an
// explicit receiver object).
// - Code aging of the BytecodeArray object. // - Code aging of the BytecodeArray object.
// - Supporting FLAG_trace.
//
// The following items are also not done here, and will probably be done using
// explicit bytecodes instead:
// - Allocating a new local context if applicable.
// - Setting up a local binding to the this function, which is used in
// derived constructors with super calls.
// - Setting new.target if required.
// - Dealing with REST parameters (only if
// https://codereview.chromium.org/1235153006 doesn't land by then).
// - Dealing with argument objects.
// Perform stack guard check. // Perform stack guard check.
{ {
Label ok; Label ok;
__ CompareRoot(jssp, Heap::kStackLimitRootIndex); __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
__ B(hs, &ok); __ B(hs, &ok);
__ Push(kInterpreterBytecodeArrayRegister);
__ CallRuntime(Runtime::kStackGuard, 0); __ CallRuntime(Runtime::kStackGuard, 0);
__ Pop(kInterpreterBytecodeArrayRegister);
__ Bind(&ok); __ Bind(&ok);
} }
@ -1542,69 +1561,83 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// static // static
void Builtins::Generate_CallFunction(MacroAssembler* masm) { void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver) // -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSFunction) // -- x1 : the function to call (checked to be a JSFunction)
// ----------------------------------- // -----------------------------------
Label convert, convert_global_proxy, convert_to_object, done_convert;
__ AssertFunction(x1); __ AssertFunction(x1);
// TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
// slot is "classConstructor". // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
// Check that function is not a "classConstructor".
Label class_constructor;
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
__ TestAndBranchIfAnySet(
w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
(1 << SharedFunctionInfo::kIsSubclassConstructor) |
(1 << SharedFunctionInfo::kIsBaseConstructor),
&class_constructor);
// Enter the context of the function; ToObject has to run in the function // Enter the context of the function; ToObject has to run in the function
// context, and we also need to take the global proxy from the function // context, and we also need to take the global proxy from the function
// context in case of conversion. // context in case of conversion.
// See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
// We need to convert the receiver for non-native sloppy mode functions. // We need to convert the receiver for non-native sloppy mode functions.
__ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); Label done_convert;
__ TestAndBranchIfAnySet(w3, __ TestAndBranchIfAnySet(w3,
(1 << SharedFunctionInfo::kNative) | (1 << SharedFunctionInfo::kNative) |
(1 << SharedFunctionInfo::kStrictModeFunction), (1 << SharedFunctionInfo::kStrictModeFunction),
&done_convert); &done_convert);
{ {
__ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver) // -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSFunction) // -- x1 : the function to call (checked to be a JSFunction)
// -- x2 : the shared function info. // -- x2 : the shared function info.
// -- x3 : the receiver
// -- cp : the function context. // -- cp : the function context.
// ----------------------------------- // -----------------------------------
Label convert_receiver; if (mode == ConvertReceiverMode::kNullOrUndefined) {
__ JumpIfSmi(x3, &convert_to_object);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
__ B(hs, &done_convert);
__ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
__ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
__ Bind(&convert_global_proxy);
{
// Patch receiver to global proxy. // Patch receiver to global proxy.
__ LoadGlobalProxy(x3); __ LoadGlobalProxy(x3);
} else {
Label convert_to_object, convert_receiver;
__ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
__ JumpIfSmi(x3, &convert_to_object);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
__ B(hs, &done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
__ Bind(&convert_global_proxy);
{
// Patch receiver to global proxy.
__ LoadGlobalProxy(x3);
}
__ B(&convert_receiver);
}
__ Bind(&convert_to_object);
{
// Convert receiver using ToObject.
// TODO(bmeurer): Inline the allocation here to avoid building the frame
// in the fast case? (fall back to AllocateInNewSpace?)
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0);
__ Push(x0, x1);
__ Mov(x0, x3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Mov(x3, x0);
__ Pop(x1, x0);
__ SmiUntag(x0);
}
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Bind(&convert_receiver);
} }
__ B(&convert_receiver);
__ Bind(&convert_to_object);
{
// Convert receiver using ToObject.
// TODO(bmeurer): Inline the allocation here to avoid building the frame
// in the fast case? (fall back to AllocateInNewSpace?)
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0);
__ Push(x0, x1);
__ Mov(x0, x3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Mov(x3, x0);
__ Pop(x1, x0);
__ SmiUntag(x0);
}
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Bind(&convert_receiver);
__ Poke(x3, Operand(x0, LSL, kXRegSizeLog2)); __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
} }
__ Bind(&done_convert); __ Bind(&done_convert);
@ -1622,11 +1655,18 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm) {
ParameterCount actual(x0); ParameterCount actual(x0);
ParameterCount expected(x2); ParameterCount expected(x2);
__ InvokeCode(x3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); __ InvokeCode(x3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
// The function is a "classConstructor", need to raise an exception.
__ bind(&class_constructor);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowConstructorNonCallableError, 0);
}
} }
// static // static
void Builtins::Generate_Call(MacroAssembler* masm) { void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver) // -- x0 : the number of arguments (not including the receiver)
// -- x1 : the target to call (can be any Object). // -- x1 : the target to call (can be any Object).
@ -1636,8 +1676,8 @@ void Builtins::Generate_Call(MacroAssembler* masm) {
__ JumpIfSmi(x1, &non_callable); __ JumpIfSmi(x1, &non_callable);
__ Bind(&non_smi); __ Bind(&non_smi);
__ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE); __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, __ Jump(masm->isolate()->builtins()->CallFunction(mode),
eq); RelocInfo::CODE_TARGET, eq);
__ Cmp(x5, JS_FUNCTION_PROXY_TYPE); __ Cmp(x5, JS_FUNCTION_PROXY_TYPE);
__ B(ne, &non_function); __ B(ne, &non_function);
@ -1657,7 +1697,9 @@ void Builtins::Generate_Call(MacroAssembler* masm) {
__ Poke(x1, Operand(x0, LSL, kXRegSizeLog2)); __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
// Let the "call_as_function_delegate" take care of the rest. // Let the "call_as_function_delegate" take care of the rest.
__ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1); __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
__ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); __ Jump(masm->isolate()->builtins()->CallFunction(
ConvertReceiverMode::kNotNullOrUndefined),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable. // 3. Call to something that is not callable.
__ bind(&non_callable); __ bind(&non_callable);
@ -1753,13 +1795,14 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// static // static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver) // -- x0 : the number of arguments (not including the receiver)
// -- x2 : the address of the first argument to be pushed. Subsequent // -- x2 : the address of the first argument to be pushed. Subsequent
// arguments should be consecutive above this, in the same order as // arguments should be consecutive above this, in the same order as
// they are to be pushed onto the stack. // they are to be pushed onto the stack.
// -- x1 : the target to call (can be any Object). // -- x1 : the target to call (can be any Object).
// -----------------------------------
// Find the address of the last argument. // Find the address of the last argument.
__ add(x3, x0, Operand(1)); // Add one for receiver. __ add(x3, x0, Operand(1)); // Add one for receiver.
@ -1784,6 +1827,43 @@ void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
} }
// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argument count (not including receiver)
// -- x3 : original constructor
// -- x1 : constructor to call
// -- x2 : address of the first argument
// -----------------------------------
// Find the address of the last argument.
__ add(x5, x0, Operand(1)); // Add one for receiver (to be constructed).
__ lsl(x5, x5, kPointerSizeLog2);
// Set stack pointer and where to stop.
__ Mov(x6, jssp);
__ Claim(x5, 1);
__ sub(x4, x6, x5);
// Push a slot for the receiver.
__ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));
Label loop_header, loop_check;
// Push the arguments.
__ B(&loop_check);
__ Bind(&loop_header);
// TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
__ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
__ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
__ Bind(&loop_check);
__ Cmp(x6, x4);
__ B(gt, &loop_header);
// Call the constructor with x0, x1, and x3 unmodified.
__ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
// ----------- S t a t e ------------- // ----------- S t a t e -------------

189
deps/v8/src/arm64/code-stubs-arm64.cc

@ -1067,6 +1067,8 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Register parameters: // Register parameters:
// x0: argc (including receiver, untagged) // x0: argc (including receiver, untagged)
// x1: target // x1: target
// If argv_in_register():
// x11: argv (pointer to first argument)
// //
// The stack on entry holds the arguments and the receiver, with the receiver // The stack on entry holds the arguments and the receiver, with the receiver
// at the highest address: // at the highest address:
@ -1098,9 +1100,11 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// (arg[argc-2]), or just below the receiver in case there are no arguments. // (arg[argc-2]), or just below the receiver in case there are no arguments.
// - Adjust for the arg[] array. // - Adjust for the arg[] array.
Register temp_argv = x11; Register temp_argv = x11;
__ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2)); if (!argv_in_register()) {
// - Adjust for the receiver. __ Add(temp_argv, jssp, Operand(x0, LSL, kPointerSizeLog2));
__ Sub(temp_argv, temp_argv, 1 * kPointerSize); // - Adjust for the receiver.
__ Sub(temp_argv, temp_argv, 1 * kPointerSize);
}
// Enter the exit frame. Reserve three slots to preserve x21-x23 callee-saved // Enter the exit frame. Reserve three slots to preserve x21-x23 callee-saved
// registers. // registers.
@ -1204,12 +1208,10 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ LeaveExitFrame(save_doubles(), x10, true); __ LeaveExitFrame(save_doubles(), x10, true);
DCHECK(jssp.Is(__ StackPointer())); DCHECK(jssp.Is(__ StackPointer()));
// Pop or drop the remaining stack slots and return from the stub. if (!argv_in_register()) {
// jssp[24]: Arguments array (of size argc), including receiver. // Drop the remaining stack slots and return from the stub.
// jssp[16]: Preserved x23 (used for target). __ Drop(x11);
// jssp[8]: Preserved x22 (used for argc). }
// jssp[0]: Preserved x21 (used for argv).
__ Drop(x11);
__ AssertFPCRState(); __ AssertFPCRState();
__ Ret(); __ Ret();
@ -1804,8 +1806,8 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
Register sloppy_args_map = x11; Register sloppy_args_map = x11;
Register aliased_args_map = x10; Register aliased_args_map = x10;
__ Ldr(global_object, GlobalObjectMemOperand()); __ Ldr(global_object, GlobalObjectMemOperand());
__ Ldr(global_ctx, FieldMemOperand(global_object, __ Ldr(global_ctx,
GlobalObject::kNativeContextOffset)); FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
__ Ldr(sloppy_args_map, __ Ldr(sloppy_args_map,
ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
@ -2049,8 +2051,8 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
Register global_ctx = x10; Register global_ctx = x10;
Register strict_args_map = x4; Register strict_args_map = x4;
__ Ldr(global_object, GlobalObjectMemOperand()); __ Ldr(global_object, GlobalObjectMemOperand());
__ Ldr(global_ctx, FieldMemOperand(global_object, __ Ldr(global_ctx,
GlobalObject::kNativeContextOffset)); FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
__ Ldr(strict_args_map, __ Ldr(strict_args_map,
ContextMemOperand(global_ctx, Context::STRICT_ARGUMENTS_MAP_INDEX)); ContextMemOperand(global_ctx, Context::STRICT_ARGUMENTS_MAP_INDEX));
@ -2745,101 +2747,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
} }
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
// Do not transform the receiver for strict mode functions.
__ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
__ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont);
// Do not transform the receiver for native (Compilerhints already in x3).
__ Tbnz(w4, SharedFunctionInfo::kNative, cont);
}
static void EmitSlowCase(MacroAssembler* masm, int argc) {
__ Mov(x0, argc);
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
// Wrap the receiver and patch it back onto the stack.
{ FrameScope frame_scope(masm, StackFrame::INTERNAL);
__ Push(x1);
__ Mov(x0, x3);
ToObjectStub stub(masm->isolate());
__ CallStub(&stub);
__ Pop(x1);
}
__ Poke(x0, argc * kPointerSize);
__ B(cont);
}
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// x1 function the function to call
Register function = x1;
Register type = x4;
Label slow, wrap, cont;
// TODO(jbramley): This function has a lot of unnamed registers. Name them,
// and tidy things up a bit.
if (needs_checks) {
// Check that the function is really a JavaScript function.
__ JumpIfSmi(function, &slow);
// Goto slow case if we do not have a function.
__ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
}
// Fast-case: Invoke the function now.
// x1 function pushed function
ParameterCount actual(argc);
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}
// Compute the receiver in sloppy mode.
__ Peek(x3, argc * kPointerSize);
if (needs_checks) {
__ JumpIfSmi(x3, &wrap);
__ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
} else {
__ B(&wrap);
}
__ Bind(&cont);
}
__ InvokeFunction(function,
actual,
JUMP_FUNCTION,
NullCallWrapper());
if (needs_checks) {
// Slow-case: Non-function called.
__ Bind(&slow);
EmitSlowCase(masm, argc);
}
if (call_as_method) {
__ Bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallFunctionStub::Generate");
CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
}
void CallConstructStub::Generate(MacroAssembler* masm) { void CallConstructStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallConstructStub::Generate"); ASM_LOCATION("CallConstructStub::Generate");
// x0 : number of arguments // x0 : number of arguments
@ -2939,16 +2846,13 @@ void CallICStub::Generate(MacroAssembler* masm) {
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
const int generic_offset = const int generic_offset =
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
Label extra_checks_or_miss, slow_start; Label extra_checks_or_miss, call;
Label slow, wrap, cont;
Label have_js_function;
int argc = arg_count(); int argc = arg_count();
ParameterCount actual(argc); ParameterCount actual(argc);
Register function = x1; Register function = x1;
Register feedback_vector = x2; Register feedback_vector = x2;
Register index = x3; Register index = x3;
Register type = x4;
// The checks. First, does x1 match the recorded monomorphic target? // The checks. First, does x1 match the recorded monomorphic target?
__ Add(x4, feedback_vector, __ Add(x4, feedback_vector,
@ -2986,36 +2890,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); __ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ Str(index, FieldMemOperand(feedback_vector, 0)); __ Str(index, FieldMemOperand(feedback_vector, 0));
__ bind(&have_js_function); __ bind(&call);
if (CallAsMethod()) { __ Mov(x0, argc);
EmitContinueIfStrictOrNative(masm, &cont); __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Compute the receiver in sloppy mode.
__ Peek(x3, argc * kPointerSize);
__ JumpIfSmi(x3, &wrap);
__ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
__ Bind(&cont);
}
__ InvokeFunction(function,
actual,
JUMP_FUNCTION,
NullCallWrapper());
__ bind(&slow);
EmitSlowCase(masm, argc);
if (CallAsMethod()) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
__ bind(&extra_checks_or_miss); __ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site; Label uninitialized, miss, not_allocation_site;
__ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &slow_start); __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call);
__ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset)); __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
__ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site); __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site);
@ -3047,7 +2929,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Ldr(x4, FieldMemOperand(feedback_vector, generic_offset)); __ Ldr(x4, FieldMemOperand(feedback_vector, generic_offset));
__ Adds(x4, x4, Operand(Smi::FromInt(1))); __ Adds(x4, x4, Operand(Smi::FromInt(1)));
__ Str(x4, FieldMemOperand(feedback_vector, generic_offset)); __ Str(x4, FieldMemOperand(feedback_vector, generic_offset));
__ B(&slow_start); __ B(&call);
__ bind(&uninitialized); __ bind(&uninitialized);
@ -3086,22 +2968,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Pop(function); __ Pop(function);
} }
__ B(&have_js_function); __ B(&call);
// We are here because tracing is on or we encountered a MISS case we can't // We are here because tracing is on or we encountered a MISS case we can't
// handle here. // handle here.
__ bind(&miss); __ bind(&miss);
GenerateMiss(masm); GenerateMiss(masm);
// the slow case __ B(&call);
__ bind(&slow_start);
// Check that the function is really a JavaScript function.
__ JumpIfSmi(function, &slow);
// Goto slow case if we do not have a function.
__ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow);
__ B(&have_js_function);
} }
@ -3235,7 +3109,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ Bind(&slow_case_); __ Bind(&slow_case_);
call_helper.BeforeCall(masm); call_helper.BeforeCall(masm);
__ Push(code_); __ Push(code_);
__ CallRuntime(Runtime::kCharFromCode, 1); __ CallRuntime(Runtime::kStringCharFromCode, 1);
__ Mov(result_, x0); __ Mov(result_, x0);
call_helper.AfterCall(masm); call_helper.AfterCall(masm);
__ B(&exit_); __ B(&exit_);
@ -3912,6 +3786,21 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
} }
void ToLengthStub::Generate(MacroAssembler* masm) {
// The ToLength stub takes one argument in x0.
Label not_smi;
__ JumpIfNotSmi(x0, &not_smi);
STATIC_ASSERT(kSmiTag == 0);
__ Tst(x0, x0);
__ Csel(x0, x0, Operand(0), ge);
__ Ret();
__ Bind(&not_smi);
__ Push(x0); // Push argument.
__ TailCallRuntime(Runtime::kToLength, 1, 1);
}
void ToStringStub::Generate(MacroAssembler* masm) { void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in x0. // The ToString stub takes one argument in x0.
Label is_number; Label is_number;

3
deps/v8/src/arm64/code-stubs-arm64.h

@ -384,6 +384,7 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_CODE_STUBS_ARM64_H_ #endif // V8_ARM64_CODE_STUBS_ARM64_H_

3
deps/v8/src/arm64/codegen-arm64.h

@ -43,6 +43,7 @@ class MathExpGenerator : public AllStatic {
DISALLOW_COPY_AND_ASSIGN(MathExpGenerator); DISALLOW_COPY_AND_ASSIGN(MathExpGenerator);
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_CODEGEN_ARM64_H_ #endif // V8_ARM64_CODEGEN_ARM64_H_

60
deps/v8/src/arm64/constants-arm64.h

@ -32,8 +32,8 @@ const unsigned kInstructionSizeLog2 = 2;
const unsigned kLoadLiteralScaleLog2 = 2; const unsigned kLoadLiteralScaleLog2 = 2;
const unsigned kMaxLoadLiteralRange = 1 * MB; const unsigned kMaxLoadLiteralRange = 1 * MB;
const unsigned kNumberOfRegisters = 32; const int kNumberOfRegisters = 32;
const unsigned kNumberOfFPRegisters = 32; const int kNumberOfFPRegisters = 32;
// Callee saved registers are x19-x30(lr). // Callee saved registers are x19-x30(lr).
const int kNumberOfCalleeSavedRegisters = 11; const int kNumberOfCalleeSavedRegisters = 11;
const int kFirstCalleeSavedRegisterIndex = 19; const int kFirstCalleeSavedRegisterIndex = 19;
@ -42,23 +42,22 @@ const int kNumberOfCalleeSavedFPRegisters = 8;
const int kFirstCalleeSavedFPRegisterIndex = 8; const int kFirstCalleeSavedFPRegisterIndex = 8;
// Callee saved registers with no specific purpose in JS are x19-x25. // Callee saved registers with no specific purpose in JS are x19-x25.
const unsigned kJSCalleeSavedRegList = 0x03f80000; const unsigned kJSCalleeSavedRegList = 0x03f80000;
// TODO(all): k<Y>RegSize should probably be k<Y>RegSizeInBits. const int kWRegSizeInBits = 32;
const unsigned kWRegSizeInBits = 32; const int kWRegSizeInBitsLog2 = 5;
const unsigned kWRegSizeInBitsLog2 = 5; const int kWRegSize = kWRegSizeInBits >> 3;
const unsigned kWRegSize = kWRegSizeInBits >> 3; const int kWRegSizeLog2 = kWRegSizeInBitsLog2 - 3;
const unsigned kWRegSizeLog2 = kWRegSizeInBitsLog2 - 3; const int kXRegSizeInBits = 64;
const unsigned kXRegSizeInBits = 64; const int kXRegSizeInBitsLog2 = 6;
const unsigned kXRegSizeInBitsLog2 = 6; const int kXRegSize = kXRegSizeInBits >> 3;
const unsigned kXRegSize = kXRegSizeInBits >> 3; const int kXRegSizeLog2 = kXRegSizeInBitsLog2 - 3;
const unsigned kXRegSizeLog2 = kXRegSizeInBitsLog2 - 3; const int kSRegSizeInBits = 32;
const unsigned kSRegSizeInBits = 32; const int kSRegSizeInBitsLog2 = 5;
const unsigned kSRegSizeInBitsLog2 = 5; const int kSRegSize = kSRegSizeInBits >> 3;
const unsigned kSRegSize = kSRegSizeInBits >> 3; const int kSRegSizeLog2 = kSRegSizeInBitsLog2 - 3;
const unsigned kSRegSizeLog2 = kSRegSizeInBitsLog2 - 3; const int kDRegSizeInBits = 64;
const unsigned kDRegSizeInBits = 64; const int kDRegSizeInBitsLog2 = 6;
const unsigned kDRegSizeInBitsLog2 = 6; const int kDRegSize = kDRegSizeInBits >> 3;
const unsigned kDRegSize = kDRegSizeInBits >> 3; const int kDRegSizeLog2 = kDRegSizeInBitsLog2 - 3;
const unsigned kDRegSizeLog2 = kDRegSizeInBitsLog2 - 3;
const int64_t kWRegMask = 0x00000000ffffffffL; const int64_t kWRegMask = 0x00000000ffffffffL;
const int64_t kXRegMask = 0xffffffffffffffffL; const int64_t kXRegMask = 0xffffffffffffffffL;
const int64_t kSRegMask = 0x00000000ffffffffL; const int64_t kSRegMask = 0x00000000ffffffffL;
@ -86,13 +85,13 @@ const int64_t kXMaxInt = 0x7fffffffffffffffL;
const int64_t kXMinInt = 0x8000000000000000L; const int64_t kXMinInt = 0x8000000000000000L;
const int32_t kWMaxInt = 0x7fffffff; const int32_t kWMaxInt = 0x7fffffff;
const int32_t kWMinInt = 0x80000000; const int32_t kWMinInt = 0x80000000;
const unsigned kIp0Code = 16; const int kIp0Code = 16;
const unsigned kIp1Code = 17; const int kIp1Code = 17;
const unsigned kFramePointerRegCode = 29; const int kFramePointerRegCode = 29;
const unsigned kLinkRegCode = 30; const int kLinkRegCode = 30;
const unsigned kZeroRegCode = 31; const int kZeroRegCode = 31;
const unsigned kJSSPCode = 28; const int kJSSPCode = 28;
const unsigned kSPRegInternalCode = 63; const int kSPRegInternalCode = 63;
const unsigned kRegCodeMask = 0x1f; const unsigned kRegCodeMask = 0x1f;
const unsigned kShiftAmountWRegMask = 0x1f; const unsigned kShiftAmountWRegMask = 0x1f;
const unsigned kShiftAmountXRegMask = 0x3f; const unsigned kShiftAmountXRegMask = 0x3f;
@ -118,12 +117,6 @@ const unsigned kDoubleExponentBias = 1023;
const unsigned kFloatMantissaBits = 23; const unsigned kFloatMantissaBits = 23;
const unsigned kFloatExponentBits = 8; const unsigned kFloatExponentBits = 8;
#define REGISTER_CODE_LIST(R) \
R(0) R(1) R(2) R(3) R(4) R(5) R(6) R(7) \
R(8) R(9) R(10) R(11) R(12) R(13) R(14) R(15) \
R(16) R(17) R(18) R(19) R(20) R(21) R(22) R(23) \
R(24) R(25) R(26) R(27) R(28) R(29) R(30) R(31)
#define INSTRUCTION_FIELDS_LIST(V_) \ #define INSTRUCTION_FIELDS_LIST(V_) \
/* Register fields */ \ /* Register fields */ \
V_(Rd, 4, 0, Bits) /* Destination register. */ \ V_(Rd, 4, 0, Bits) /* Destination register. */ \
@ -1237,6 +1230,7 @@ enum UnallocatedOp {
UnallocatedFMask = 0x00000000 UnallocatedFMask = 0x00000000
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_CONSTANTS_ARM64_H_ #endif // V8_ARM64_CONSTANTS_ARM64_H_

3
deps/v8/src/arm64/decoder-arm64-inl.h

@ -644,6 +644,7 @@ void Decoder<V>::DecodeAdvSIMDDataProcessing(Instruction* instr) {
} }
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_DECODER_ARM64_INL_H_ #endif // V8_ARM64_DECODER_ARM64_INL_H_

3
deps/v8/src/arm64/decoder-arm64.h

@ -181,6 +181,7 @@ class Decoder : public V {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_DECODER_ARM64_H_ #endif // V8_ARM64_DECODER_ARM64_H_

9
deps/v8/src/arm64/deoptimizer-arm64.cc

@ -6,6 +6,7 @@
#include "src/codegen.h" #include "src/codegen.h"
#include "src/deoptimizer.h" #include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h" #include "src/full-codegen/full-codegen.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h" #include "src/safepoint-table.h"
@ -75,7 +76,7 @@ void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
input_->SetRegister(jssp.code(), reinterpret_cast<intptr_t>(frame->sp())); input_->SetRegister(jssp.code(), reinterpret_cast<intptr_t>(frame->sp()));
input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp())); input_->SetRegister(fp.code(), reinterpret_cast<intptr_t>(frame->fp()));
for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); i++) { for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
input_->SetDoubleRegister(i, 0.0); input_->SetDoubleRegister(i, 0.0);
} }
@ -122,8 +123,10 @@ void Deoptimizer::TableEntryGenerator::Generate() {
// in the input frame. // in the input frame.
// Save all allocatable floating point registers. // Save all allocatable floating point registers.
CPURegList saved_fp_registers(CPURegister::kFPRegister, kDRegSizeInBits, CPURegList saved_fp_registers(
FPRegister::kAllocatableFPRegisters); CPURegister::kFPRegister, kDRegSizeInBits,
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->allocatable_double_codes_mask());
__ PushCPURegList(saved_fp_registers); __ PushCPURegList(saved_fp_registers);
// We save all the registers expcept jssp, sp and lr. // We save all the registers expcept jssp, sp and lr.

164
deps/v8/src/arm64/disasm-arm64.cc

@ -19,7 +19,7 @@ namespace v8 {
namespace internal { namespace internal {
Disassembler::Disassembler() { DisassemblingDecoder::DisassemblingDecoder() {
buffer_size_ = 256; buffer_size_ = 256;
buffer_ = reinterpret_cast<char*>(malloc(buffer_size_)); buffer_ = reinterpret_cast<char*>(malloc(buffer_size_));
buffer_pos_ = 0; buffer_pos_ = 0;
@ -27,7 +27,7 @@ Disassembler::Disassembler() {
} }
Disassembler::Disassembler(char* text_buffer, int buffer_size) { DisassemblingDecoder::DisassemblingDecoder(char* text_buffer, int buffer_size) {
buffer_size_ = buffer_size; buffer_size_ = buffer_size;
buffer_ = text_buffer; buffer_ = text_buffer;
buffer_pos_ = 0; buffer_pos_ = 0;
@ -35,19 +35,17 @@ Disassembler::Disassembler(char* text_buffer, int buffer_size) {
} }
Disassembler::~Disassembler() { DisassemblingDecoder::~DisassemblingDecoder() {
if (own_buffer_) { if (own_buffer_) {
free(buffer_); free(buffer_);
} }
} }
char* Disassembler::GetOutput() { char* DisassemblingDecoder::GetOutput() { return buffer_; }
return buffer_;
}
void Disassembler::VisitAddSubImmediate(Instruction* instr) { void DisassemblingDecoder::VisitAddSubImmediate(Instruction* instr) {
bool rd_is_zr = RdIsZROrSP(instr); bool rd_is_zr = RdIsZROrSP(instr);
bool stack_op = (rd_is_zr || RnIsZROrSP(instr)) && bool stack_op = (rd_is_zr || RnIsZROrSP(instr)) &&
(instr->ImmAddSub() == 0) ? true : false; (instr->ImmAddSub() == 0) ? true : false;
@ -92,7 +90,7 @@ void Disassembler::VisitAddSubImmediate(Instruction* instr) {
} }
void Disassembler::VisitAddSubShifted(Instruction* instr) { void DisassemblingDecoder::VisitAddSubShifted(Instruction* instr) {
bool rd_is_zr = RdIsZROrSP(instr); bool rd_is_zr = RdIsZROrSP(instr);
bool rn_is_zr = RnIsZROrSP(instr); bool rn_is_zr = RnIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
@ -139,7 +137,7 @@ void Disassembler::VisitAddSubShifted(Instruction* instr) {
} }
void Disassembler::VisitAddSubExtended(Instruction* instr) { void DisassemblingDecoder::VisitAddSubExtended(Instruction* instr) {
bool rd_is_zr = RdIsZROrSP(instr); bool rd_is_zr = RdIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
Extend mode = static_cast<Extend>(instr->ExtendMode()); Extend mode = static_cast<Extend>(instr->ExtendMode());
@ -177,7 +175,7 @@ void Disassembler::VisitAddSubExtended(Instruction* instr) {
} }
void Disassembler::VisitAddSubWithCarry(Instruction* instr) { void DisassemblingDecoder::VisitAddSubWithCarry(Instruction* instr) {
bool rn_is_zr = RnIsZROrSP(instr); bool rn_is_zr = RnIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rd, 'Rn, 'Rm"; const char *form = "'Rd, 'Rn, 'Rm";
@ -212,7 +210,7 @@ void Disassembler::VisitAddSubWithCarry(Instruction* instr) {
} }
void Disassembler::VisitLogicalImmediate(Instruction* instr) { void DisassemblingDecoder::VisitLogicalImmediate(Instruction* instr) {
bool rd_is_zr = RdIsZROrSP(instr); bool rd_is_zr = RdIsZROrSP(instr);
bool rn_is_zr = RnIsZROrSP(instr); bool rn_is_zr = RnIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
@ -255,7 +253,7 @@ void Disassembler::VisitLogicalImmediate(Instruction* instr) {
} }
bool Disassembler::IsMovzMovnImm(unsigned reg_size, uint64_t value) { bool DisassemblingDecoder::IsMovzMovnImm(unsigned reg_size, uint64_t value) {
DCHECK((reg_size == kXRegSizeInBits) || DCHECK((reg_size == kXRegSizeInBits) ||
((reg_size == kWRegSizeInBits) && (value <= 0xffffffff))); ((reg_size == kWRegSizeInBits) && (value <= 0xffffffff)));
@ -284,7 +282,7 @@ bool Disassembler::IsMovzMovnImm(unsigned reg_size, uint64_t value) {
} }
void Disassembler::VisitLogicalShifted(Instruction* instr) { void DisassemblingDecoder::VisitLogicalShifted(Instruction* instr) {
bool rd_is_zr = RdIsZROrSP(instr); bool rd_is_zr = RdIsZROrSP(instr);
bool rn_is_zr = RnIsZROrSP(instr); bool rn_is_zr = RnIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
@ -335,7 +333,7 @@ void Disassembler::VisitLogicalShifted(Instruction* instr) {
} }
void Disassembler::VisitConditionalCompareRegister(Instruction* instr) { void DisassemblingDecoder::VisitConditionalCompareRegister(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rn, 'Rm, 'INzcv, 'Cond"; const char *form = "'Rn, 'Rm, 'INzcv, 'Cond";
@ -350,7 +348,8 @@ void Disassembler::VisitConditionalCompareRegister(Instruction* instr) {
} }
void Disassembler::VisitConditionalCompareImmediate(Instruction* instr) { void DisassemblingDecoder::VisitConditionalCompareImmediate(
Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rn, 'IP, 'INzcv, 'Cond"; const char *form = "'Rn, 'IP, 'INzcv, 'Cond";
@ -365,7 +364,7 @@ void Disassembler::VisitConditionalCompareImmediate(Instruction* instr) {
} }
void Disassembler::VisitConditionalSelect(Instruction* instr) { void DisassemblingDecoder::VisitConditionalSelect(Instruction* instr) {
bool rnm_is_zr = (RnIsZROrSP(instr) && RmIsZROrSP(instr)); bool rnm_is_zr = (RnIsZROrSP(instr) && RmIsZROrSP(instr));
bool rn_is_rm = (instr->Rn() == instr->Rm()); bool rn_is_rm = (instr->Rn() == instr->Rm());
const char *mnemonic = ""; const char *mnemonic = "";
@ -418,7 +417,7 @@ void Disassembler::VisitConditionalSelect(Instruction* instr) {
} }
void Disassembler::VisitBitfield(Instruction* instr) { void DisassemblingDecoder::VisitBitfield(Instruction* instr) {
unsigned s = instr->ImmS(); unsigned s = instr->ImmS();
unsigned r = instr->ImmR(); unsigned r = instr->ImmR();
unsigned rd_size_minus_1 = unsigned rd_size_minus_1 =
@ -496,7 +495,7 @@ void Disassembler::VisitBitfield(Instruction* instr) {
} }
void Disassembler::VisitExtract(Instruction* instr) { void DisassemblingDecoder::VisitExtract(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rd, 'Rn, 'Rm, 'IExtract"; const char *form = "'Rd, 'Rn, 'Rm, 'IExtract";
@ -517,7 +516,7 @@ void Disassembler::VisitExtract(Instruction* instr) {
} }
void Disassembler::VisitPCRelAddressing(Instruction* instr) { void DisassemblingDecoder::VisitPCRelAddressing(Instruction* instr) {
switch (instr->Mask(PCRelAddressingMask)) { switch (instr->Mask(PCRelAddressingMask)) {
case ADR: Format(instr, "adr", "'Xd, 'AddrPCRelByte"); break; case ADR: Format(instr, "adr", "'Xd, 'AddrPCRelByte"); break;
// ADRP is not implemented. // ADRP is not implemented.
@ -526,7 +525,7 @@ void Disassembler::VisitPCRelAddressing(Instruction* instr) {
} }
void Disassembler::VisitConditionalBranch(Instruction* instr) { void DisassemblingDecoder::VisitConditionalBranch(Instruction* instr) {
switch (instr->Mask(ConditionalBranchMask)) { switch (instr->Mask(ConditionalBranchMask)) {
case B_cond: Format(instr, "b.'CBrn", "'BImmCond"); break; case B_cond: Format(instr, "b.'CBrn", "'BImmCond"); break;
default: UNREACHABLE(); default: UNREACHABLE();
@ -534,7 +533,8 @@ void Disassembler::VisitConditionalBranch(Instruction* instr) {
} }
void Disassembler::VisitUnconditionalBranchToRegister(Instruction* instr) { void DisassemblingDecoder::VisitUnconditionalBranchToRegister(
Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Xn"; const char *form = "'Xn";
@ -554,7 +554,7 @@ void Disassembler::VisitUnconditionalBranchToRegister(Instruction* instr) {
} }
void Disassembler::VisitUnconditionalBranch(Instruction* instr) { void DisassemblingDecoder::VisitUnconditionalBranch(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'BImmUncn"; const char *form = "'BImmUncn";
@ -567,7 +567,7 @@ void Disassembler::VisitUnconditionalBranch(Instruction* instr) {
} }
void Disassembler::VisitDataProcessing1Source(Instruction* instr) { void DisassemblingDecoder::VisitDataProcessing1Source(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rd, 'Rn"; const char *form = "'Rd, 'Rn";
@ -588,7 +588,7 @@ void Disassembler::VisitDataProcessing1Source(Instruction* instr) {
} }
void Disassembler::VisitDataProcessing2Source(Instruction* instr) { void DisassemblingDecoder::VisitDataProcessing2Source(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Rd, 'Rn, 'Rm"; const char *form = "'Rd, 'Rn, 'Rm";
@ -609,7 +609,7 @@ void Disassembler::VisitDataProcessing2Source(Instruction* instr) {
} }
void Disassembler::VisitDataProcessing3Source(Instruction* instr) { void DisassemblingDecoder::VisitDataProcessing3Source(Instruction* instr) {
bool ra_is_zr = RaIsZROrSP(instr); bool ra_is_zr = RaIsZROrSP(instr);
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Xd, 'Wn, 'Wm, 'Xa"; const char *form = "'Xd, 'Wn, 'Wm, 'Xa";
@ -687,7 +687,7 @@ void Disassembler::VisitDataProcessing3Source(Instruction* instr) {
} }
void Disassembler::VisitCompareBranch(Instruction* instr) { void DisassemblingDecoder::VisitCompareBranch(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rt, 'BImmCmpa"; const char *form = "'Rt, 'BImmCmpa";
@ -702,7 +702,7 @@ void Disassembler::VisitCompareBranch(Instruction* instr) {
} }
void Disassembler::VisitTestBranch(Instruction* instr) { void DisassemblingDecoder::VisitTestBranch(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
// If the top bit of the immediate is clear, the tested register is // If the top bit of the immediate is clear, the tested register is
// disassembled as Wt, otherwise Xt. As the top bit of the immediate is // disassembled as Wt, otherwise Xt. As the top bit of the immediate is
@ -719,7 +719,7 @@ void Disassembler::VisitTestBranch(Instruction* instr) {
} }
void Disassembler::VisitMoveWideImmediate(Instruction* instr) { void DisassemblingDecoder::VisitMoveWideImmediate(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rd, 'IMoveImm"; const char *form = "'Rd, 'IMoveImm";
@ -758,7 +758,7 @@ void Disassembler::VisitMoveWideImmediate(Instruction* instr) {
V(LDR_s, "ldr", "'St") \ V(LDR_s, "ldr", "'St") \
V(LDR_d, "ldr", "'Dt") V(LDR_d, "ldr", "'Dt")
void Disassembler::VisitLoadStorePreIndex(Instruction* instr) { void DisassemblingDecoder::VisitLoadStorePreIndex(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStorePreIndex)"; const char *form = "(LoadStorePreIndex)";
@ -772,7 +772,7 @@ void Disassembler::VisitLoadStorePreIndex(Instruction* instr) {
} }
void Disassembler::VisitLoadStorePostIndex(Instruction* instr) { void DisassemblingDecoder::VisitLoadStorePostIndex(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStorePostIndex)"; const char *form = "(LoadStorePostIndex)";
@ -786,7 +786,7 @@ void Disassembler::VisitLoadStorePostIndex(Instruction* instr) {
} }
void Disassembler::VisitLoadStoreUnsignedOffset(Instruction* instr) { void DisassemblingDecoder::VisitLoadStoreUnsignedOffset(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStoreUnsignedOffset)"; const char *form = "(LoadStoreUnsignedOffset)";
@ -801,7 +801,7 @@ void Disassembler::VisitLoadStoreUnsignedOffset(Instruction* instr) {
} }
void Disassembler::VisitLoadStoreRegisterOffset(Instruction* instr) { void DisassemblingDecoder::VisitLoadStoreRegisterOffset(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStoreRegisterOffset)"; const char *form = "(LoadStoreRegisterOffset)";
@ -816,7 +816,7 @@ void Disassembler::VisitLoadStoreRegisterOffset(Instruction* instr) {
} }
void Disassembler::VisitLoadStoreUnscaledOffset(Instruction* instr) { void DisassemblingDecoder::VisitLoadStoreUnscaledOffset(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Wt, ['Xns'ILS]"; const char *form = "'Wt, ['Xns'ILS]";
const char *form_x = "'Xt, ['Xns'ILS]"; const char *form_x = "'Xt, ['Xns'ILS]";
@ -847,7 +847,7 @@ void Disassembler::VisitLoadStoreUnscaledOffset(Instruction* instr) {
} }
void Disassembler::VisitLoadLiteral(Instruction* instr) { void DisassemblingDecoder::VisitLoadLiteral(Instruction* instr) {
const char *mnemonic = "ldr"; const char *mnemonic = "ldr";
const char *form = "(LoadLiteral)"; const char *form = "(LoadLiteral)";
@ -873,7 +873,7 @@ void Disassembler::VisitLoadLiteral(Instruction* instr) {
V(STP_d, "stp", "'Dt, 'Dt2", "8") \ V(STP_d, "stp", "'Dt, 'Dt2", "8") \
V(LDP_d, "ldp", "'Dt, 'Dt2", "8") V(LDP_d, "ldp", "'Dt, 'Dt2", "8")
void Disassembler::VisitLoadStorePairPostIndex(Instruction* instr) { void DisassemblingDecoder::VisitLoadStorePairPostIndex(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStorePairPostIndex)"; const char *form = "(LoadStorePairPostIndex)";
@ -887,7 +887,7 @@ void Disassembler::VisitLoadStorePairPostIndex(Instruction* instr) {
} }
void Disassembler::VisitLoadStorePairPreIndex(Instruction* instr) { void DisassemblingDecoder::VisitLoadStorePairPreIndex(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStorePairPreIndex)"; const char *form = "(LoadStorePairPreIndex)";
@ -901,7 +901,7 @@ void Disassembler::VisitLoadStorePairPreIndex(Instruction* instr) {
} }
void Disassembler::VisitLoadStorePairOffset(Instruction* instr) { void DisassemblingDecoder::VisitLoadStorePairOffset(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(LoadStorePairOffset)"; const char *form = "(LoadStorePairOffset)";
@ -915,7 +915,7 @@ void Disassembler::VisitLoadStorePairOffset(Instruction* instr) {
} }
void Disassembler::VisitFPCompare(Instruction* instr) { void DisassemblingDecoder::VisitFPCompare(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Fn, 'Fm"; const char *form = "'Fn, 'Fm";
const char *form_zero = "'Fn, #0.0"; const char *form_zero = "'Fn, #0.0";
@ -931,7 +931,7 @@ void Disassembler::VisitFPCompare(Instruction* instr) {
} }
void Disassembler::VisitFPConditionalCompare(Instruction* instr) { void DisassemblingDecoder::VisitFPConditionalCompare(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Fn, 'Fm, 'INzcv, 'Cond"; const char *form = "'Fn, 'Fm, 'INzcv, 'Cond";
@ -946,7 +946,7 @@ void Disassembler::VisitFPConditionalCompare(Instruction* instr) {
} }
void Disassembler::VisitFPConditionalSelect(Instruction* instr) { void DisassemblingDecoder::VisitFPConditionalSelect(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Fd, 'Fn, 'Fm, 'Cond"; const char *form = "'Fd, 'Fn, 'Fm, 'Cond";
@ -959,7 +959,7 @@ void Disassembler::VisitFPConditionalSelect(Instruction* instr) {
} }
void Disassembler::VisitFPDataProcessing1Source(Instruction* instr) { void DisassemblingDecoder::VisitFPDataProcessing1Source(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'Fd, 'Fn"; const char *form = "'Fd, 'Fn";
@ -987,7 +987,7 @@ void Disassembler::VisitFPDataProcessing1Source(Instruction* instr) {
} }
void Disassembler::VisitFPDataProcessing2Source(Instruction* instr) { void DisassemblingDecoder::VisitFPDataProcessing2Source(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Fd, 'Fn, 'Fm"; const char *form = "'Fd, 'Fn, 'Fm";
@ -1011,7 +1011,7 @@ void Disassembler::VisitFPDataProcessing2Source(Instruction* instr) {
} }
void Disassembler::VisitFPDataProcessing3Source(Instruction* instr) { void DisassemblingDecoder::VisitFPDataProcessing3Source(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Fd, 'Fn, 'Fm, 'Fa"; const char *form = "'Fd, 'Fn, 'Fm, 'Fa";
@ -1030,7 +1030,7 @@ void Disassembler::VisitFPDataProcessing3Source(Instruction* instr) {
} }
void Disassembler::VisitFPImmediate(Instruction* instr) { void DisassemblingDecoder::VisitFPImmediate(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "(FPImmediate)"; const char *form = "(FPImmediate)";
@ -1043,7 +1043,7 @@ void Disassembler::VisitFPImmediate(Instruction* instr) {
} }
void Disassembler::VisitFPIntegerConvert(Instruction* instr) { void DisassemblingDecoder::VisitFPIntegerConvert(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "(FPIntegerConvert)"; const char *form = "(FPIntegerConvert)";
const char *form_rf = "'Rd, 'Fn"; const char *form_rf = "'Rd, 'Fn";
@ -1099,7 +1099,7 @@ void Disassembler::VisitFPIntegerConvert(Instruction* instr) {
} }
void Disassembler::VisitFPFixedPointConvert(Instruction* instr) { void DisassemblingDecoder::VisitFPFixedPointConvert(Instruction* instr) {
const char *mnemonic = ""; const char *mnemonic = "";
const char *form = "'Rd, 'Fn, 'IFPFBits"; const char *form = "'Rd, 'Fn, 'IFPFBits";
const char *form_fr = "'Fd, 'Rn, 'IFPFBits"; const char *form_fr = "'Fd, 'Rn, 'IFPFBits";
@ -1126,7 +1126,7 @@ void Disassembler::VisitFPFixedPointConvert(Instruction* instr) {
} }
void Disassembler::VisitSystem(Instruction* instr) { void DisassemblingDecoder::VisitSystem(Instruction* instr) {
// Some system instructions hijack their Op and Cp fields to represent a // Some system instructions hijack their Op and Cp fields to represent a
// range of immediates instead of indicating a different instruction. This // range of immediates instead of indicating a different instruction. This
// makes the decoding tricky. // makes the decoding tricky.
@ -1187,7 +1187,7 @@ void Disassembler::VisitSystem(Instruction* instr) {
} }
void Disassembler::VisitException(Instruction* instr) { void DisassemblingDecoder::VisitException(Instruction* instr) {
const char *mnemonic = "unimplemented"; const char *mnemonic = "unimplemented";
const char *form = "'IDebug"; const char *form = "'IDebug";
@ -1206,23 +1206,23 @@ void Disassembler::VisitException(Instruction* instr) {
} }
void Disassembler::VisitUnimplemented(Instruction* instr) { void DisassemblingDecoder::VisitUnimplemented(Instruction* instr) {
Format(instr, "unimplemented", "(Unimplemented)"); Format(instr, "unimplemented", "(Unimplemented)");
} }
void Disassembler::VisitUnallocated(Instruction* instr) { void DisassemblingDecoder::VisitUnallocated(Instruction* instr) {
Format(instr, "unallocated", "(Unallocated)"); Format(instr, "unallocated", "(Unallocated)");
} }
void Disassembler::ProcessOutput(Instruction* /*instr*/) { void DisassemblingDecoder::ProcessOutput(Instruction* /*instr*/) {
// The base disasm does nothing more than disassembling into a buffer. // The base disasm does nothing more than disassembling into a buffer.
} }
void Disassembler::Format(Instruction* instr, const char* mnemonic, void DisassemblingDecoder::Format(Instruction* instr, const char* mnemonic,
const char* format) { const char* format) {
// TODO(mcapewel) don't think I can use the instr address here - there needs // TODO(mcapewel) don't think I can use the instr address here - there needs
// to be a base address too // to be a base address too
DCHECK(mnemonic != NULL); DCHECK(mnemonic != NULL);
@ -1237,7 +1237,7 @@ void Disassembler::Format(Instruction* instr, const char* mnemonic,
} }
void Disassembler::Substitute(Instruction* instr, const char* string) { void DisassemblingDecoder::Substitute(Instruction* instr, const char* string) {
char chr = *string++; char chr = *string++;
while (chr != '\0') { while (chr != '\0') {
if (chr == '\'') { if (chr == '\'') {
@ -1250,7 +1250,8 @@ void Disassembler::Substitute(Instruction* instr, const char* string) {
} }
int Disassembler::SubstituteField(Instruction* instr, const char* format) { int DisassemblingDecoder::SubstituteField(Instruction* instr,
const char* format) {
switch (format[0]) { switch (format[0]) {
case 'R': // Register. X or W, selected by sf bit. case 'R': // Register. X or W, selected by sf bit.
case 'F': // FP Register. S or D, selected by type field. case 'F': // FP Register. S or D, selected by type field.
@ -1276,8 +1277,8 @@ int Disassembler::SubstituteField(Instruction* instr, const char* format) {
} }
int Disassembler::SubstituteRegisterField(Instruction* instr, int DisassemblingDecoder::SubstituteRegisterField(Instruction* instr,
const char* format) { const char* format) {
unsigned reg_num = 0; unsigned reg_num = 0;
unsigned field_len = 2; unsigned field_len = 2;
switch (format[1]) { switch (format[1]) {
@ -1341,8 +1342,8 @@ int Disassembler::SubstituteRegisterField(Instruction* instr,
} }
int Disassembler::SubstituteImmediateField(Instruction* instr, int DisassemblingDecoder::SubstituteImmediateField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(format[0] == 'I'); DCHECK(format[0] == 'I');
switch (format[1]) { switch (format[1]) {
@ -1452,8 +1453,8 @@ int Disassembler::SubstituteImmediateField(Instruction* instr,
} }
int Disassembler::SubstituteBitfieldImmediateField(Instruction* instr, int DisassemblingDecoder::SubstituteBitfieldImmediateField(Instruction* instr,
const char* format) { const char* format) {
DCHECK((format[0] == 'I') && (format[1] == 'B')); DCHECK((format[0] == 'I') && (format[1] == 'B'));
unsigned r = instr->ImmR(); unsigned r = instr->ImmR();
unsigned s = instr->ImmS(); unsigned s = instr->ImmS();
@ -1488,8 +1489,8 @@ int Disassembler::SubstituteBitfieldImmediateField(Instruction* instr,
} }
int Disassembler::SubstituteLiteralField(Instruction* instr, int DisassemblingDecoder::SubstituteLiteralField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(strncmp(format, "LValue", 6) == 0); DCHECK(strncmp(format, "LValue", 6) == 0);
USE(format); USE(format);
@ -1507,7 +1508,8 @@ int Disassembler::SubstituteLiteralField(Instruction* instr,
} }
int Disassembler::SubstituteShiftField(Instruction* instr, const char* format) { int DisassemblingDecoder::SubstituteShiftField(Instruction* instr,
const char* format) {
DCHECK(format[0] == 'H'); DCHECK(format[0] == 'H');
DCHECK(instr->ShiftDP() <= 0x3); DCHECK(instr->ShiftDP() <= 0x3);
@ -1530,8 +1532,8 @@ int Disassembler::SubstituteShiftField(Instruction* instr, const char* format) {
} }
int Disassembler::SubstituteConditionField(Instruction* instr, int DisassemblingDecoder::SubstituteConditionField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(format[0] == 'C'); DCHECK(format[0] == 'C');
const char* condition_code[] = { "eq", "ne", "hs", "lo", const char* condition_code[] = { "eq", "ne", "hs", "lo",
"mi", "pl", "vs", "vc", "mi", "pl", "vs", "vc",
@ -1551,8 +1553,8 @@ int Disassembler::SubstituteConditionField(Instruction* instr,
} }
int Disassembler::SubstitutePCRelAddressField(Instruction* instr, int DisassemblingDecoder::SubstitutePCRelAddressField(Instruction* instr,
const char* format) { const char* format) {
USE(format); USE(format);
DCHECK(strncmp(format, "AddrPCRel", 9) == 0); DCHECK(strncmp(format, "AddrPCRel", 9) == 0);
@ -1572,8 +1574,8 @@ int Disassembler::SubstitutePCRelAddressField(Instruction* instr,
} }
int Disassembler::SubstituteBranchTargetField(Instruction* instr, int DisassemblingDecoder::SubstituteBranchTargetField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(strncmp(format, "BImm", 4) == 0); DCHECK(strncmp(format, "BImm", 4) == 0);
int64_t offset = 0; int64_t offset = 0;
@ -1599,8 +1601,8 @@ int Disassembler::SubstituteBranchTargetField(Instruction* instr,
} }
int Disassembler::SubstituteExtendField(Instruction* instr, int DisassemblingDecoder::SubstituteExtendField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(strncmp(format, "Ext", 3) == 0); DCHECK(strncmp(format, "Ext", 3) == 0);
DCHECK(instr->ExtendMode() <= 7); DCHECK(instr->ExtendMode() <= 7);
USE(format); USE(format);
@ -1626,8 +1628,8 @@ int Disassembler::SubstituteExtendField(Instruction* instr,
} }
int Disassembler::SubstituteLSRegOffsetField(Instruction* instr, int DisassemblingDecoder::SubstituteLSRegOffsetField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(strncmp(format, "Offsetreg", 9) == 0); DCHECK(strncmp(format, "Offsetreg", 9) == 0);
const char* extend_mode[] = { "undefined", "undefined", "uxtw", "lsl", const char* extend_mode[] = { "undefined", "undefined", "uxtw", "lsl",
"undefined", "undefined", "sxtw", "sxtx" }; "undefined", "undefined", "sxtw", "sxtx" };
@ -1655,8 +1657,8 @@ int Disassembler::SubstituteLSRegOffsetField(Instruction* instr,
} }
int Disassembler::SubstitutePrefetchField(Instruction* instr, int DisassemblingDecoder::SubstitutePrefetchField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(format[0] == 'P'); DCHECK(format[0] == 'P');
USE(format); USE(format);
@ -1670,8 +1672,8 @@ int Disassembler::SubstitutePrefetchField(Instruction* instr,
return 6; return 6;
} }
int Disassembler::SubstituteBarrierField(Instruction* instr, int DisassemblingDecoder::SubstituteBarrierField(Instruction* instr,
const char* format) { const char* format) {
DCHECK(format[0] == 'M'); DCHECK(format[0] == 'M');
USE(format); USE(format);
@ -1689,13 +1691,13 @@ int Disassembler::SubstituteBarrierField(Instruction* instr,
} }
void Disassembler::ResetOutput() { void DisassemblingDecoder::ResetOutput() {
buffer_pos_ = 0; buffer_pos_ = 0;
buffer_[buffer_pos_] = 0; buffer_[buffer_pos_] = 0;
} }
void Disassembler::AppendToOutput(const char* format, ...) { void DisassemblingDecoder::AppendToOutput(const char* format, ...) {
va_list args; va_list args;
va_start(args, format); va_start(args, format);
buffer_pos_ += vsnprintf(&buffer_[buffer_pos_], buffer_size_, format, args); buffer_pos_ += vsnprintf(&buffer_[buffer_pos_], buffer_size_, format, args);
@ -1761,7 +1763,7 @@ const char* NameConverter::NameInCode(byte* addr) const {
//------------------------------------------------------------------------------ //------------------------------------------------------------------------------
class BufferDisassembler : public v8::internal::Disassembler { class BufferDisassembler : public v8::internal::DisassemblingDecoder {
public: public:
explicit BufferDisassembler(v8::internal::Vector<char> out_buffer) explicit BufferDisassembler(v8::internal::Vector<char> out_buffer)
: out_buffer_(out_buffer) { } : out_buffer_(out_buffer) { }

13
deps/v8/src/arm64/disasm-arm64.h

@ -14,11 +14,11 @@ namespace v8 {
namespace internal { namespace internal {
class Disassembler: public DecoderVisitor { class DisassemblingDecoder : public DecoderVisitor {
public: public:
Disassembler(); DisassemblingDecoder();
Disassembler(char* text_buffer, int buffer_size); DisassemblingDecoder(char* text_buffer, int buffer_size);
virtual ~Disassembler(); virtual ~DisassemblingDecoder();
char* GetOutput(); char* GetOutput();
// Declare all Visitor functions. // Declare all Visitor functions.
@ -73,7 +73,7 @@ class Disassembler: public DecoderVisitor {
}; };
class PrintDisassembler: public Disassembler { class PrintDisassembler : public DisassemblingDecoder {
public: public:
explicit PrintDisassembler(FILE* stream) : stream_(stream) { } explicit PrintDisassembler(FILE* stream) : stream_(stream) { }
~PrintDisassembler() { } ~PrintDisassembler() { }
@ -85,6 +85,7 @@ class PrintDisassembler: public Disassembler {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_DISASM_ARM64_H #endif // V8_ARM64_DISASM_ARM64_H

3
deps/v8/src/arm64/frames-arm64.h

@ -63,6 +63,7 @@ class JavaScriptFrameConstants : public AllStatic {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_FRAMES_ARM64_H_ #endif // V8_ARM64_FRAMES_ARM64_H_

3
deps/v8/src/arm64/instructions-arm64.h

@ -532,7 +532,8 @@ enum DebugParameters {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_INSTRUCTIONS_ARM64_H_ #endif // V8_ARM64_INSTRUCTIONS_ARM64_H_

3
deps/v8/src/arm64/instrument-arm64.h

@ -80,6 +80,7 @@ class Instrument: public DecoderVisitor {
uint64_t sample_period_; uint64_t sample_period_;
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_INSTRUMENT_ARM64_H_ #endif // V8_ARM64_INSTRUMENT_ARM64_H_

47
deps/v8/src/arm64/interface-descriptors-arm64.cc

@ -78,14 +78,6 @@ const Register GrowArrayElementsDescriptor::ObjectRegister() { return x0; }
const Register GrowArrayElementsDescriptor::KeyRegister() { return x3; } const Register GrowArrayElementsDescriptor::KeyRegister() { return x3; }
void VectorStoreTransitionDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
SlotRegister(), VectorRegister(), MapRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void FastNewClosureDescriptor::InitializePlatformSpecific( void FastNewClosureDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// x2: function info // x2: function info
@ -110,6 +102,10 @@ void ToNumberDescriptor::InitializePlatformSpecific(
} }
// static
const Register ToLengthDescriptor::ReceiverRegister() { return x0; }
// static // static
const Register ToStringDescriptor::ReceiverRegister() { return x0; } const Register ToStringDescriptor::ReceiverRegister() { return x0; }
@ -250,6 +246,13 @@ void AllocateHeapNumberDescriptor::InitializePlatformSpecific(
} }
void AllocateInNewSpaceDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {x0};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ArrayConstructorConstantArgCountDescriptor::InitializePlatformSpecific( void ArrayConstructorConstantArgCountDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// x1: function // x1: function
@ -446,16 +449,40 @@ void MathRoundVariantCallFromOptimizedCodeDescriptor::
} }
void PushArgsAndCallDescriptor::InitializePlatformSpecific( void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = { Register registers[] = {
x0, // argument count (including receiver) x0, // argument count (not including receiver)
x2, // address of first argument x2, // address of first argument
x1 // the target callable to be call x1 // the target callable to be call
}; };
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
x0, // argument count (not including receiver)
x3, // original constructor
x1, // constructor to call
x2 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterCEntryDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
x0, // argument count (argc)
x11, // address of first argument (argv)
x1 // the runtime function to call
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

4
deps/v8/src/arm64/interface-descriptors-arm64.h

@ -20,7 +20,7 @@ class PlatformInterfaceDescriptor {
private: private:
TargetAddressStorageMode storage_mode_; TargetAddressStorageMode storage_mode_;
}; };
} } // namespace internal
} // namespace v8::internal } // namespace v8
#endif // V8_ARM64_INTERFACE_DESCRIPTORS_ARM64_H_ #endif // V8_ARM64_INTERFACE_DESCRIPTORS_ARM64_H_

3
deps/v8/src/arm64/macro-assembler-arm64-inl.h

@ -1683,6 +1683,7 @@ void MacroAssembler::AnnotateInstrumentation(const char* marker_name) {
movn(xzr, (marker_name[1] << 8) | marker_name[0]); movn(xzr, (marker_name[1] << 8) | marker_name[0]);
} }
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_ #endif // V8_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_

46
deps/v8/src/arm64/macro-assembler-arm64.cc

@ -9,6 +9,7 @@
#include "src/bootstrapper.h" #include "src/bootstrapper.h"
#include "src/codegen.h" #include "src/codegen.h"
#include "src/debug/debug.h" #include "src/debug/debug.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h" #include "src/runtime/runtime.h"
#include "src/arm64/frames-arm64.h" #include "src/arm64/frames-arm64.h"
@ -35,8 +36,8 @@ MacroAssembler::MacroAssembler(Isolate* arg_isolate,
tmp_list_(DefaultTmpList()), tmp_list_(DefaultTmpList()),
fptmp_list_(DefaultFPTmpList()) { fptmp_list_(DefaultFPTmpList()) {
if (isolate() != NULL) { if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), code_object_ =
isolate()); Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
} }
} }
@ -208,7 +209,7 @@ void MacroAssembler::Mov(const Register& rd, uint64_t imm) {
// halfword, and movk for subsequent halfwords. // halfword, and movk for subsequent halfwords.
DCHECK((reg_size % 16) == 0); DCHECK((reg_size % 16) == 0);
bool first_mov_done = false; bool first_mov_done = false;
for (unsigned i = 0; i < (rd.SizeInBits() / 16); i++) { for (int i = 0; i < (rd.SizeInBits() / 16); i++) {
uint64_t imm16 = (imm >> (16 * i)) & 0xffffL; uint64_t imm16 = (imm >> (16 * i)) & 0xffffL;
if (imm16 != ignored_halfword) { if (imm16 != ignored_halfword) {
if (!first_mov_done) { if (!first_mov_done) {
@ -1704,7 +1705,7 @@ void MacroAssembler::GetBuiltinFunction(Register target,
int native_context_index) { int native_context_index) {
// Load the builtins object into target register. // Load the builtins object into target register.
Ldr(target, GlobalObjectMemOperand()); Ldr(target, GlobalObjectMemOperand());
Ldr(target, FieldMemOperand(target, GlobalObject::kNativeContextOffset)); Ldr(target, FieldMemOperand(target, JSGlobalObject::kNativeContextOffset));
// Load the JavaScript builtin function from the builtins object. // Load the JavaScript builtin function from the builtins object.
Ldr(target, ContextMemOperand(target, native_context_index)); Ldr(target, ContextMemOperand(target, native_context_index));
} }
@ -2423,9 +2424,10 @@ void MacroAssembler::JumpIfEitherInstanceTypeIsNotSequentialOneByte(
Label* failure) { Label* failure) {
DCHECK(!AreAliased(scratch1, second)); DCHECK(!AreAliased(scratch1, second));
DCHECK(!AreAliased(scratch1, scratch2)); DCHECK(!AreAliased(scratch1, scratch2));
static const int kFlatOneByteStringMask = const int kFlatOneByteStringMask =
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask; kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
static const int kFlatOneByteStringTag = ONE_BYTE_STRING_TYPE; const int kFlatOneByteStringTag =
kStringTag | kOneByteStringTag | kSeqStringTag;
And(scratch1, first, kFlatOneByteStringMask); And(scratch1, first, kFlatOneByteStringMask);
And(scratch2, second, kFlatOneByteStringMask); And(scratch2, second, kFlatOneByteStringMask);
Cmp(scratch1, kFlatOneByteStringTag); Cmp(scratch1, kFlatOneByteStringTag);
@ -3000,7 +3002,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
void MacroAssembler::LoadGlobalProxy(Register dst) { void MacroAssembler::LoadGlobalProxy(Register dst) {
Ldr(dst, GlobalObjectMemOperand()); Ldr(dst, GlobalObjectMemOperand());
Ldr(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); Ldr(dst, FieldMemOperand(dst, JSGlobalObject::kGlobalProxyOffset));
} }
@ -3570,6 +3572,14 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
} }
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
LoadRoot(temp, index);
Push(temp);
}
void MacroAssembler::CompareRoot(const Register& obj, void MacroAssembler::CompareRoot(const Register& obj,
Heap::RootListIndex index) { Heap::RootListIndex index) {
UseScratchRegisterScope temps(this); UseScratchRegisterScope temps(this);
@ -3772,7 +3782,8 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
int offset = int offset =
Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
Ldr(scratch1, FieldMemOperand(scratch1, offset)); Ldr(scratch1, FieldMemOperand(scratch1, offset));
Ldr(scratch1, FieldMemOperand(scratch1, GlobalObject::kNativeContextOffset)); Ldr(scratch1,
FieldMemOperand(scratch1, JSGlobalObject::kNativeContextOffset));
// Check the context is a native context. // Check the context is a native context.
if (emit_debug_code()) { if (emit_debug_code()) {
@ -3984,14 +3995,18 @@ void MacroAssembler::PushSafepointRegisters() {
void MacroAssembler::PushSafepointRegistersAndDoubles() { void MacroAssembler::PushSafepointRegistersAndDoubles() {
PushSafepointRegisters(); PushSafepointRegisters();
PushCPURegList(CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, PushCPURegList(CPURegList(
FPRegister::kAllocatableFPRegisters)); CPURegister::kFPRegister, kDRegSizeInBits,
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->allocatable_double_codes_mask()));
} }
void MacroAssembler::PopSafepointRegistersAndDoubles() { void MacroAssembler::PopSafepointRegistersAndDoubles() {
PopCPURegList(CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, PopCPURegList(CPURegList(
FPRegister::kAllocatableFPRegisters)); CPURegister::kFPRegister, kDRegSizeInBits,
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->allocatable_double_codes_mask()));
PopSafepointRegisters(); PopSafepointRegisters();
} }
@ -4602,7 +4617,8 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
Label* no_map_match) { Label* no_map_match) {
// Load the global or builtins object from the current context. // Load the global or builtins object from the current context.
Ldr(scratch1, GlobalObjectMemOperand()); Ldr(scratch1, GlobalObjectMemOperand());
Ldr(scratch1, FieldMemOperand(scratch1, GlobalObject::kNativeContextOffset)); Ldr(scratch1,
FieldMemOperand(scratch1, JSGlobalObject::kNativeContextOffset));
// Check that the function's map is the same as the expected cached map. // Check that the function's map is the same as the expected cached map.
Ldr(scratch1, ContextMemOperand(scratch1, Context::JS_ARRAY_MAPS_INDEX)); Ldr(scratch1, ContextMemOperand(scratch1, Context::JS_ARRAY_MAPS_INDEX));
@ -4621,8 +4637,8 @@ void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context. // Load the global or builtins object from the current context.
Ldr(function, GlobalObjectMemOperand()); Ldr(function, GlobalObjectMemOperand());
// Load the native context from the global or builtins object. // Load the native context from the global or builtins object.
Ldr(function, FieldMemOperand(function, Ldr(function,
GlobalObject::kNativeContextOffset)); FieldMemOperand(function, JSGlobalObject::kNativeContextOffset));
// Load the function from the native context. // Load the function from the native context.
Ldr(function, ContextMemOperand(function, index)); Ldr(function, ContextMemOperand(function, index));
} }

7
deps/v8/src/arm64/macro-assembler-arm64.h

@ -44,6 +44,7 @@ namespace internal {
#define kInterpreterBytecodeOffsetRegister x19 #define kInterpreterBytecodeOffsetRegister x19
#define kInterpreterBytecodeArrayRegister x20 #define kInterpreterBytecodeArrayRegister x20
#define kInterpreterDispatchTableRegister x21 #define kInterpreterDispatchTableRegister x21
#define kJavaScriptCallArgCountRegister x0
#define kRuntimeCallFunctionRegister x1 #define kRuntimeCallFunctionRegister x1
#define kRuntimeCallArgCountRegister x0 #define kRuntimeCallArgCountRegister x0
@ -1461,6 +1462,9 @@ class MacroAssembler : public Assembler {
// register. // register.
void LoadElementsKindFromMap(Register result, Register map); void LoadElementsKindFromMap(Register result, Register map);
// Load the value from the root list and push it onto the stack.
void PushRoot(Heap::RootListIndex index);
// Compare the object in a register to a value from the root list. // Compare the object in a register to a value from the root list.
void CompareRoot(const Register& obj, Heap::RootListIndex index); void CompareRoot(const Register& obj, Heap::RootListIndex index);
@ -2278,7 +2282,8 @@ class InlineSmiCheckInfo {
class DeltaBits : public BitField<uint32_t, 5, 32-5> {}; class DeltaBits : public BitField<uint32_t, 5, 32-5> {};
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#ifdef GENERATED_CODE_COVERAGE #ifdef GENERATED_CODE_COVERAGE
#error "Unsupported option" #error "Unsupported option"

9
deps/v8/src/arm64/simulator-arm64.h

@ -17,12 +17,6 @@
#include "src/globals.h" #include "src/globals.h"
#include "src/utils.h" #include "src/utils.h"
#define REGISTER_CODE_LIST(R) \
R(0) R(1) R(2) R(3) R(4) R(5) R(6) R(7) \
R(8) R(9) R(10) R(11) R(12) R(13) R(14) R(15) \
R(16) R(17) R(18) R(19) R(20) R(21) R(22) R(23) \
R(24) R(25) R(26) R(27) R(28) R(29) R(30) R(31)
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -911,6 +905,7 @@ class SimulatorStack : public v8::internal::AllStatic {
#endif // !defined(USE_SIMULATOR) #endif // !defined(USE_SIMULATOR)
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_SIMULATOR_ARM64_H_ #endif // V8_ARM64_SIMULATOR_ARM64_H_

9
deps/v8/src/arm64/utils-arm64.h

@ -9,12 +9,6 @@
#include "src/arm64/constants-arm64.h" #include "src/arm64/constants-arm64.h"
#define REGISTER_CODE_LIST(R) \
R(0) R(1) R(2) R(3) R(4) R(5) R(6) R(7) \
R(8) R(9) R(10) R(11) R(12) R(13) R(14) R(15) \
R(16) R(17) R(18) R(19) R(20) R(21) R(22) R(23) \
R(24) R(25) R(26) R(27) R(28) R(29) R(30) R(31)
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -151,6 +145,7 @@ inline float FusedMultiplyAdd(float op1, float op2, float a) {
return fmaf(op1, op2, a); return fmaf(op1, op2, a);
} }
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ARM64_UTILS_ARM64_H_ #endif // V8_ARM64_UTILS_ARM64_H_

58
deps/v8/src/assembler.cc

@ -46,13 +46,16 @@
#include "src/counters.h" #include "src/counters.h"
#include "src/debug/debug.h" #include "src/debug/debug.h"
#include "src/deoptimizer.h" #include "src/deoptimizer.h"
#include "src/disassembler.h"
#include "src/execution.h" #include "src/execution.h"
#include "src/ic/ic.h" #include "src/ic/ic.h"
#include "src/ic/stub-cache.h" #include "src/ic/stub-cache.h"
#include "src/ostreams.h"
#include "src/profiler/cpu-profiler.h" #include "src/profiler/cpu-profiler.h"
#include "src/regexp/jsregexp.h" #include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h" #include "src/regexp/regexp-macro-assembler.h"
#include "src/regexp/regexp-stack.h" #include "src/regexp/regexp-stack.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h" #include "src/runtime/runtime.h"
#include "src/simulator.h" // For flushing instruction cache. #include "src/simulator.h" // For flushing instruction cache.
#include "src/snapshot/serialize.h" #include "src/snapshot/serialize.h"
@ -104,6 +107,39 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
// -----------------------------------------------------------------------------
// Common register code.
const char* Register::ToString() {
// This is the mapping of allocation indices to registers.
DCHECK(reg_code >= 0 && reg_code < kNumRegisters);
return RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->GetGeneralRegisterName(reg_code);
}
bool Register::IsAllocatable() const {
return ((1 << reg_code) &
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->allocatable_general_codes_mask()) != 0;
}
const char* DoubleRegister::ToString() {
// This is the mapping of allocation indices to registers.
DCHECK(reg_code >= 0 && reg_code < kMaxNumRegisters);
return RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->GetDoubleRegisterName(reg_code);
}
bool DoubleRegister::IsAllocatable() const {
return ((1 << reg_code) &
RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
->allocatable_double_codes_mask()) != 0;
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Common double constants. // Common double constants.
@ -181,6 +217,12 @@ void AssemblerBase::FlushICacheWithoutIsolate(void* start, size_t size) {
} }
void AssemblerBase::Print() {
OFStream os(stdout);
v8::internal::Disassembler::Decode(isolate(), &os, buffer_, pc_, nullptr);
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Implementation of PredictableCodeSizeScope // Implementation of PredictableCodeSizeScope
@ -1407,9 +1449,21 @@ ExternalReference
} }
ExternalReference ExternalReference::vector_store_virtual_register( ExternalReference ExternalReference::virtual_handler_register(
Isolate* isolate) {
return ExternalReference(isolate->virtual_handler_register_address());
}
ExternalReference ExternalReference::virtual_slot_register(Isolate* isolate) {
return ExternalReference(isolate->virtual_slot_register_address());
}
ExternalReference ExternalReference::runtime_function_table_address(
Isolate* isolate) { Isolate* isolate) {
return ExternalReference(isolate->vector_store_virtual_register_address()); return ExternalReference(
const_cast<Runtime::Function*>(Runtime::RuntimeFunctionTable(isolate)));
} }

21
deps/v8/src/assembler.h

@ -49,6 +49,7 @@ class ApiFunction;
namespace internal { namespace internal {
// Forward declarations. // Forward declarations.
class SourcePosition;
class StatsCounter; class StatsCounter;
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
@ -99,6 +100,9 @@ class AssemblerBase: public Malloced {
// the assembler could clean up internal data structures. // the assembler could clean up internal data structures.
virtual void AbortedCodeGeneration() { } virtual void AbortedCodeGeneration() { }
// Debugging
void Print();
static const int kMinimalBufferSize = 4*KB; static const int kMinimalBufferSize = 4*KB;
static void FlushICache(Isolate* isolate, void* start, size_t size); static void FlushICache(Isolate* isolate, void* start, size_t size);
@ -319,6 +323,8 @@ class Label {
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs }; enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };
enum ArgvMode { kArgvOnStack, kArgvInRegister };
// Specifies whether to perform icache flush operations on RelocInfo updates. // Specifies whether to perform icache flush operations on RelocInfo updates.
// If FLUSH_ICACHE_IF_NEEDED, the icache will always be flushed if an // If FLUSH_ICACHE_IF_NEEDED, the icache will always be flushed if an
// instruction was modified. If SKIP_ICACHE_FLUSH the flush will always be // instruction was modified. If SKIP_ICACHE_FLUSH the flush will always be
@ -659,11 +665,6 @@ class RelocInfo {
Mode rmode_; Mode rmode_;
intptr_t data_; intptr_t data_;
Code* host_; Code* host_;
// External-reference pointers are also split across instruction-pairs
// on some platforms, but are accessed via indirect pointers. This location
// provides a place for that pointer to exist naturally. Its address
// is returned by RelocInfo::target_reference_address().
Address reconstructed_adr_ptr_;
friend class RelocIterator; friend class RelocIterator;
}; };
@ -990,7 +991,10 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference invoke_function_callback(Isolate* isolate); static ExternalReference invoke_function_callback(Isolate* isolate);
static ExternalReference invoke_accessor_getter_callback(Isolate* isolate); static ExternalReference invoke_accessor_getter_callback(Isolate* isolate);
static ExternalReference vector_store_virtual_register(Isolate* isolate); static ExternalReference virtual_handler_register(Isolate* isolate);
static ExternalReference virtual_slot_register(Isolate* isolate);
static ExternalReference runtime_function_table_address(Isolate* isolate);
Address address() const { return reinterpret_cast<Address>(address_); } Address address() const { return reinterpret_cast<Address>(address_); }
@ -1276,7 +1280,6 @@ class ConstantPoolBuilder BASE_EMBEDDED {
PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES]; PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES];
}; };
} // namespace internal
} } // namespace v8::internal } // namespace v8
#endif // V8_ASSEMBLER_H_ #endif // V8_ASSEMBLER_H_

3
deps/v8/src/assert-scope.h

@ -170,6 +170,7 @@ typedef PerIsolateAssertScopeDebugOnly<COMPILATION_ASSERT, false>
// Scope to introduce an exception to DisallowDeoptimization. // Scope to introduce an exception to DisallowDeoptimization.
typedef PerIsolateAssertScopeDebugOnly<COMPILATION_ASSERT, true> typedef PerIsolateAssertScopeDebugOnly<COMPILATION_ASSERT, true>
AllowCompilation; AllowCompilation;
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_ASSERT_SCOPE_H_ #endif // V8_ASSERT_SCOPE_H_

59
deps/v8/src/ast-expression-visitor.cc

@ -32,14 +32,20 @@ namespace internal {
} while (false) } while (false)
AstExpressionVisitor::AstExpressionVisitor(Isolate* isolate, Zone* zone, AstExpressionVisitor::AstExpressionVisitor(Isolate* isolate, Expression* root)
FunctionLiteral* root)
: root_(root), depth_(0) { : root_(root), depth_(0) {
InitializeAstVisitor(isolate, zone); InitializeAstVisitor(isolate);
} }
void AstExpressionVisitor::Run() { RECURSE(VisitFunctionLiteral(root_)); } AstExpressionVisitor::AstExpressionVisitor(uintptr_t stack_limit,
Expression* root)
: root_(root), depth_(0) {
InitializeAstVisitor(stack_limit);
}
void AstExpressionVisitor::Run() { RECURSE(Visit(root_)); }
void AstExpressionVisitor::VisitVariableDeclaration(VariableDeclaration* decl) { void AstExpressionVisitor::VisitVariableDeclaration(VariableDeclaration* decl) {
@ -196,6 +202,12 @@ void AstExpressionVisitor::VisitNativeFunctionLiteral(
NativeFunctionLiteral* expr) {} NativeFunctionLiteral* expr) {}
void AstExpressionVisitor::VisitDoExpression(DoExpression* expr) {
RECURSE(VisitBlock(expr->block()));
RECURSE(VisitVariableProxy(expr->result()));
}
void AstExpressionVisitor::VisitConditional(Conditional* expr) { void AstExpressionVisitor::VisitConditional(Conditional* expr) {
RECURSE(Visit(expr->condition())); RECURSE(Visit(expr->condition()));
RECURSE(Visit(expr->then_expression())); RECURSE(Visit(expr->then_expression()));
@ -223,6 +235,9 @@ void AstExpressionVisitor::VisitObjectLiteral(ObjectLiteral* expr) {
ZoneList<ObjectLiteralProperty*>* props = expr->properties(); ZoneList<ObjectLiteralProperty*>* props = expr->properties();
for (int i = 0; i < props->length(); ++i) { for (int i = 0; i < props->length(); ++i) {
ObjectLiteralProperty* prop = props->at(i); ObjectLiteralProperty* prop = props->at(i);
if (!prop->key()->IsLiteral()) {
RECURSE_EXPRESSION(Visit(prop->key()));
}
RECURSE_EXPRESSION(Visit(prop->value())); RECURSE_EXPRESSION(Visit(prop->value()));
} }
} }
@ -336,21 +351,47 @@ void AstExpressionVisitor::VisitDeclarations(ZoneList<Declaration*>* decls) {
} }
void AstExpressionVisitor::VisitClassLiteral(ClassLiteral* expr) {} void AstExpressionVisitor::VisitClassLiteral(ClassLiteral* expr) {
VisitExpression(expr);
if (expr->extends() != nullptr) {
RECURSE_EXPRESSION(Visit(expr->extends()));
}
RECURSE_EXPRESSION(Visit(expr->constructor()));
ZoneList<ObjectLiteralProperty*>* props = expr->properties();
for (int i = 0; i < props->length(); ++i) {
ObjectLiteralProperty* prop = props->at(i);
if (!prop->key()->IsLiteral()) {
RECURSE_EXPRESSION(Visit(prop->key()));
}
RECURSE_EXPRESSION(Visit(prop->value()));
}
}
void AstExpressionVisitor::VisitSpread(Spread* expr) {} void AstExpressionVisitor::VisitSpread(Spread* expr) {
VisitExpression(expr);
RECURSE_EXPRESSION(Visit(expr->expression()));
}
void AstExpressionVisitor::VisitEmptyParentheses(EmptyParentheses* expr) {} void AstExpressionVisitor::VisitEmptyParentheses(EmptyParentheses* expr) {}
void AstExpressionVisitor::VisitSuperPropertyReference( void AstExpressionVisitor::VisitSuperPropertyReference(
SuperPropertyReference* expr) {} SuperPropertyReference* expr) {
VisitExpression(expr);
RECURSE_EXPRESSION(VisitVariableProxy(expr->this_var()));
RECURSE_EXPRESSION(Visit(expr->home_object()));
}
void AstExpressionVisitor::VisitSuperCallReference(SuperCallReference* expr) {} void AstExpressionVisitor::VisitSuperCallReference(SuperCallReference* expr) {
VisitExpression(expr);
RECURSE_EXPRESSION(VisitVariableProxy(expr->this_var()));
RECURSE_EXPRESSION(VisitVariableProxy(expr->new_target_var()));
RECURSE_EXPRESSION(VisitVariableProxy(expr->this_function_var()));
} }
} // namespace v8::internal } // namespace internal
} // namespace v8

11
deps/v8/src/ast-expression-visitor.h

@ -21,7 +21,8 @@ namespace internal {
class AstExpressionVisitor : public AstVisitor { class AstExpressionVisitor : public AstVisitor {
public: public:
AstExpressionVisitor(Isolate* isolate, Zone* zone, FunctionLiteral* root); AstExpressionVisitor(Isolate* isolate, Expression* root);
AstExpressionVisitor(uintptr_t stack_limit, Expression* root);
void Run(); void Run();
protected: protected:
@ -34,16 +35,16 @@ class AstExpressionVisitor : public AstVisitor {
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS(); DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
#define DECLARE_VISIT(type) virtual void Visit##type(type* node) override; #define DECLARE_VISIT(type) void Visit##type(type* node) override;
AST_NODE_LIST(DECLARE_VISIT) AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT #undef DECLARE_VISIT
FunctionLiteral* root_; Expression* root_;
int depth_; int depth_;
DISALLOW_COPY_AND_ASSIGN(AstExpressionVisitor); DISALLOW_COPY_AND_ASSIGN(AstExpressionVisitor);
}; };
} } // namespace internal
} // namespace v8::internal } // namespace v8
#endif // V8_AST_EXPRESSION_VISITOR_H_ #endif // V8_AST_EXPRESSION_VISITOR_H_

9
deps/v8/src/ast-literal-reindexer.cc

@ -43,6 +43,11 @@ void AstLiteralReindexer::VisitNativeFunctionLiteral(
NativeFunctionLiteral* node) {} NativeFunctionLiteral* node) {}
void AstLiteralReindexer::VisitDoExpression(DoExpression* node) {
// TODO(caitp): literals in do expressions need re-indexing too.
}
void AstLiteralReindexer::VisitLiteral(Literal* node) {} void AstLiteralReindexer::VisitLiteral(Literal* node) {}
@ -316,5 +321,5 @@ void AstLiteralReindexer::VisitFunctionLiteral(FunctionLiteral* node) {
void AstLiteralReindexer::Reindex(Expression* pattern) { void AstLiteralReindexer::Reindex(Expression* pattern) {
pattern->Accept(this); pattern->Accept(this);
} }
} } // namespace internal
} // namespace v8::internal } // namespace v8

6
deps/v8/src/ast-literal-reindexer.h

@ -20,7 +20,7 @@ class AstLiteralReindexer final : public AstVisitor {
int NextIndex() { return next_index_++; } int NextIndex() { return next_index_++; }
private: private:
#define DEFINE_VISIT(type) virtual void Visit##type(type* node) override; #define DEFINE_VISIT(type) void Visit##type(type* node) override;
AST_NODE_LIST(DEFINE_VISIT) AST_NODE_LIST(DEFINE_VISIT)
#undef DEFINE_VISIT #undef DEFINE_VISIT
@ -39,7 +39,7 @@ class AstLiteralReindexer final : public AstVisitor {
DISALLOW_COPY_AND_ASSIGN(AstLiteralReindexer); DISALLOW_COPY_AND_ASSIGN(AstLiteralReindexer);
}; };
} } // namespace internal
} // namespace v8::internal } // namespace v8
#endif // V8_AST_LITERAL_REINDEXER #endif // V8_AST_LITERAL_REINDEXER

33
deps/v8/src/ast-numbering.cc

@ -14,18 +14,20 @@ class AstNumberingVisitor final : public AstVisitor {
public: public:
AstNumberingVisitor(Isolate* isolate, Zone* zone) AstNumberingVisitor(Isolate* isolate, Zone* zone)
: AstVisitor(), : AstVisitor(),
isolate_(isolate),
zone_(zone),
next_id_(BailoutId::FirstUsable().ToInt()), next_id_(BailoutId::FirstUsable().ToInt()),
properties_(zone), properties_(zone),
ic_slot_cache_(zone), slot_cache_(zone),
dont_optimize_reason_(kNoReason) { dont_optimize_reason_(kNoReason) {
InitializeAstVisitor(isolate, zone); InitializeAstVisitor(isolate);
} }
bool Renumber(FunctionLiteral* node); bool Renumber(FunctionLiteral* node);
private: private:
// AST node visitor interface. // AST node visitor interface.
#define DEFINE_VISIT(type) virtual void Visit##type(type* node) override; #define DEFINE_VISIT(type) void Visit##type(type* node) override;
AST_NODE_LIST(DEFINE_VISIT) AST_NODE_LIST(DEFINE_VISIT)
#undef DEFINE_VISIT #undef DEFINE_VISIT
@ -65,16 +67,18 @@ class AstNumberingVisitor final : public AstVisitor {
template <typename Node> template <typename Node>
void ReserveFeedbackSlots(Node* node) { void ReserveFeedbackSlots(Node* node) {
node->AssignFeedbackVectorSlots(isolate(), properties_.get_spec(), node->AssignFeedbackVectorSlots(isolate_, properties_.get_spec(),
&ic_slot_cache_); &slot_cache_);
} }
BailoutReason dont_optimize_reason() const { return dont_optimize_reason_; } BailoutReason dont_optimize_reason() const { return dont_optimize_reason_; }
Isolate* isolate_;
Zone* zone_;
int next_id_; int next_id_;
AstProperties properties_; AstProperties properties_;
// The slot cache allows us to reuse certain vector IC slots. // The slot cache allows us to reuse certain feedback vector slots.
ICSlotCache ic_slot_cache_; FeedbackVectorSlotCache slot_cache_;
BailoutReason dont_optimize_reason_; BailoutReason dont_optimize_reason_;
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS(); DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
@ -132,6 +136,15 @@ void AstNumberingVisitor::VisitNativeFunctionLiteral(
} }
void AstNumberingVisitor::VisitDoExpression(DoExpression* node) {
IncrementNodeCount();
DisableCrankshaft(kDoExpression);
node->set_base_id(ReserveIdRange(DoExpression::num_ids()));
Visit(node->block());
Visit(node->result());
}
void AstNumberingVisitor::VisitLiteral(Literal* node) { void AstNumberingVisitor::VisitLiteral(Literal* node) {
IncrementNodeCount(); IncrementNodeCount();
node->set_base_id(ReserveIdRange(Literal::num_ids())); node->set_base_id(ReserveIdRange(Literal::num_ids()));
@ -466,11 +479,11 @@ void AstNumberingVisitor::VisitObjectLiteral(ObjectLiteral* node) {
for (int i = 0; i < node->properties()->length(); i++) { for (int i = 0; i < node->properties()->length(); i++) {
VisitObjectLiteralProperty(node->properties()->at(i)); VisitObjectLiteralProperty(node->properties()->at(i));
} }
node->BuildConstantProperties(isolate()); node->BuildConstantProperties(isolate_);
// Mark all computed expressions that are bound to a key that // Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the // is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code will be is emitted. // marked expressions, no store code will be is emitted.
node->CalculateEmitStore(zone()); node->CalculateEmitStore(zone_);
ReserveFeedbackSlots(node); ReserveFeedbackSlots(node);
} }
@ -489,6 +502,8 @@ void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
for (int i = 0; i < node->values()->length(); i++) { for (int i = 0; i < node->values()->length(); i++) {
Visit(node->values()->at(i)); Visit(node->values()->at(i));
} }
node->BuildConstantElements(isolate_);
ReserveFeedbackSlots(node);
} }

2
deps/v8/src/ast-value-factory.cc

@ -50,7 +50,7 @@ class OneByteStringStream {
int pos_; int pos_;
}; };
} } // namespace
class AstRawStringInternalizationKey : public HashTableKey { class AstRawStringInternalizationKey : public HashTableKey {
public: public:

4
deps/v8/src/ast-value-factory.h

@ -255,6 +255,7 @@ class AstValue : public ZoneObject {
F(dot_module, ".module") \ F(dot_module, ".module") \
F(dot_result, ".result") \ F(dot_result, ".result") \
F(dot_switch_tag, ".switch_tag") \ F(dot_switch_tag, ".switch_tag") \
F(dot_catch, ".catch") \
F(empty, "") \ F(empty, "") \
F(eval, "eval") \ F(eval, "eval") \
F(let, "let") \ F(let, "let") \
@ -366,7 +367,8 @@ class AstValueFactory {
OTHER_CONSTANTS(F) OTHER_CONSTANTS(F)
#undef F #undef F
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#undef STRING_CONSTANTS #undef STRING_CONSTANTS
#undef OTHER_CONSTANTS #undef OTHER_CONSTANTS

80
deps/v8/src/ast.cc

@ -71,7 +71,6 @@ VariableProxy::VariableProxy(Zone* zone, Variable* var, int start_position,
bit_field_(IsThisField::encode(var->is_this()) | bit_field_(IsThisField::encode(var->is_this()) |
IsAssignedField::encode(false) | IsAssignedField::encode(false) |
IsResolvedField::encode(false)), IsResolvedField::encode(false)),
variable_feedback_slot_(FeedbackVectorICSlot::Invalid()),
raw_name_(var->raw_name()), raw_name_(var->raw_name()),
end_position_(end_position) { end_position_(end_position) {
BindTo(var); BindTo(var);
@ -85,7 +84,6 @@ VariableProxy::VariableProxy(Zone* zone, const AstRawString* name,
bit_field_(IsThisField::encode(variable_kind == Variable::THIS) | bit_field_(IsThisField::encode(variable_kind == Variable::THIS) |
IsAssignedField::encode(false) | IsAssignedField::encode(false) |
IsResolvedField::encode(false)), IsResolvedField::encode(false)),
variable_feedback_slot_(FeedbackVectorICSlot::Invalid()),
raw_name_(name), raw_name_(name),
end_position_(end_position) {} end_position_(end_position) {}
@ -100,14 +98,14 @@ void VariableProxy::BindTo(Variable* var) {
void VariableProxy::AssignFeedbackVectorSlots(Isolate* isolate, void VariableProxy::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
if (UsesVariableFeedbackSlot()) { if (UsesVariableFeedbackSlot()) {
// VariableProxies that point to the same Variable within a function can // VariableProxies that point to the same Variable within a function can
// make their loads from the same IC slot. // make their loads from the same IC slot.
if (var()->IsUnallocated()) { if (var()->IsUnallocated()) {
ZoneHashMap::Entry* entry = cache->Get(var()); ZoneHashMap::Entry* entry = cache->Get(var());
if (entry != NULL) { if (entry != NULL) {
variable_feedback_slot_ = FeedbackVectorICSlot( variable_feedback_slot_ = FeedbackVectorSlot(
static_cast<int>(reinterpret_cast<intptr_t>(entry->value))); static_cast<int>(reinterpret_cast<intptr_t>(entry->value)));
return; return;
} }
@ -121,7 +119,7 @@ void VariableProxy::AssignFeedbackVectorSlots(Isolate* isolate,
static void AssignVectorSlots(Expression* expr, FeedbackVectorSpec* spec, static void AssignVectorSlots(Expression* expr, FeedbackVectorSpec* spec,
FeedbackVectorICSlot* out_slot) { FeedbackVectorSlot* out_slot) {
if (FLAG_vector_stores) { if (FLAG_vector_stores) {
Property* property = expr->AsProperty(); Property* property = expr->AsProperty();
LhsKind assign_type = Property::GetAssignType(property); LhsKind assign_type = Property::GetAssignType(property);
@ -138,9 +136,9 @@ static void AssignVectorSlots(Expression* expr, FeedbackVectorSpec* spec,
} }
void ForEachStatement::AssignFeedbackVectorSlots(Isolate* isolate, void ForEachStatement::AssignFeedbackVectorSlots(
FeedbackVectorSpec* spec, Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
AssignVectorSlots(each(), spec, &each_slot_); AssignVectorSlots(each(), spec, &each_slot_);
} }
@ -153,20 +151,19 @@ Assignment::Assignment(Zone* zone, Token::Value op, Expression* target,
StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)), StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)),
target_(target), target_(target),
value_(value), value_(value),
binary_operation_(NULL), binary_operation_(NULL) {}
slot_(FeedbackVectorICSlot::Invalid()) {}
void Assignment::AssignFeedbackVectorSlots(Isolate* isolate, void Assignment::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
AssignVectorSlots(target(), spec, &slot_); AssignVectorSlots(target(), spec, &slot_);
} }
void CountOperation::AssignFeedbackVectorSlots(Isolate* isolate, void CountOperation::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
AssignVectorSlots(expression(), spec, &slot_); AssignVectorSlots(expression(), spec, &slot_);
} }
@ -227,7 +224,6 @@ ObjectLiteralProperty::ObjectLiteralProperty(Expression* key, Expression* value,
bool is_computed_name) bool is_computed_name)
: key_(key), : key_(key),
value_(value), value_(value),
slot_(FeedbackVectorICSlot::Invalid()),
kind_(kind), kind_(kind),
emit_store_(true), emit_store_(true),
is_static_(is_static), is_static_(is_static),
@ -240,7 +236,6 @@ ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
bool is_computed_name) bool is_computed_name)
: key_(key), : key_(key),
value_(value), value_(value),
slot_(FeedbackVectorICSlot::Invalid()),
emit_store_(true), emit_store_(true),
is_static_(is_static), is_static_(is_static),
is_computed_name_(is_computed_name) { is_computed_name_(is_computed_name) {
@ -260,7 +255,7 @@ ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate, void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
if (!FLAG_vector_stores) return; if (!FLAG_vector_stores) return;
// This logic that computes the number of slots needed for vector store // This logic that computes the number of slots needed for vector store
@ -273,7 +268,7 @@ void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
ObjectLiteral::Property* property = properties()->at(i); ObjectLiteral::Property* property = properties()->at(i);
Expression* value = property->value(); Expression* value = property->value();
if (FunctionLiteral::NeedsHomeObject(value)) { if (FunctionLiteral::NeedsHomeObject(value)) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
} }
} }
} }
@ -298,7 +293,7 @@ bool ObjectLiteral::Property::emit_store() {
void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate, void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
if (!FLAG_vector_stores) return; if (!FLAG_vector_stores) return;
// This logic that computes the number of slots needed for vector store // This logic that computes the number of slots needed for vector store
@ -321,27 +316,27 @@ void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
// contains computed properties with an uninitialized value. // contains computed properties with an uninitialized value.
if (key->value()->IsInternalizedString()) { if (key->value()->IsInternalizedString()) {
if (property->emit_store()) { if (property->emit_store()) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
if (FunctionLiteral::NeedsHomeObject(value)) { if (FunctionLiteral::NeedsHomeObject(value)) {
spec->AddStoreICSlot(); property->SetSlot(spec->AddStoreICSlot(), 1);
} }
} }
break; break;
} }
if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) { if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
} }
break; break;
case ObjectLiteral::Property::PROTOTYPE: case ObjectLiteral::Property::PROTOTYPE:
break; break;
case ObjectLiteral::Property::GETTER: case ObjectLiteral::Property::GETTER:
if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) { if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
} }
break; break;
case ObjectLiteral::Property::SETTER: case ObjectLiteral::Property::SETTER:
if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) { if (property->emit_store() && FunctionLiteral::NeedsHomeObject(value)) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
} }
break; break;
} }
@ -353,7 +348,7 @@ void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
Expression* value = property->value(); Expression* value = property->value();
if (property->kind() != ObjectLiteral::Property::PROTOTYPE) { if (property->kind() != ObjectLiteral::Property::PROTOTYPE) {
if (FunctionLiteral::NeedsHomeObject(value)) { if (FunctionLiteral::NeedsHomeObject(value)) {
property->set_slot(spec->AddStoreICSlot()); property->SetSlot(spec->AddStoreICSlot());
} }
} }
} }
@ -552,6 +547,27 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
} }
void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) {
if (!FLAG_vector_stores) return;
// This logic that computes the number of slots needed for vector store
// ics must mirror FullCodeGenerator::VisitArrayLiteral.
int array_index = 0;
for (; array_index < values()->length(); array_index++) {
Expression* subexpr = values()->at(array_index);
if (subexpr->IsSpread()) break;
if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
// We'll reuse the same literal slot for all of the non-constant
// subexpressions that use a keyed store IC.
literal_slot_ = spec->AddKeyedStoreICSlot();
return;
}
}
Handle<Object> MaterializedLiteral::GetBoilerplateValue(Expression* expression, Handle<Object> MaterializedLiteral::GetBoilerplateValue(Expression* expression,
Isolate* isolate) { Isolate* isolate) {
if (expression->IsLiteral()) { if (expression->IsLiteral()) {
@ -720,12 +736,12 @@ bool Call::IsUsingCallFeedbackSlot(Isolate* isolate) const {
void Call::AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void Call::AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) { FeedbackVectorSlotCache* cache) {
if (IsUsingCallFeedbackICSlot(isolate)) { if (IsUsingCallFeedbackICSlot(isolate)) {
ic_slot_ = spec->AddCallICSlot(); ic_slot_ = spec->AddCallICSlot();
} }
if (IsUsingCallFeedbackSlot(isolate)) { if (IsUsingCallFeedbackSlot(isolate)) {
slot_ = spec->AddStubSlot(); stub_slot_ = spec->AddGeneralSlot();
} }
} }
@ -745,7 +761,16 @@ Call::CallType Call::GetCallType(Isolate* isolate) const {
if (expression()->IsSuperCallReference()) return SUPER_CALL; if (expression()->IsSuperCallReference()) return SUPER_CALL;
Property* property = expression()->AsProperty(); Property* property = expression()->AsProperty();
return property != NULL ? PROPERTY_CALL : OTHER_CALL; if (property != nullptr) {
bool is_super = property->IsSuperAccess();
if (property->key()->IsPropertyName()) {
return is_super ? NAMED_SUPER_PROPERTY_CALL : NAMED_PROPERTY_CALL;
} else {
return is_super ? KEYED_SUPER_PROPERTY_CALL : KEYED_PROPERTY_CALL;
}
}
return OTHER_CALL;
} }
@ -917,8 +942,7 @@ class RegExpUnparser final : public RegExpVisitor {
public: public:
RegExpUnparser(std::ostream& os, Zone* zone) : os_(os), zone_(zone) {} RegExpUnparser(std::ostream& os, Zone* zone) : os_(os), zone_(zone) {}
void VisitCharacterRange(CharacterRange that); void VisitCharacterRange(CharacterRange that);
#define MAKE_CASE(Name) \ #define MAKE_CASE(Name) void* Visit##Name(RegExp##Name*, void* data) override;
virtual void* Visit##Name(RegExp##Name*, void* data) override;
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_CASE) FOR_EACH_REG_EXP_TREE_TYPE(MAKE_CASE)
#undef MAKE_CASE #undef MAKE_CASE
private: private:

276
deps/v8/src/ast.h

@ -90,7 +90,8 @@ namespace internal {
V(SuperPropertyReference) \ V(SuperPropertyReference) \
V(SuperCallReference) \ V(SuperCallReference) \
V(CaseClause) \ V(CaseClause) \
V(EmptyParentheses) V(EmptyParentheses) \
V(DoExpression)
#define AST_NODE_LIST(V) \ #define AST_NODE_LIST(V) \
DECLARATION_NODE_LIST(V) \ DECLARATION_NODE_LIST(V) \
@ -138,14 +139,14 @@ typedef ZoneList<Handle<Object>> ZoneObjectList;
friend class AstNodeFactory; friend class AstNodeFactory;
class ICSlotCache { class FeedbackVectorSlotCache {
public: public:
explicit ICSlotCache(Zone* zone) explicit FeedbackVectorSlotCache(Zone* zone)
: zone_(zone), : zone_(zone),
hash_map_(HashMap::PointersMatch, ZoneHashMap::kDefaultHashMapCapacity, hash_map_(HashMap::PointersMatch, ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)) {} ZoneAllocationPolicy(zone)) {}
void Put(Variable* variable, FeedbackVectorICSlot slot) { void Put(Variable* variable, FeedbackVectorSlot slot) {
ZoneHashMap::Entry* entry = hash_map_.LookupOrInsert( ZoneHashMap::Entry* entry = hash_map_.LookupOrInsert(
variable, ComputePointerHash(variable), ZoneAllocationPolicy(zone_)); variable, ComputePointerHash(variable), ZoneAllocationPolicy(zone_));
entry->value = reinterpret_cast<void*>(slot.ToInt()); entry->value = reinterpret_cast<void*>(slot.ToInt());
@ -230,7 +231,7 @@ class AstNode: public ZoneObject {
// vtable entry per node, something we don't want for space reasons. // vtable entry per node, something we don't want for space reasons.
virtual void AssignFeedbackVectorSlots(Isolate* isolate, virtual void AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec, FeedbackVectorSpec* spec,
ICSlotCache* cache) {} FeedbackVectorSlotCache* cache) {}
private: private:
// Hidden to prevent accidental usage. It would have to load the // Hidden to prevent accidental usage. It would have to load the
@ -458,10 +459,6 @@ class Block final : public BreakableStatement {
public: public:
DECLARE_NODE_TYPE(Block) DECLARE_NODE_TYPE(Block)
void AddStatement(Statement* statement, Zone* zone) {
statements_.Add(statement, zone);
}
ZoneList<Statement*>* statements() { return &statements_; } ZoneList<Statement*>* statements() { return &statements_; }
bool ignore_completion_value() const { return ignore_completion_value_; } bool ignore_completion_value() const { return ignore_completion_value_; }
@ -494,6 +491,29 @@ class Block final : public BreakableStatement {
}; };
class DoExpression final : public Expression {
public:
DECLARE_NODE_TYPE(DoExpression)
Block* block() { return block_; }
VariableProxy* result() { return result_; }
protected:
DoExpression(Zone* zone, Block* block, VariableProxy* result, int pos)
: Expression(zone, pos), block_(block), result_(result) {
DCHECK_NOT_NULL(block_);
DCHECK_NOT_NULL(result_);
}
static int parent_num_ids() { return Expression::num_ids(); }
private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; }
Block* block_;
VariableProxy* result_;
};
class Declaration : public AstNode { class Declaration : public AstNode {
public: public:
VariableProxy* proxy() const { return proxy_; } VariableProxy* proxy() const { return proxy_; }
@ -640,6 +660,7 @@ class IterationStatement : public BreakableStatement {
IterationStatement* AsIterationStatement() final { return this; } IterationStatement* AsIterationStatement() final { return this; }
Statement* body() const { return body_; } Statement* body() const { return body_; }
void set_body(Statement* s) { body_ = s; }
static int num_ids() { return parent_num_ids() + 1; } static int num_ids() { return parent_num_ids() + 1; }
BailoutId OsrEntryId() const { return BailoutId(local_id(0)); } BailoutId OsrEntryId() const { return BailoutId(local_id(0)); }
@ -777,20 +798,17 @@ class ForEachStatement : public IterationStatement {
Expression* subject() const { return subject_; } Expression* subject() const { return subject_; }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
FeedbackVectorICSlot EachFeedbackSlot() const { return each_slot_; } FeedbackVectorSlot EachFeedbackSlot() const { return each_slot_; }
protected: protected:
ForEachStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos) ForEachStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos)
: IterationStatement(zone, labels, pos), : IterationStatement(zone, labels, pos), each_(NULL), subject_(NULL) {}
each_(NULL),
subject_(NULL),
each_slot_(FeedbackVectorICSlot::Invalid()) {}
private: private:
Expression* each_; Expression* each_;
Expression* subject_; Expression* subject_;
FeedbackVectorICSlot each_slot_; FeedbackVectorSlot each_slot_;
}; };
@ -804,9 +822,9 @@ class ForInStatement final : public ForEachStatement {
// Type feedback information. // Type feedback information.
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override { FeedbackVectorSlotCache* cache) override {
ForEachStatement::AssignFeedbackVectorSlots(isolate, spec, cache); ForEachStatement::AssignFeedbackVectorSlots(isolate, spec, cache);
for_in_feedback_slot_ = spec->AddStubSlot(); for_in_feedback_slot_ = spec->AddGeneralSlot();
} }
FeedbackVectorSlot ForInFeedbackSlot() { FeedbackVectorSlot ForInFeedbackSlot() {
@ -830,9 +848,7 @@ class ForInStatement final : public ForEachStatement {
protected: protected:
ForInStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos) ForInStatement(Zone* zone, ZoneList<const AstRawString*>* labels, int pos)
: ForEachStatement(zone, labels, pos), : ForEachStatement(zone, labels, pos), for_in_type_(SLOW_FOR_IN) {}
for_in_type_(SLOW_FOR_IN),
for_in_feedback_slot_(FeedbackVectorSlot::Invalid()) {}
static int parent_num_ids() { return ForEachStatement::num_ids(); } static int parent_num_ids() { return ForEachStatement::num_ids(); }
private: private:
@ -988,6 +1004,7 @@ class WithStatement final : public Statement {
Scope* scope() { return scope_; } Scope* scope() { return scope_; }
Expression* expression() const { return expression_; } Expression* expression() const { return expression_; }
Statement* statement() const { return statement_; } Statement* statement() const { return statement_; }
void set_statement(Statement* s) { statement_ = s; }
void set_base_id(int id) { base_id_ = id; } void set_base_id(int id) { base_id_ = id; }
static int num_ids() { return parent_num_ids() + 1; } static int num_ids() { return parent_num_ids() + 1; }
@ -1092,6 +1109,9 @@ class IfStatement final : public Statement {
Statement* then_statement() const { return then_statement_; } Statement* then_statement() const { return then_statement_; }
Statement* else_statement() const { return else_statement_; } Statement* else_statement() const { return else_statement_; }
void set_then_statement(Statement* s) { then_statement_ = s; }
void set_else_statement(Statement* s) { else_statement_ = s; }
bool IsJump() const override { bool IsJump() const override {
return HasThenStatement() && then_statement()->IsJump() return HasThenStatement() && then_statement()->IsJump()
&& HasElseStatement() && else_statement()->IsJump(); && HasElseStatement() && else_statement()->IsJump();
@ -1131,6 +1151,7 @@ class IfStatement final : public Statement {
class TryStatement : public Statement { class TryStatement : public Statement {
public: public:
Block* try_block() const { return try_block_; } Block* try_block() const { return try_block_; }
void set_try_block(Block* b) { try_block_ = b; }
void set_base_id(int id) { base_id_ = id; } void set_base_id(int id) { base_id_ = id; }
static int num_ids() { return parent_num_ids() + 1; } static int num_ids() { return parent_num_ids() + 1; }
@ -1163,6 +1184,7 @@ class TryCatchStatement final : public TryStatement {
Scope* scope() { return scope_; } Scope* scope() { return scope_; }
Variable* variable() { return variable_; } Variable* variable() { return variable_; }
Block* catch_block() const { return catch_block_; } Block* catch_block() const { return catch_block_; }
void set_catch_block(Block* b) { catch_block_ = b; }
protected: protected:
TryCatchStatement(Zone* zone, Block* try_block, Scope* scope, TryCatchStatement(Zone* zone, Block* try_block, Scope* scope,
@ -1184,6 +1206,7 @@ class TryFinallyStatement final : public TryStatement {
DECLARE_NODE_TYPE(TryFinallyStatement) DECLARE_NODE_TYPE(TryFinallyStatement)
Block* finally_block() const { return finally_block_; } Block* finally_block() const { return finally_block_; }
void set_finally_block(Block* b) { finally_block_ = b; }
protected: protected:
TryFinallyStatement(Zone* zone, Block* try_block, Block* finally_block, TryFinallyStatement(Zone* zone, Block* try_block, Block* finally_block,
@ -1301,7 +1324,7 @@ class AstLiteralReindexer;
// Base class for literals that needs space in the corresponding JSFunction. // Base class for literals that needs space in the corresponding JSFunction.
class MaterializedLiteral : public Expression { class MaterializedLiteral : public Expression {
public: public:
virtual MaterializedLiteral* AsMaterializedLiteral() { return this; } MaterializedLiteral* AsMaterializedLiteral() final { return this; }
int literal_index() { return literal_index_; } int literal_index() { return literal_index_; }
@ -1383,13 +1406,14 @@ class ObjectLiteralProperty final : public ZoneObject {
bool is_static() const { return is_static_; } bool is_static() const { return is_static_; }
bool is_computed_name() const { return is_computed_name_; } bool is_computed_name() const { return is_computed_name_; }
FeedbackVectorICSlot GetSlot(int offset = 0) const { FeedbackVectorSlot GetSlot(int offset = 0) const {
if (slot_.IsInvalid()) return slot_; DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
int slot = slot_.ToInt(); return slots_[offset];
return FeedbackVectorICSlot(slot + offset); }
void SetSlot(FeedbackVectorSlot slot, int offset = 0) {
DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
slots_[offset] = slot;
} }
FeedbackVectorICSlot slot() const { return slot_; }
void set_slot(FeedbackVectorICSlot slot) { slot_ = slot; }
void set_receiver_type(Handle<Map> map) { receiver_type_ = map; } void set_receiver_type(Handle<Map> map) { receiver_type_ = map; }
@ -1405,7 +1429,7 @@ class ObjectLiteralProperty final : public ZoneObject {
private: private:
Expression* key_; Expression* key_;
Expression* value_; Expression* value_;
FeedbackVectorICSlot slot_; FeedbackVectorSlot slots_[2];
Kind kind_; Kind kind_;
bool emit_store_; bool emit_store_;
bool is_static_; bool is_static_;
@ -1486,7 +1510,7 @@ class ObjectLiteral final : public MaterializedLiteral {
// Object literals need one feedback slot for each non-trivial value, as well // Object literals need one feedback slot for each non-trivial value, as well
// as some slots for home objects. // as some slots for home objects.
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
protected: protected:
ObjectLiteral(Zone* zone, ZoneList<Property*>* properties, int literal_index, ObjectLiteral(Zone* zone, ZoneList<Property*>* properties, int literal_index,
@ -1498,9 +1522,7 @@ class ObjectLiteral final : public MaterializedLiteral {
fast_elements_(false), fast_elements_(false),
has_elements_(false), has_elements_(false),
may_store_doubles_(false), may_store_doubles_(false),
has_function_(has_function), has_function_(has_function) {}
slot_(FeedbackVectorICSlot::Invalid()) {
}
static int parent_num_ids() { return MaterializedLiteral::num_ids(); } static int parent_num_ids() { return MaterializedLiteral::num_ids(); }
private: private:
@ -1512,7 +1534,28 @@ class ObjectLiteral final : public MaterializedLiteral {
bool has_elements_; bool has_elements_;
bool may_store_doubles_; bool may_store_doubles_;
bool has_function_; bool has_function_;
FeedbackVectorICSlot slot_; FeedbackVectorSlot slot_;
};
// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable : public TemplateHashMap<Literal, ObjectLiteral::Accessors,
ZoneAllocationPolicy> {
public:
explicit AccessorTable(Zone* zone)
: TemplateHashMap<Literal, ObjectLiteral::Accessors,
ZoneAllocationPolicy>(Literal::Match,
ZoneAllocationPolicy(zone)),
zone_(zone) {}
Iterator lookup(Literal* literal) {
Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
if (it->second == NULL) it->second = new (zone_) ObjectLiteral::Accessors();
return it;
}
private:
Zone* zone_;
}; };
@ -1586,6 +1629,10 @@ class ArrayLiteral final : public MaterializedLiteral {
kIsStrong = 1 << 2 kIsStrong = 1 << 2
}; };
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) override;
FeedbackVectorSlot LiteralFeedbackSlot() const { return literal_slot_; }
protected: protected:
ArrayLiteral(Zone* zone, ZoneList<Expression*>* values, ArrayLiteral(Zone* zone, ZoneList<Expression*>* values,
int first_spread_index, int literal_index, bool is_strong, int first_spread_index, int literal_index, bool is_strong,
@ -1601,6 +1648,7 @@ class ArrayLiteral final : public MaterializedLiteral {
Handle<FixedArray> constant_elements_; Handle<FixedArray> constant_elements_;
ZoneList<Expression*>* values_; ZoneList<Expression*>* values_;
int first_spread_index_; int first_spread_index_;
FeedbackVectorSlot literal_slot_;
}; };
@ -1656,11 +1704,9 @@ class VariableProxy final : public Expression {
} }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
FeedbackVectorICSlot VariableFeedbackSlot() { FeedbackVectorSlot VariableFeedbackSlot() { return variable_feedback_slot_; }
return variable_feedback_slot_;
}
static int num_ids() { return parent_num_ids() + 1; } static int num_ids() { return parent_num_ids() + 1; }
BailoutId BeforeId() const { return BailoutId(local_id(0)); } BailoutId BeforeId() const { return BailoutId(local_id(0)); }
@ -1683,7 +1729,7 @@ class VariableProxy final : public Expression {
// Start with 16-bit (or smaller) field, which should get packed together // Start with 16-bit (or smaller) field, which should get packed together
// with Expression's trailing 16-bit field. // with Expression's trailing 16-bit field.
uint8_t bit_field_; uint8_t bit_field_;
FeedbackVectorICSlot variable_feedback_slot_; FeedbackVectorSlot variable_feedback_slot_;
union { union {
const AstRawString* raw_name_; // if !is_resolved_ const AstRawString* raw_name_; // if !is_resolved_
Variable* var_; // if is_resolved_ Variable* var_; // if is_resolved_
@ -1755,14 +1801,14 @@ class Property final : public Expression {
bool IsSuperAccess() { return obj()->IsSuperPropertyReference(); } bool IsSuperAccess() { return obj()->IsSuperPropertyReference(); }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override { FeedbackVectorSlotCache* cache) override {
FeedbackVectorSlotKind kind = key()->IsPropertyName() FeedbackVectorSlotKind kind = key()->IsPropertyName()
? FeedbackVectorSlotKind::LOAD_IC ? FeedbackVectorSlotKind::LOAD_IC
: FeedbackVectorSlotKind::KEYED_LOAD_IC; : FeedbackVectorSlotKind::KEYED_LOAD_IC;
property_feedback_slot_ = spec->AddSlot(kind); property_feedback_slot_ = spec->AddSlot(kind);
} }
FeedbackVectorICSlot PropertyFeedbackSlot() const { FeedbackVectorSlot PropertyFeedbackSlot() const {
return property_feedback_slot_; return property_feedback_slot_;
} }
@ -1780,7 +1826,6 @@ class Property final : public Expression {
bit_field_(IsForCallField::encode(false) | bit_field_(IsForCallField::encode(false) |
IsStringAccessField::encode(false) | IsStringAccessField::encode(false) |
InlineCacheStateField::encode(UNINITIALIZED)), InlineCacheStateField::encode(UNINITIALIZED)),
property_feedback_slot_(FeedbackVectorICSlot::Invalid()),
obj_(obj), obj_(obj),
key_(key) {} key_(key) {}
static int parent_num_ids() { return Expression::num_ids(); } static int parent_num_ids() { return Expression::num_ids(); }
@ -1793,7 +1838,7 @@ class Property final : public Expression {
class KeyTypeField : public BitField8<IcCheckType, 2, 1> {}; class KeyTypeField : public BitField8<IcCheckType, 2, 1> {};
class InlineCacheStateField : public BitField8<InlineCacheState, 3, 4> {}; class InlineCacheStateField : public BitField8<InlineCacheState, 3, 4> {};
uint8_t bit_field_; uint8_t bit_field_;
FeedbackVectorICSlot property_feedback_slot_; FeedbackVectorSlot property_feedback_slot_;
Expression* obj_; Expression* obj_;
Expression* key_; Expression* key_;
SmallMapList receiver_types_; SmallMapList receiver_types_;
@ -1809,11 +1854,11 @@ class Call final : public Expression {
// Type feedback information. // Type feedback information.
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
FeedbackVectorSlot CallFeedbackSlot() const { return slot_; } FeedbackVectorSlot CallFeedbackSlot() const { return stub_slot_; }
FeedbackVectorICSlot CallFeedbackICSlot() const { return ic_slot_; } FeedbackVectorSlot CallFeedbackICSlot() const { return ic_slot_; }
SmallMapList* GetReceiverTypes() override { SmallMapList* GetReceiverTypes() override {
if (expression()->IsProperty()) { if (expression()->IsProperty()) {
@ -1851,10 +1896,11 @@ class Call final : public Expression {
allocation_site_ = site; allocation_site_ = site;
} }
static int num_ids() { return parent_num_ids() + 3; } static int num_ids() { return parent_num_ids() + 4; }
BailoutId ReturnId() const { return BailoutId(local_id(0)); } BailoutId ReturnId() const { return BailoutId(local_id(0)); }
BailoutId EvalId() const { return BailoutId(local_id(1)); } BailoutId EvalId() const { return BailoutId(local_id(1)); }
BailoutId LookupId() const { return BailoutId(local_id(2)); } BailoutId LookupId() const { return BailoutId(local_id(2)); }
BailoutId CallId() const { return BailoutId(local_id(3)); }
bool is_uninitialized() const { bool is_uninitialized() const {
return IsUninitializedField::decode(bit_field_); return IsUninitializedField::decode(bit_field_);
@ -1867,7 +1913,10 @@ class Call final : public Expression {
POSSIBLY_EVAL_CALL, POSSIBLY_EVAL_CALL,
GLOBAL_CALL, GLOBAL_CALL,
LOOKUP_SLOT_CALL, LOOKUP_SLOT_CALL,
PROPERTY_CALL, NAMED_PROPERTY_CALL,
KEYED_PROPERTY_CALL,
NAMED_SUPER_PROPERTY_CALL,
KEYED_SUPER_PROPERTY_CALL,
SUPER_CALL, SUPER_CALL,
OTHER_CALL OTHER_CALL
}; };
@ -1886,8 +1935,6 @@ class Call final : public Expression {
Call(Zone* zone, Expression* expression, ZoneList<Expression*>* arguments, Call(Zone* zone, Expression* expression, ZoneList<Expression*>* arguments,
int pos) int pos)
: Expression(zone, pos), : Expression(zone, pos),
ic_slot_(FeedbackVectorICSlot::Invalid()),
slot_(FeedbackVectorSlot::Invalid()),
expression_(expression), expression_(expression),
arguments_(arguments), arguments_(arguments),
bit_field_(IsUninitializedField::encode(false)) { bit_field_(IsUninitializedField::encode(false)) {
@ -1900,8 +1947,8 @@ class Call final : public Expression {
private: private:
int local_id(int n) const { return base_id() + parent_num_ids() + n; } int local_id(int n) const { return base_id() + parent_num_ids() + n; }
FeedbackVectorICSlot ic_slot_; FeedbackVectorSlot ic_slot_;
FeedbackVectorSlot slot_; FeedbackVectorSlot stub_slot_;
Expression* expression_; Expression* expression_;
ZoneList<Expression*>* arguments_; ZoneList<Expression*>* arguments_;
Handle<JSFunction> target_; Handle<JSFunction> target_;
@ -1920,8 +1967,8 @@ class CallNew final : public Expression {
// Type feedback information. // Type feedback information.
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override { FeedbackVectorSlotCache* cache) override {
callnew_feedback_slot_ = spec->AddStubSlot(); callnew_feedback_slot_ = spec->AddGeneralSlot();
} }
FeedbackVectorSlot CallNewFeedbackSlot() { FeedbackVectorSlot CallNewFeedbackSlot() {
@ -1955,8 +2002,7 @@ class CallNew final : public Expression {
: Expression(zone, pos), : Expression(zone, pos),
expression_(expression), expression_(expression),
arguments_(arguments), arguments_(arguments),
is_monomorphic_(false), is_monomorphic_(false) {}
callnew_feedback_slot_(FeedbackVectorSlot::Invalid()) {}
static int parent_num_ids() { return Expression::num_ids(); } static int parent_num_ids() { return Expression::num_ids(); }
@ -2035,7 +2081,7 @@ class UnaryOperation final : public Expression {
BailoutId MaterializeTrueId() const { return BailoutId(local_id(0)); } BailoutId MaterializeTrueId() const { return BailoutId(local_id(0)); }
BailoutId MaterializeFalseId() const { return BailoutId(local_id(1)); } BailoutId MaterializeFalseId() const { return BailoutId(local_id(1)); }
virtual void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) override; void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) override;
protected: protected:
UnaryOperation(Zone* zone, Token::Value op, Expression* expression, int pos) UnaryOperation(Zone* zone, Token::Value op, Expression* expression, int pos)
@ -2080,7 +2126,7 @@ class BinaryOperation final : public Expression {
if (arg.IsJust()) fixed_right_arg_value_ = arg.FromJust(); if (arg.IsJust()) fixed_right_arg_value_ = arg.FromJust();
} }
virtual void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) override; void RecordToBooleanTypeFeedback(TypeFeedbackOracle* oracle) override;
protected: protected:
BinaryOperation(Zone* zone, Token::Value op, Expression* left, BinaryOperation(Zone* zone, Token::Value op, Expression* left,
@ -2151,8 +2197,8 @@ class CountOperation final : public Expression {
} }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
FeedbackVectorICSlot CountSlot() const { return slot_; } FeedbackVectorSlot CountSlot() const { return slot_; }
protected: protected:
CountOperation(Zone* zone, Token::Value op, bool is_prefix, Expression* expr, CountOperation(Zone* zone, Token::Value op, bool is_prefix, Expression* expr,
@ -2162,8 +2208,7 @@ class CountOperation final : public Expression {
IsPrefixField::encode(is_prefix) | KeyTypeField::encode(ELEMENT) | IsPrefixField::encode(is_prefix) | KeyTypeField::encode(ELEMENT) |
StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)), StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)),
type_(NULL), type_(NULL),
expression_(expr), expression_(expr) {}
slot_(FeedbackVectorICSlot::Invalid()) {}
static int parent_num_ids() { return Expression::num_ids(); } static int parent_num_ids() { return Expression::num_ids(); }
private: private:
@ -2180,7 +2225,7 @@ class CountOperation final : public Expression {
Type* type_; Type* type_;
Expression* expression_; Expression* expression_;
SmallMapList receiver_types_; SmallMapList receiver_types_;
FeedbackVectorICSlot slot_; FeedbackVectorSlot slot_;
}; };
@ -2324,8 +2369,8 @@ class Assignment final : public Expression {
} }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
FeedbackVectorICSlot AssignmentSlot() const { return slot_; } FeedbackVectorSlot AssignmentSlot() const { return slot_; }
protected: protected:
Assignment(Zone* zone, Token::Value op, Expression* target, Expression* value, Assignment(Zone* zone, Token::Value op, Expression* target, Expression* value,
@ -2347,7 +2392,7 @@ class Assignment final : public Expression {
Expression* value_; Expression* value_;
BinaryOperation* binary_operation_; BinaryOperation* binary_operation_;
SmallMapList receiver_types_; SmallMapList receiver_types_;
FeedbackVectorICSlot slot_; FeedbackVectorSlot slot_;
}; };
@ -2369,23 +2414,22 @@ class Yield final : public Expression {
// Type feedback information. // Type feedback information.
bool HasFeedbackSlots() const { return yield_kind() == kDelegating; } bool HasFeedbackSlots() const { return yield_kind() == kDelegating; }
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override { FeedbackVectorSlotCache* cache) override {
if (HasFeedbackSlots()) { if (HasFeedbackSlots()) {
yield_first_feedback_slot_ = spec->AddKeyedLoadICSlot(); yield_first_feedback_slot_ = spec->AddKeyedLoadICSlot();
spec->AddLoadICSlots(2); keyed_load_feedback_slot_ = spec->AddLoadICSlot();
done_feedback_slot_ = spec->AddLoadICSlot();
} }
} }
FeedbackVectorICSlot KeyedLoadFeedbackSlot() { FeedbackVectorSlot KeyedLoadFeedbackSlot() {
DCHECK(!HasFeedbackSlots() || !yield_first_feedback_slot_.IsInvalid()); DCHECK(!HasFeedbackSlots() || !yield_first_feedback_slot_.IsInvalid());
return yield_first_feedback_slot_; return yield_first_feedback_slot_;
} }
FeedbackVectorICSlot DoneFeedbackSlot() { FeedbackVectorSlot DoneFeedbackSlot() { return keyed_load_feedback_slot_; }
return KeyedLoadFeedbackSlot().next();
}
FeedbackVectorICSlot ValueFeedbackSlot() { return DoneFeedbackSlot().next(); } FeedbackVectorSlot ValueFeedbackSlot() { return done_feedback_slot_; }
protected: protected:
Yield(Zone* zone, Expression* generator_object, Expression* expression, Yield(Zone* zone, Expression* generator_object, Expression* expression,
@ -2393,14 +2437,15 @@ class Yield final : public Expression {
: Expression(zone, pos), : Expression(zone, pos),
generator_object_(generator_object), generator_object_(generator_object),
expression_(expression), expression_(expression),
yield_kind_(yield_kind), yield_kind_(yield_kind) {}
yield_first_feedback_slot_(FeedbackVectorICSlot::Invalid()) {}
private: private:
Expression* generator_object_; Expression* generator_object_;
Expression* expression_; Expression* expression_;
Kind yield_kind_; Kind yield_kind_;
FeedbackVectorICSlot yield_first_feedback_slot_; FeedbackVectorSlot yield_first_feedback_slot_;
FeedbackVectorSlot keyed_load_feedback_slot_;
FeedbackVectorSlot done_feedback_slot_;
}; };
@ -2642,14 +2687,14 @@ class ClassLiteral final : public Expression {
// Object literals need one feedback slot for each non-trivial value, as well // Object literals need one feedback slot for each non-trivial value, as well
// as some slots for home objects. // as some slots for home objects.
void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec, void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
ICSlotCache* cache) override; FeedbackVectorSlotCache* cache) override;
bool NeedsProxySlot() const { bool NeedsProxySlot() const {
return FLAG_vector_stores && scope() != NULL && return FLAG_vector_stores && class_variable_proxy() != nullptr &&
class_variable_proxy()->var()->IsUnallocated(); class_variable_proxy()->var()->IsUnallocated();
} }
FeedbackVectorICSlot ProxySlot() const { return slot_; } FeedbackVectorSlot ProxySlot() const { return slot_; }
protected: protected:
ClassLiteral(Zone* zone, const AstRawString* name, Scope* scope, ClassLiteral(Zone* zone, const AstRawString* name, Scope* scope,
@ -2663,9 +2708,7 @@ class ClassLiteral final : public Expression {
extends_(extends), extends_(extends),
constructor_(constructor), constructor_(constructor),
properties_(properties), properties_(properties),
end_position_(end_position), end_position_(end_position) {}
slot_(FeedbackVectorICSlot::Invalid()) {
}
static int parent_num_ids() { return Expression::num_ids(); } static int parent_num_ids() { return Expression::num_ids(); }
@ -2679,7 +2722,7 @@ class ClassLiteral final : public Expression {
FunctionLiteral* constructor_; FunctionLiteral* constructor_;
ZoneList<Property*>* properties_; ZoneList<Property*>* properties_;
int end_position_; int end_position_;
FeedbackVectorICSlot slot_; FeedbackVectorSlot slot_;
}; };
@ -2816,8 +2859,7 @@ class RegExpDisjunction final : public RegExpTree {
public: public:
explicit RegExpDisjunction(ZoneList<RegExpTree*>* alternatives); explicit RegExpDisjunction(ZoneList<RegExpTree*>* alternatives);
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpDisjunction* AsDisjunction() override; RegExpDisjunction* AsDisjunction() override;
Interval CaptureRegisters() override; Interval CaptureRegisters() override;
bool IsDisjunction() override; bool IsDisjunction() override;
@ -2840,8 +2882,7 @@ class RegExpAlternative final : public RegExpTree {
public: public:
explicit RegExpAlternative(ZoneList<RegExpTree*>* nodes); explicit RegExpAlternative(ZoneList<RegExpTree*>* nodes);
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpAlternative* AsAlternative() override; RegExpAlternative* AsAlternative() override;
Interval CaptureRegisters() override; Interval CaptureRegisters() override;
bool IsAlternative() override; bool IsAlternative() override;
@ -2869,8 +2910,7 @@ class RegExpAssertion final : public RegExpTree {
}; };
explicit RegExpAssertion(AssertionType type) : assertion_type_(type) { } explicit RegExpAssertion(AssertionType type) : assertion_type_(type) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpAssertion* AsAssertion() override; RegExpAssertion* AsAssertion() override;
bool IsAssertion() override; bool IsAssertion() override;
bool IsAnchoredAtStart() override; bool IsAnchoredAtStart() override;
@ -2915,8 +2955,7 @@ class RegExpCharacterClass final : public RegExpTree {
: set_(type), : set_(type),
is_negated_(false) { } is_negated_(false) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpCharacterClass* AsCharacterClass() override; RegExpCharacterClass* AsCharacterClass() override;
bool IsCharacterClass() override; bool IsCharacterClass() override;
bool IsTextElement() override { return true; } bool IsTextElement() override { return true; }
@ -2952,8 +2991,7 @@ class RegExpAtom final : public RegExpTree {
public: public:
explicit RegExpAtom(Vector<const uc16> data) : data_(data) { } explicit RegExpAtom(Vector<const uc16> data) : data_(data) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpAtom* AsAtom() override; RegExpAtom* AsAtom() override;
bool IsAtom() override; bool IsAtom() override;
bool IsTextElement() override { return true; } bool IsTextElement() override { return true; }
@ -2971,8 +3009,7 @@ class RegExpText final : public RegExpTree {
public: public:
explicit RegExpText(Zone* zone) : elements_(2, zone), length_(0) {} explicit RegExpText(Zone* zone) : elements_(2, zone), length_(0) {}
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpText* AsText() override; RegExpText* AsText() override;
bool IsText() override; bool IsText() override;
bool IsTextElement() override { return true; } bool IsTextElement() override { return true; }
@ -3006,8 +3043,7 @@ class RegExpQuantifier final : public RegExpTree {
} }
} }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
static RegExpNode* ToNode(int min, static RegExpNode* ToNode(int min,
int max, int max,
bool is_greedy, bool is_greedy,
@ -3042,8 +3078,7 @@ class RegExpCapture final : public RegExpTree {
explicit RegExpCapture(RegExpTree* body, int index) explicit RegExpCapture(RegExpTree* body, int index)
: body_(body), index_(index) { } : body_(body), index_(index) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
static RegExpNode* ToNode(RegExpTree* body, static RegExpNode* ToNode(RegExpTree* body,
int index, int index,
RegExpCompiler* compiler, RegExpCompiler* compiler,
@ -3078,8 +3113,7 @@ class RegExpLookahead final : public RegExpTree {
capture_from_(capture_from) { } capture_from_(capture_from) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpLookahead* AsLookahead() override; RegExpLookahead* AsLookahead() override;
Interval CaptureRegisters() override; Interval CaptureRegisters() override;
bool IsLookahead() override; bool IsLookahead() override;
@ -3104,8 +3138,7 @@ class RegExpBackReference final : public RegExpTree {
explicit RegExpBackReference(RegExpCapture* capture) explicit RegExpBackReference(RegExpCapture* capture)
: capture_(capture) { } : capture_(capture) { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpBackReference* AsBackReference() override; RegExpBackReference* AsBackReference() override;
bool IsBackReference() override; bool IsBackReference() override;
int min_match() override { return 0; } int min_match() override { return 0; }
@ -3121,8 +3154,7 @@ class RegExpEmpty final : public RegExpTree {
public: public:
RegExpEmpty() { } RegExpEmpty() { }
void* Accept(RegExpVisitor* visitor, void* data) override; void* Accept(RegExpVisitor* visitor, void* data) override;
virtual RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) override;
RegExpNode* on_success) override;
RegExpEmpty* AsEmpty() override; RegExpEmpty* AsEmpty() override;
bool IsEmpty() override; bool IsEmpty() override;
int min_match() override { return 0; } int min_match() override { return 0; }
@ -3167,23 +3199,25 @@ class AstVisitor BASE_EMBEDDED {
\ \
bool CheckStackOverflow() { \ bool CheckStackOverflow() { \
if (stack_overflow_) return true; \ if (stack_overflow_) return true; \
StackLimitCheck check(isolate_); \ if (GetCurrentStackPosition() < stack_limit_) { \
if (!check.HasOverflowed()) return false; \ stack_overflow_ = true; \
stack_overflow_ = true; \ return true; \
return true; \ } \
return false; \
} \ } \
\ \
private: \ private: \
void InitializeAstVisitor(Isolate* isolate, Zone* zone) { \ void InitializeAstVisitor(Isolate* isolate) { \
isolate_ = isolate; \ stack_limit_ = isolate->stack_guard()->real_climit(); \
zone_ = zone; \ stack_overflow_ = false; \
} \
\
void InitializeAstVisitor(uintptr_t stack_limit) { \
stack_limit_ = stack_limit; \
stack_overflow_ = false; \ stack_overflow_ = false; \
} \ } \
Zone* zone() { return zone_; } \
Isolate* isolate() { return isolate_; } \
\ \
Isolate* isolate_; \ uintptr_t stack_limit_; \
Zone* zone_; \
bool stack_overflow_ bool stack_overflow_
@ -3197,6 +3231,8 @@ class AstNodeFactory final BASE_EMBEDDED {
parser_zone_(ast_value_factory->zone()), parser_zone_(ast_value_factory->zone()),
ast_value_factory_(ast_value_factory) {} ast_value_factory_(ast_value_factory) {}
AstValueFactory* ast_value_factory() const { return ast_value_factory_; }
VariableDeclaration* NewVariableDeclaration( VariableDeclaration* NewVariableDeclaration(
VariableProxy* proxy, VariableMode mode, Scope* scope, int pos, VariableProxy* proxy, VariableMode mode, Scope* scope, int pos,
bool is_class_declaration = false, int declaration_group_start = -1) { bool is_class_declaration = false, int declaration_group_start = -1) {
@ -3567,6 +3603,11 @@ class AstNodeFactory final BASE_EMBEDDED {
NativeFunctionLiteral(parser_zone_, name, extension, pos); NativeFunctionLiteral(parser_zone_, name, extension, pos);
} }
DoExpression* NewDoExpression(Block* block, Variable* result_var, int pos) {
VariableProxy* result = NewVariableProxy(result_var, pos);
return new (parser_zone_) DoExpression(parser_zone_, block, result, pos);
}
ThisFunction* NewThisFunction(int pos) { ThisFunction* NewThisFunction(int pos) {
return new (local_zone_) ThisFunction(local_zone_, pos); return new (local_zone_) ThisFunction(local_zone_, pos);
} }
@ -3622,6 +3663,7 @@ class AstNodeFactory final BASE_EMBEDDED {
}; };
} } // namespace v8::internal } // namespace internal
} // namespace v8
#endif // V8_AST_H_ #endif // V8_AST_H_

4
deps/v8/src/background-parsing-task.h

@ -54,7 +54,7 @@ class BackgroundParsingTask : public ScriptCompiler::ScriptStreamingTask {
StreamedSource* source_; // Not owned. StreamedSource* source_; // Not owned.
int stack_size_; int stack_size_;
}; };
} } // namespace internal
} // namespace v8::internal } // namespace v8
#endif // V8_BACKGROUND_PARSING_TASK_H_ #endif // V8_BACKGROUND_PARSING_TASK_H_

35
deps/v8/src/bailout-reason.h

@ -29,7 +29,6 @@ namespace internal {
"Assignment to parameter, function uses arguments object") \ "Assignment to parameter, function uses arguments object") \
V(kAssignmentToParameterInArgumentsObject, \ V(kAssignmentToParameterInArgumentsObject, \
"Assignment to parameter in arguments object") \ "Assignment to parameter in arguments object") \
V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache") \
V(kBadValueContextForArgumentsObjectValue, \ V(kBadValueContextForArgumentsObjectValue, \
"Bad value context for arguments object value") \ "Bad value context for arguments object value") \
V(kBadValueContextForArgumentsValue, \ V(kBadValueContextForArgumentsValue, \
@ -58,6 +57,7 @@ namespace internal {
V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \ V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \
V(kDontDeleteCellsCannotContainTheHole, \ V(kDontDeleteCellsCannotContainTheHole, \
"DontDelete cells can't contain the hole") \ "DontDelete cells can't contain the hole") \
V(kDoExpression, "Do expression encountered") \
V(kDoPushArgumentNotImplementedForDoubleType, \ V(kDoPushArgumentNotImplementedForDoubleType, \
"DoPushArgument not implemented for double type") \ "DoPushArgument not implemented for double type") \
V(kEliminatedBoundsCheckFailed, "Eliminated bounds check failed") \ V(kEliminatedBoundsCheckFailed, "Eliminated bounds check failed") \
@ -105,14 +105,6 @@ namespace internal {
V(kInputGPRIsExpectedToHaveUpper32Cleared, \ V(kInputGPRIsExpectedToHaveUpper32Cleared, \
"Input GPR is expected to have upper32 cleared") \ "Input GPR is expected to have upper32 cleared") \
V(kInputStringTooLong, "Input string too long") \ V(kInputStringTooLong, "Input string too long") \
V(kInstanceofStubUnexpectedCallSiteCacheCheck, \
"InstanceofStub unexpected call site cache (check)") \
V(kInstanceofStubUnexpectedCallSiteCacheCmp1, \
"InstanceofStub unexpected call site cache (cmp 1)") \
V(kInstanceofStubUnexpectedCallSiteCacheCmp2, \
"InstanceofStub unexpected call site cache (cmp 2)") \
V(kInstanceofStubUnexpectedCallSiteCacheMov, \
"InstanceofStub unexpected call site cache (mov)") \
V(kInteger32ToSmiFieldWritingToNonSmiLocation, \ V(kInteger32ToSmiFieldWritingToNonSmiLocation, \
"Integer32ToSmiField writing to non-smi location") \ "Integer32ToSmiField writing to non-smi location") \
V(kInvalidCaptureReferenced, "Invalid capture referenced") \ V(kInvalidCaptureReferenced, "Invalid capture referenced") \
@ -183,7 +175,8 @@ namespace internal {
V(kRegisterWasClobbered, "Register was clobbered") \ V(kRegisterWasClobbered, "Register was clobbered") \
V(kRememberedSetPointerInNewSpace, "Remembered set pointer is in new space") \ V(kRememberedSetPointerInNewSpace, "Remembered set pointer is in new space") \
V(kReturnAddressNotFoundInFrame, "Return address not found in frame") \ V(kReturnAddressNotFoundInFrame, "Return address not found in frame") \
V(kScriptContext, "Allocation of script context") \ V(kSloppyFunctionExpectsJSReceiverReceiver, \
"Sloppy function expects JSReceiver as receiver.") \
V(kSmiAdditionOverflow, "Smi addition overflow") \ V(kSmiAdditionOverflow, "Smi addition overflow") \
V(kSmiSubtractionOverflow, "Smi subtraction overflow") \ V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
V(kSpread, "Spread in array literal") \ V(kSpread, "Spread in array literal") \
@ -192,20 +185,6 @@ namespace internal {
V(kSuperReference, "Super reference") \ V(kSuperReference, "Super reference") \
V(kTheCurrentStackPointerIsBelowCsp, \ V(kTheCurrentStackPointerIsBelowCsp, \
"The current stack pointer is below csp") \ "The current stack pointer is below csp") \
V(kTheInstructionShouldBeALis, "The instruction should be a lis") \
V(kTheInstructionShouldBeALui, "The instruction should be a lui") \
V(kTheInstructionShouldBeAnOri, "The instruction should be an ori") \
V(kTheInstructionShouldBeAnOris, "The instruction should be an oris") \
V(kTheInstructionShouldBeALi, "The instruction should be a li") \
V(kTheInstructionShouldBeASldi, "The instruction should be a sldi") \
V(kTheInstructionToPatchShouldBeAnLdrLiteral, \
"The instruction to patch should be a ldr literal") \
V(kTheInstructionToPatchShouldBeALis, \
"The instruction to patch should be a lis") \
V(kTheInstructionToPatchShouldBeALui, \
"The instruction to patch should be a lui") \
V(kTheInstructionToPatchShouldBeAnOri, \
"The instruction to patch should be an ori") \
V(kTheSourceAndDestinationAreTheSame, \ V(kTheSourceAndDestinationAreTheSame, \
"The source and destination are the same") \ "The source and destination are the same") \
V(kTheStackWasCorruptedByMacroAssemblerCall, \ V(kTheStackWasCorruptedByMacroAssemblerCall, \
@ -220,7 +199,6 @@ namespace internal {
"ToOperand Unsupported double immediate") \ "ToOperand Unsupported double immediate") \
V(kTryCatchStatement, "TryCatchStatement") \ V(kTryCatchStatement, "TryCatchStatement") \
V(kTryFinallyStatement, "TryFinallyStatement") \ V(kTryFinallyStatement, "TryFinallyStatement") \
V(kUnableToEncodeValueAsSmi, "Unable to encode value as smi") \
V(kUnalignedAllocationInNewSpace, "Unaligned allocation in new space") \ V(kUnalignedAllocationInNewSpace, "Unaligned allocation in new space") \
V(kUnalignedCellInWriteBarrier, "Unaligned cell in write barrier") \ V(kUnalignedCellInWriteBarrier, "Unaligned cell in write barrier") \
V(kUnexpectedAllocationTop, "Unexpected allocation top") \ V(kUnexpectedAllocationTop, "Unexpected allocation top") \
@ -254,15 +232,11 @@ namespace internal {
"Unexpected number of pre-allocated property fields") \ "Unexpected number of pre-allocated property fields") \
V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \ V(kUnexpectedFPCRMode, "Unexpected FPCR mode.") \
V(kUnexpectedSmi, "Unexpected smi value") \ V(kUnexpectedSmi, "Unexpected smi value") \
V(kUnexpectedStringFunction, "Unexpected String function") \ V(kUnexpectedStackPointer, "The stack pointer is not the expected value") \
V(kUnexpectedStringType, "Unexpected string type") \ V(kUnexpectedStringType, "Unexpected string type") \
V(kUnexpectedStringWrapperInstanceSize, \
"Unexpected string wrapper instance size") \
V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \ V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
"Unexpected type for RegExp data, FixedArray expected") \ "Unexpected type for RegExp data, FixedArray expected") \
V(kUnexpectedValue, "Unexpected value") \ V(kUnexpectedValue, "Unexpected value") \
V(kUnexpectedUnusedPropertiesOfStringWrapper, \
"Unexpected unused properties of string wrapper") \
V(kUnsupportedConstCompoundAssignment, \ V(kUnsupportedConstCompoundAssignment, \
"Unsupported const compound assignment") \ "Unsupported const compound assignment") \
V(kUnsupportedCountOperationWithConst, \ V(kUnsupportedCountOperationWithConst, \
@ -275,6 +249,7 @@ namespace internal {
V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \ V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
V(kUnsupportedPhiUseOfConstVariable, \ V(kUnsupportedPhiUseOfConstVariable, \
"Unsupported phi use of const variable") \ "Unsupported phi use of const variable") \
V(kUnexpectedReturnFromThrow, "Unexpectedly returned from a throw") \
V(kUnsupportedTaggedImmediate, "Unsupported tagged immediate") \ V(kUnsupportedTaggedImmediate, "Unsupported tagged immediate") \
V(kVariableResolvedToWithContext, "Variable resolved to with context") \ V(kVariableResolvedToWithContext, "Variable resolved to with context") \
V(kWeShouldNotHaveAnEmptyLexicalContext, \ V(kWeShouldNotHaveAnEmptyLexicalContext, \

46
deps/v8/src/base.isolate

@ -2,8 +2,19 @@
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
{ {
'includes': [
'../third_party/icu/icu.isolate',
'../build/config/win/msvs_dependencies.isolate',
],
'conditions': [ 'conditions': [
['v8_use_external_startup_data==1', { ['use_custom_libcxx==1', {
'variables': {
'files': [
'<(PRODUCT_DIR)/lib/libc++.so',
],
},
}],
['v8_use_snapshot=="true" and v8_use_external_startup_data==1', {
'variables': { 'variables': {
'files': [ 'files': [
'<(PRODUCT_DIR)/natives_blob.bin', '<(PRODUCT_DIR)/natives_blob.bin',
@ -11,5 +22,38 @@
], ],
}, },
}], }],
['OS=="linux" and component=="shared_library" and target_arch=="ia32"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/lib/',
],
},
}],
['tsan==1', {
'variables': {
'files': [
'../tools/sanitizers/tsan_suppressions.txt',
],
},
}],
['OS=="linux" and (asan==1 or cfi_vptr==1 or msan==1 or tsan==1)', {
'variables': {
'files': [
# For llvm-symbolizer.
'../third_party/llvm-build/Release+Asserts/lib/libstdc++.so.6',
],
},
}],
['asan==1 or cfi_vptr==1 or msan==1 or tsan==1', {
'variables': {
'files': [
'../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer<(EXECUTABLE_SUFFIX)',
],
},
}],
# Workaround for https://code.google.com/p/swarming/issues/detail?id=211
['asan==0 or cfi_vptr==0 or msan==0 or tsan==0', {
'variables': {},
}],
], ],
} }

3
deps/v8/src/base/atomicops.h

@ -133,7 +133,8 @@ Atomic64 Acquire_Load(volatile const Atomic64* ptr);
Atomic64 Release_Load(volatile const Atomic64* ptr); Atomic64 Release_Load(volatile const Atomic64* ptr);
#endif // V8_HOST_ARCH_64_BIT #endif // V8_HOST_ARCH_64_BIT
} } // namespace v8::base } // namespace base
} // namespace v8
// Include our platform specific implementation. // Include our platform specific implementation.
#if defined(THREAD_SANITIZER) #if defined(THREAD_SANITIZER)

3
deps/v8/src/base/atomicops_internals_arm64_gcc.h

@ -311,6 +311,7 @@ inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
return *ptr; return *ptr;
} }
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_ #endif // V8_BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_

6
deps/v8/src/base/atomicops_internals_arm_gcc.h

@ -59,7 +59,8 @@ inline void MemoryBarrier() {
// variant of the target architecture is being used. This tests against // variant of the target architecture is being used. This tests against
// any known ARMv6 or ARMv7 variant, where it is possible to directly // any known ARMv6 or ARMv7 variant, where it is possible to directly
// use ldrex/strex instructions to implement fast atomic operations. // use ldrex/strex instructions to implement fast atomic operations.
#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || \ #if defined(__ARM_ARCH_8A__) || \
defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || \
defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || \ defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || \
defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \ defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || \ defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || \
@ -296,6 +297,7 @@ inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) {
inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { return *ptr; } inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) { return *ptr; }
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_ #endif // V8_BASE_ATOMICOPS_INTERNALS_ARM_GCC_H_

3
deps/v8/src/base/atomicops_internals_atomicword_compat.h

@ -92,7 +92,8 @@ inline AtomicWord Release_Load(volatile const AtomicWord* ptr) {
reinterpret_cast<volatile const Atomic32*>(ptr)); reinterpret_cast<volatile const Atomic32*>(ptr));
} }
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // !defined(V8_HOST_ARCH_64_BIT) #endif // !defined(V8_HOST_ARCH_64_BIT)

3
deps/v8/src/base/atomicops_internals_mac.h

@ -210,6 +210,7 @@ inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
#endif // defined(__LP64__) #endif // defined(__LP64__)
#undef ATOMICOPS_COMPILER_BARRIER #undef ATOMICOPS_COMPILER_BARRIER
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_MAC_H_ #endif // V8_BASE_ATOMICOPS_INTERNALS_MAC_H_

3
deps/v8/src/base/atomicops_internals_mips64_gcc.h

@ -302,6 +302,7 @@ inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
return *ptr; return *ptr;
} }
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_MIPS_GCC_H_ #endif // V8_BASE_ATOMICOPS_INTERNALS_MIPS_GCC_H_

3
deps/v8/src/base/atomicops_internals_mips_gcc.h

@ -155,6 +155,7 @@ inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
return *ptr; return *ptr;
} }
} } // namespace v8::base } // namespace base
} // namespace v8
#endif // V8_BASE_ATOMICOPS_INTERNALS_MIPS_GCC_H_ #endif // V8_BASE_ATOMICOPS_INTERNALS_MIPS_GCC_H_

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save