
Upgrade V8 to 3.9.5

v0.7.4-release
isaacs 13 years ago
parent commit 68a0c56a7d
100 changed files:

  1. deps/v8/ChangeLog (24)
  2. deps/v8/SConstruct (2)
  3. deps/v8/build/common.gypi (26)
  4. deps/v8/build/mipsu.gypi (1)
  5. deps/v8/include/v8-profiler.h (4)
  6. deps/v8/src/api.cc (9)
  7. deps/v8/src/arm/builtins-arm.cc (38)
  8. deps/v8/src/arm/code-stubs-arm.cc (4)
  9. deps/v8/src/arm/full-codegen-arm.cc (35)
  10. deps/v8/src/arm/lithium-codegen-arm.cc (1)
  11. deps/v8/src/arm/macro-assembler-arm.cc (25)
  12. deps/v8/src/arm/macro-assembler-arm.h (7)
  13. deps/v8/src/arm/stub-cache-arm.cc (68)
  14. deps/v8/src/ast.cc (399)
  15. deps/v8/src/ast.h (1353)
  16. deps/v8/src/builtins.cc (2)
  17. deps/v8/src/codegen.cc (13)
  18. deps/v8/src/compiler.cc (20)
  19. deps/v8/src/compiler.h (6)
  20. deps/v8/src/cpu-profiler.cc (4)
  21. deps/v8/src/d8.cc (2)
  22. deps/v8/src/flag-definitions.h (36)
  23. deps/v8/src/frames-inl.h (14)
  24. deps/v8/src/frames.cc (2)
  25. deps/v8/src/frames.h (20)
  26. deps/v8/src/full-codegen.cc (118)
  27. deps/v8/src/full-codegen.h (6)
  28. deps/v8/src/handles.cc (4)
  29. deps/v8/src/heap.cc (13)
  30. deps/v8/src/heap.h (5)
  31. deps/v8/src/hydrogen-instructions.cc (7)
  32. deps/v8/src/hydrogen-instructions.h (42)
  33. deps/v8/src/hydrogen.cc (312)
  34. deps/v8/src/hydrogen.h (33)
  35. deps/v8/src/ia32/code-stubs-ia32.cc (4)
  36. deps/v8/src/ia32/full-codegen-ia32.cc (34)
  37. deps/v8/src/ia32/lithium-codegen-ia32.cc (1)
  38. deps/v8/src/ia32/macro-assembler-ia32.cc (25)
  39. deps/v8/src/ia32/macro-assembler-ia32.h (4)
  40. deps/v8/src/ia32/stub-cache-ia32.cc (75)
  41. deps/v8/src/ic-inl.h (5)
  42. deps/v8/src/ic.cc (25)
  43. deps/v8/src/ic.h (1)
  44. deps/v8/src/incremental-marking.cc (3)
  45. deps/v8/src/isolate.cc (114)
  46. deps/v8/src/isolate.h (10)
  47. deps/v8/src/list-inl.h (4)
  48. deps/v8/src/macro-assembler.h (16)
  49. deps/v8/src/mark-compact.cc (41)
  50. deps/v8/src/mark-compact.h (7)
  51. deps/v8/src/messages.js (6)
  52. deps/v8/src/mips/assembler-mips.cc (4)
  53. deps/v8/src/mips/builtins-mips.cc (39)
  54. deps/v8/src/mips/code-stubs-mips.cc (2)
  55. deps/v8/src/mips/ic-mips.cc (62)
  56. deps/v8/src/mips/lithium-codegen-mips.cc (1)
  57. deps/v8/src/mips/macro-assembler-mips.cc (77)
  58. deps/v8/src/mips/macro-assembler-mips.h (23)
  59. deps/v8/src/mips/stub-cache-mips.cc (68)
  60. deps/v8/src/objects-inl.h (20)
  61. deps/v8/src/objects.cc (116)
  62. deps/v8/src/objects.h (53)
  63. deps/v8/src/parser.cc (458)
  64. deps/v8/src/parser.h (96)
  65. deps/v8/src/platform-freebsd.cc (2)
  66. deps/v8/src/platform-linux.cc (2)
  67. deps/v8/src/platform-macos.cc (2)
  68. deps/v8/src/platform-openbsd.cc (2)
  69. deps/v8/src/platform-solaris.cc (2)
  70. deps/v8/src/platform-win32.cc (2)
  71. deps/v8/src/preparser.h (10)
  72. deps/v8/src/prettyprinter.cc (454)
  73. deps/v8/src/prettyprinter.h (103)
  74. deps/v8/src/profile-generator.cc (88)
  75. deps/v8/src/profile-generator.h (11)
  76. deps/v8/src/property-details.h (4)
  77. deps/v8/src/property.h (2)
  78. deps/v8/src/rewriter.cc (40)
  79. deps/v8/src/runtime-profiler.cc (116)
  80. deps/v8/src/runtime-profiler.h (17)
  81. deps/v8/src/runtime.cc (90)
  82. deps/v8/src/runtime.h (2)
  83. deps/v8/src/scanner.cc (18)
  84. deps/v8/src/scanner.h (20)
  85. deps/v8/src/scopes.cc (60)
  86. deps/v8/src/scopes.h (42)
  87. deps/v8/src/spaces.h (7)
  88. deps/v8/src/token.h (3)
  89. deps/v8/src/v8.cc (5)
  90. deps/v8/src/version.cc (2)
  91. deps/v8/src/x64/code-stubs-x64.cc (4)
  92. deps/v8/src/x64/full-codegen-x64.cc (35)
  93. deps/v8/src/x64/lithium-codegen-x64.cc (5)
  94. deps/v8/src/x64/macro-assembler-x64.cc (24)
  95. deps/v8/src/x64/macro-assembler-x64.h (4)
  96. deps/v8/src/x64/stub-cache-x64.cc (74)
  97. deps/v8/test/cctest/test-api.cc (131)
  98. deps/v8/test/cctest/test-ast.cc (5)
  99. deps/v8/test/cctest/test-heap-profiler.cc (4)
  100. deps/v8/test/cctest/test-mark-compact.cc (10)

deps/v8/ChangeLog (24)

@@ -1,3 +1,27 @@
+2012-02-09: Version 3.9.5
+
+        Removed unused command line flags.
+
+        Performance and stability improvements on all platforms.
+
+
+2012-02-08: Version 3.9.4
+
+        Properly initialize element-transitioning array literals on ARM.
+        (issue 1930)
+
+        Bug fixes on all platforms.
+
+
+2012-02-07: Version 3.9.3
+
+        When rethrowing an exception, print the stack trace of its original
+        site instead of rethrow site (Chromium issue 60240).
+
+        Increased size of small stacks from 32k to 64k to avoid hitting limits
+        in Chromium (Chromium issue 112843).
+
+
 2012-02-06: Version 3.9.2
 
         Add timestamp to --trace-gc output. (issue 1932)

deps/v8/SConstruct (2)

@@ -128,7 +128,7 @@ LIBRARY_FLAGS = {
       'CPPDEFINES': ['__C99FEATURES__'],
       'CPPPATH' : [src_dir, '/usr/local/include'],
       'LIBPATH' : ['/usr/local/lib'],
-      'CCFLAGS': ['-ansi', '-fno-omit-frame-pointer'],
+      'CCFLAGS': ['-ansi'],
    },
    'os:netbsd': {
      'CPPPATH' : [src_dir, '/usr/pkg/include'],

deps/v8/build/common.gypi (26)

@@ -169,6 +169,28 @@
         'V8_TARGET_ARCH_MIPS',
       ],
       'conditions': [
+        [ 'target_arch=="mips"', {
+          'target_conditions': [
+            ['_toolset=="target"', {
+              'cflags': ['-EL'],
+              'ldflags': ['-EL'],
+              'conditions': [
+                [ 'v8_use_mips_abi_hardfloat=="true"', {
+                  'cflags': ['-mhard-float'],
+                  'ldflags': ['-mhard-float'],
+                }, {
+                  'cflags': ['-msoft-float'],
+                  'ldflags': ['-msoft-float'],
+                }],
+                ['mips_arch_variant=="mips32r2"', {
+                  'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+                }, {
+                  'cflags': ['-mips32', '-Wa,-mips32'],
+                }],
+              ],
+            }],
+          ],
+        }],
         [ 'v8_can_use_fpu_instructions=="true"', {
           'defines': [
             'CAN_USE_FPU_INSTRUCTIONS',
@@ -184,6 +206,9 @@
             '__mips_soft_float=1'
           ],
         }],
+        ['mips_arch_variant=="mips32r2"', {
+          'defines': ['_MIPS_ARCH_MIPS32R2',],
+        }],
         # The MIPS assembler assumes the host is 32 bits,
         # so force building 32-bit host tools.
         ['host_arch=="x64"', {
@@ -327,6 +352,7 @@
     }],  # OS=="mac"
     ['OS=="win"', {
       'msvs_configuration_attributes': {
+        'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
         'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
         'CharacterSet': '1',
       },

deps/v8/build/mipsu.gypi (1)

@@ -29,5 +29,6 @@
   'variables': {
     'target_arch': 'ia32',
     'v8_target_arch': 'mips',
+    'mips_arch_variant': 'mips32r2',
   },
 }

deps/v8/include/v8-profiler.h (4)

@@ -255,7 +255,9 @@ class V8EXPORT HeapGraphNode {
     kClosure = 5,     // Function closure.
     kRegExp = 6,      // RegExp.
     kHeapNumber = 7,  // Number stored in the heap.
-    kNative = 8       // Native object (not from V8 heap).
+    kNative = 8,      // Native object (not from V8 heap).
+    kSynthetic = 9    // Synthetic object, usualy used for grouping
+                      // snapshot items together.
   };

   /** Returns node type (see HeapGraphNode::Type). */
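
Note: the new kSynthetic type is visible to embedders through
HeapGraphNode::GetType(). A minimal sketch of an embedder-side check
(assuming a snapshot already taken via the heap profiler; the helper name
CountSyntheticNodes is illustrative, not part of the API):

    #include <v8-profiler.h>

    // Count synthetic grouping nodes in a heap snapshot (sketch).
    static int CountSyntheticNodes(const v8::HeapSnapshot* snapshot) {
      int synthetic = 0;
      for (int i = 0; i < snapshot->GetNodesCount(); ++i) {
        const v8::HeapGraphNode* node = snapshot->GetNode(i);
        if (node->GetType() == v8::HeapGraphNode::kSynthetic) ++synthetic;
      }
      return synthetic;
    }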

deps/v8/src/api.cc (9)

@@ -6088,9 +6088,7 @@ static void SetFlagsFromString(const char* flags) {
 void Testing::PrepareStressRun(int run) {
   static const char* kLazyOptimizations =
-      "--prepare-always-opt --nolimit-inlining "
-      "--noalways-opt --noopt-eagerly";
-  static const char* kEagerOptimizations = "--opt-eagerly";
+      "--prepare-always-opt --nolimit-inlining --noalways-opt";
   static const char* kForcedOptimizations = "--always-opt";

   // If deoptimization stressed turn on frequent deoptimization. If no value
@@ -6107,15 +6105,12 @@ void Testing::PrepareStressRun(int run) {
   if (run == GetStressRuns() - 1) {
     SetFlagsFromString(kForcedOptimizations);
   } else {
-    SetFlagsFromString(kEagerOptimizations);
     SetFlagsFromString(kLazyOptimizations);
   }
 #else
   if (run == GetStressRuns() - 1) {
     SetFlagsFromString(kForcedOptimizations);
-  } else if (run == GetStressRuns() - 2) {
-    SetFlagsFromString(kEagerOptimizations);
-  } else {
+  } else if (run != GetStressRuns() - 2) {
     SetFlagsFromString(kLazyOptimizations);
   }
 #endif

deps/v8/src/arm/builtins-arm.cc (38)

@@ -895,23 +895,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // r4: JSObject
     __ bind(&allocated);
     __ push(r4);
+    __ push(r4);

-    // Push the function and the allocated receiver from the stack.
-    // sp[0]: receiver (newly allocated object)
-    // sp[1]: constructor function
-    // sp[2]: number of arguments (smi-tagged)
-    __ ldr(r1, MemOperand(sp, kPointerSize));
-    __ push(r1);  // Constructor function.
-    __ push(r4);  // Receiver.
-
-    // Reload the number of arguments from the stack.
-    // r1: constructor function
+    // Reload the number of arguments and the constructor from the stack.
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
+    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

     // Set up pointer to last argument.
     __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@@ -921,14 +913,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,

     // Copy arguments and receiver to the expression stack.
     // r0: number of arguments
-    // r2: address of last argument (caller sp)
     // r1: constructor function
+    // r2: address of last argument (caller sp)
     // r3: number of arguments (smi-tagged)
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
     Label loop, entry;
     __ b(&entry);
     __ bind(&loop);
@@ -954,13 +945,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                       NullCallWrapper(), CALL_AS_METHOD);
     }

-    // Pop the function from the stack.
-    // sp[0]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ pop();
-
     // Restore context from the frame.
     // r0: result
     // sp[0]: receiver

deps/v8/src/arm/code-stubs-arm.cc (4)

@@ -3964,7 +3964,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // handler block in this code object, so its index is 0.
   __ bind(&invoke);
   // Must preserve r0-r4, r5-r7 are available.
-  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
   // If an exception not caught by another handler occurs, this handler
   // returns control to the code after the bl(&invoke) above, which
   // restores all kCalleeSaved registers (including cp and fp) to their
@@ -7358,7 +7358,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
   // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
   __ bind(&double_elements);
   __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
-  __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10,
+  __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
                                  &slow_elements);
   __ Ret();
 }

deps/v8/src/arm/full-codegen-arm.cc (35)

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -139,6 +139,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   }
 #endif

+  // We can optionally optimize based on counters rather than statistical
+  // sampling.
+  if (info->ShouldSelfOptimize()) {
+    if (FLAG_trace_opt) {
+      PrintF("[adding self-optimization header to %s]\n",
+             *info->function()->debug_name()->ToCString());
+    }
+    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+    JSGlobalPropertyCell* cell;
+    if (maybe_cell->To(&cell)) {
+      __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
+      __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+      __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
+      __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+      Handle<Code> compile_stub(
+          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+      __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
+    }
+  }
+
   // Strict mode functions and builtins need to replace the receiver
   // with undefined when called as functions (without an explicit
   // receiver object). r5 is zero for method calls and non-zero for
@@ -265,11 +286,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     // For named function expressions, declare the function name as a
     // constant.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      int ignored = 0;
       VariableProxy* proxy = scope()->function();
       ASSERT(proxy->var()->mode() == CONST ||
              proxy->var()->mode() == CONST_HARMONY);
-      EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+      ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+      EmitDeclaration(proxy, proxy->var()->mode(), NULL);
     }
     VisitDeclarations(scope()->declarations());
   }
@@ -706,8 +727,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,

 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                         VariableMode mode,
-                                        FunctionLiteral* function,
-                                        int* global_count) {
+                                        FunctionLiteral* function) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
@@ -716,7 +736,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       (mode == CONST || mode == CONST_HARMONY || mode == LET);
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++(*global_count);
+      ++global_count_;
       break;

     case Variable::PARAMETER:
@@ -801,9 +821,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
 }

-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   // The context is the first argument.
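
Note: the added prologue is the code-generation half of the
self-optimization scheme guarded by CompilationInfo::ShouldSelfOptimize()
(see compiler.cc below): each call decrements a Smi counter kept in a
JSGlobalPropertyCell, and when the counter hits zero the function jumps into
the lazy-recompile builtin. A rough C++ model of the emitted control flow
(names here are illustrative, not V8 API):

    // Illustrative model of the self-optimization header.
    struct CounterCell { int value; };  // stands in for the JSGlobalPropertyCell

    void RequestLazyRecompile();        // stands in for Builtins::kLazyRecompile

    void OnFunctionEntry(CounterCell* cell) {
      // Every invocation pays one tick; the cell starts at
      // Compiler::kCallsUntilPrimitiveOpt (200, per compiler.h below).
      if (--cell->value == 0) {
        RequestLazyRecompile();
      }
    }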

deps/v8/src/arm/lithium-codegen-arm.cc (1)

@@ -673,7 +673,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
-  ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);

deps/v8/src/arm/macro-assembler-arm.cc (25)

@@ -1188,8 +1188,7 @@ void MacroAssembler::DebugBreak() {
 #endif

-void MacroAssembler::PushTryHandler(CodeLocation try_location,
-                                    HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -1201,28 +1200,20 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
   // For the JSEntry handler, we must preserve r0-r4, r5-r7 are available.
   // We will build up the handler from the bottom by pushing on the stack.
-  // First compute the state.
-  unsigned state = StackHandler::OffsetField::encode(handler_index);
-  if (try_location == IN_JAVASCRIPT) {
-    state |= (type == TRY_CATCH_HANDLER)
-        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
-        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
-  } else {
-    ASSERT(try_location == IN_JS_ENTRY);
-    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
-  }
-
   // Set up the code object (r5) and the state (r6) for pushing.
+  unsigned state =
+      StackHandler::IndexField::encode(handler_index) |
+      StackHandler::KindField::encode(kind);
   mov(r5, Operand(CodeObject()));
   mov(r6, Operand(state));

   // Push the frame pointer, context, state, and code object.
-  if (try_location == IN_JAVASCRIPT) {
-    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
-  } else {
+  if (kind == StackHandler::JS_ENTRY) {
     mov(r7, Operand(Smi::FromInt(0)));  // Indicates no context.
     mov(ip, Operand(0, RelocInfo::NONE));  // NULL frame pointer.
     stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
+  } else {
+    stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
   }

   // Link the current handler as the next handler.
@@ -1330,7 +1321,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));

   bind(&check_kind);
-  STATIC_ASSERT(StackHandler::ENTRY == 0);
+  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
   ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
   tst(r2, Operand(StackHandler::KindField::kMask));
   b(ne, &fetch_next);
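
Note: PushTryHandler now receives the handler kind directly and builds the
same five-word frame in both branches; only JS_ENTRY substitutes a zero
context and a NULL frame pointer. The frame layout, sketched as a struct
(field order inferred from the stm above plus the next-handler link; see
StackHandlerConstants for the authoritative offsets):

    #include <cstdint>

    // Sketch of the 5-word stack handler frame.
    struct StackHandlerFrame {
      uintptr_t next;     // link to the previous handler, pushed last
      uintptr_t code;     // r5: code object
      uintptr_t state;    // r6: KindField | IndexField
      uintptr_t context;  // cp, or Smi zero for JS_ENTRY
      uintptr_t fp;       // frame pointer, or NULL for JS_ENTRY
    };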

deps/v8/src/arm/macro-assembler-arm.h (7)

@@ -582,9 +582,7 @@ class MacroAssembler: public Assembler {
   // Exception handling

   // Push a new try handler and link into try handler chain.
-  void PushTryHandler(CodeLocation try_location,
-                      HandlerType type,
-                      int handler_index);
+  void PushTryHandler(StackHandler::Kind kind, int handler_index);

   // Unlink the stack handler on top of the stack from the try handler chain.
   // Must preserve the result register.
@@ -803,7 +801,8 @@ class MacroAssembler: public Assembler {
   // Check to see if maybe_number can be stored as a double in
   // FastDoubleElements. If it can, store it at the index specified by key in
-  // the FastDoubleElements array elements, otherwise jump to fail.
+  // the FastDoubleElements array elements. Otherwise jump to fail, in which
+  // case scratch2, scratch3 and scratch4 are unmodified.
   void StoreNumberToDoubleElements(Register value_reg,
                                    Register key_reg,
                                    Register receiver_reg,

deps/v8/src/arm/stub-cache-arm.cc (68)

@@ -1475,28 +1475,30 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       __ Ret();
     } else {
       Label call_builtin;
+      Register elements = r3;
+      Register end_elements = r5;
+      // Get the elements array of the object.
+      __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+      // Check that the elements are in fast mode and writable.
+      __ CheckMap(elements,
+                  r0,
+                  Heap::kFixedArrayMapRootIndex,
+                  &call_builtin,
+                  DONT_DO_SMI_CHECK);

       if (argc == 1) {  // Otherwise fall through to call the builtin.
         Label attempt_to_grow_elements;

-        Register elements = r6;
-        Register end_elements = r5;
-        // Get the elements array of the object.
-        __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
-        // Check that the elements are in fast mode and writable.
-        __ CheckMap(elements,
-                    r0,
-                    Heap::kFixedArrayMapRootIndex,
-                    &call_builtin,
-                    DONT_DO_SMI_CHECK);
-
         // Get the array's length into r0 and calculate new length.
         __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
         STATIC_ASSERT(kSmiTagSize == 1);
         STATIC_ASSERT(kSmiTag == 0);
         __ add(r0, r0, Operand(Smi::FromInt(argc)));

-        // Get the element's length.
+        // Get the elements' length.
         __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

         // Check if we could survive without allocation.
@@ -1511,7 +1513,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
         // Save new length.
         __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-        // Push the element.
+        // Store the value.
         // We may need a register containing the address end_elements below,
         // so write back the value in end_elements.
         __ add(end_elements, elements,
@@ -1526,13 +1528,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(

         __ bind(&with_write_barrier);

-        __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
-        __ CheckFastObjectElements(r6, r6, &call_builtin);
+        __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+        if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+          Label fast_object, not_fast_object;
+          __ CheckFastObjectElements(r3, r7, &not_fast_object);
+          __ jmp(&fast_object);
+          // In case of fast smi-only, convert to fast object, otherwise bail out.
+          __ bind(&not_fast_object);
+          __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+          // edx: receiver
+          // r3: map
+          __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                                 FAST_ELEMENTS,
+                                                 r3,
+                                                 r7,
+                                                 &call_builtin);
+          __ mov(r2, receiver);
+          ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+          __ bind(&fast_object);
+        } else {
+          __ CheckFastObjectElements(r3, r3, &call_builtin);
+        }

         // Save new length.
         __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-        // Push the element.
+        // Store the value.
         // We may need a register containing the address end_elements below,
         // so write back the value in end_elements.
         __ add(end_elements, elements,
@@ -1578,25 +1600,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
                Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
         __ add(end_elements, end_elements, Operand(kEndElementsOffset));
         __ mov(r7, Operand(new_space_allocation_top));
-        __ ldr(r6, MemOperand(r7));
-        __ cmp(end_elements, r6);
+        __ ldr(r3, MemOperand(r7));
+        __ cmp(end_elements, r3);
         __ b(ne, &call_builtin);

         __ mov(r9, Operand(new_space_allocation_limit));
         __ ldr(r9, MemOperand(r9));
-        __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
-        __ cmp(r6, r9);
+        __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
+        __ cmp(r3, r9);
         __ b(hi, &call_builtin);

         // We fit and could grow elements.
         // Update new_space_allocation_top.
-        __ str(r6, MemOperand(r7));
+        __ str(r3, MemOperand(r7));
         // Push the argument.
         __ str(r2, MemOperand(end_elements));
         // Fill the rest with holes.
-        __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+        __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
         for (int i = 1; i < kAllocationDelta; i++) {
-          __ str(r6, MemOperand(end_elements, i * kPointerSize));
+          __ str(r3, MemOperand(end_elements, i * kPointerSize));
         }

         // Update elements' and array's sizes.

deps/v8/src/ast.cc (399)

@@ -126,18 +126,7 @@ Assignment::Assignment(Isolate* isolate,
       assignment_id_(GetNextId(isolate)),
       block_start_(false),
       block_end_(false),
-      is_monomorphic_(false) {
-  ASSERT(Token::IsAssignmentOp(op));
-  if (is_compound()) {
-    binary_operation_ =
-        new(isolate->zone()) BinaryOperation(isolate,
-                                             binary_op(),
-                                             target,
-                                             value,
-                                             pos + 1);
-    compound_load_id_ = GetNextId(isolate);
-  }
-}
+      is_monomorphic_(false) { }

 Token::Value Assignment::binary_op() const {
@@ -197,9 +186,7 @@ ObjectLiteral::Property::Property(Literal* key, Expression* value) {

 ObjectLiteral::Property::Property(bool is_getter, FunctionLiteral* value) {
-  Isolate* isolate = Isolate::Current();
   emit_store_ = true;
-  key_ = new(isolate->zone()) Literal(isolate, value->name());
   value_ = value;
   kind_ = is_getter ? GETTER : SETTER;
 }
@@ -427,224 +414,11 @@ bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
 // Inlining support

 bool Declaration::IsInlineable() const {
-  return proxy()->var()->IsStackAllocated() && fun() == NULL;
+  return proxy()->var()->IsStackAllocated();
 }

-bool TargetCollector::IsInlineable() const {
-  UNREACHABLE();
-  return false;
-}
-
-
-bool ForInStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool WithStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool SwitchStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryCatchStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool TryFinallyStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool DebuggerStatement::IsInlineable() const {
-  return false;
-}
-
-
-bool Throw::IsInlineable() const {
-  return exception()->IsInlineable();
-}
-
-
-bool MaterializedLiteral::IsInlineable() const {
-  // TODO(1322): Allow materialized literals.
-  return false;
-}
-
-
-bool FunctionLiteral::IsInlineable() const {
-  // TODO(1322): Allow materialized literals.
-  return false;
-}
-
-
-bool ThisFunction::IsInlineable() const {
-  return true;
-}
-
-
-bool SharedFunctionInfoLiteral::IsInlineable() const {
-  return false;
-}
-
-
-bool ForStatement::IsInlineable() const {
-  return (init() == NULL || init()->IsInlineable())
-      && (cond() == NULL || cond()->IsInlineable())
-      && (next() == NULL || next()->IsInlineable())
-      && body()->IsInlineable();
-}
-
-
-bool WhileStatement::IsInlineable() const {
-  return cond()->IsInlineable()
-      && body()->IsInlineable();
-}
-
-
-bool DoWhileStatement::IsInlineable() const {
-  return cond()->IsInlineable()
-      && body()->IsInlineable();
-}
-
-
-bool ContinueStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool BreakStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool EmptyStatement::IsInlineable() const {
-  return true;
-}
-
-
-bool Literal::IsInlineable() const {
-  return true;
-}
-
-
-bool Block::IsInlineable() const {
-  const int count = statements_.length();
-  for (int i = 0; i < count; ++i) {
-    if (!statements_[i]->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool ExpressionStatement::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool IfStatement::IsInlineable() const {
-  return condition()->IsInlineable()
-      && then_statement()->IsInlineable()
-      && else_statement()->IsInlineable();
-}
-
-
-bool ReturnStatement::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool Conditional::IsInlineable() const {
-  return condition()->IsInlineable() && then_expression()->IsInlineable() &&
-      else_expression()->IsInlineable();
-}
-
-
-bool VariableProxy::IsInlineable() const {
-  return var()->IsUnallocated()
-      || var()->IsStackAllocated()
-      || var()->IsContextSlot();
-}
-
-
-bool Assignment::IsInlineable() const {
-  return target()->IsInlineable() && value()->IsInlineable();
-}
-
-
-bool Property::IsInlineable() const {
-  return obj()->IsInlineable() && key()->IsInlineable();
-}
-
-
-bool Call::IsInlineable() const {
-  if (!expression()->IsInlineable()) return false;
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool CallNew::IsInlineable() const {
-  if (!expression()->IsInlineable()) return false;
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool CallRuntime::IsInlineable() const {
-  // Don't try to inline JS runtime calls because we don't (currently) even
-  // optimize them.
-  if (is_jsruntime()) return false;
-  // Don't inline the %_ArgumentsLength or %_Arguments because their
-  // implementation will not work.  There is no stack frame to get them
-  // from.
-  if (function()->intrinsic_type == Runtime::INLINE &&
-      (name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
-       name()->IsEqualTo(CStrVector("_Arguments")))) {
-    return false;
-  }
-  const int count = arguments()->length();
-  for (int i = 0; i < count; ++i) {
-    if (!arguments()->at(i)->IsInlineable()) return false;
-  }
-  return true;
-}
-
-
-bool UnaryOperation::IsInlineable() const {
-  return expression()->IsInlineable();
-}
-
-
-bool BinaryOperation::IsInlineable() const {
-  return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CompareOperation::IsInlineable() const {
-  return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CountOperation::IsInlineable() const {
-  return expression()->IsInlineable();
-}
+bool VariableDeclaration::IsInlineable() const {
+  return Declaration::IsInlineable() && fun() == NULL;
+}
@@ -1214,4 +988,169 @@ CaseClause::CaseClause(Isolate* isolate,
       entry_id_(AstNode::GetNextId(isolate)) {
 }

+#define INCREASE_NODE_COUNT(NodeType) \
+  void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+    increase_node_count(); \
+  }
+
+INCREASE_NODE_COUNT(VariableDeclaration)
+INCREASE_NODE_COUNT(ModuleDeclaration)
+INCREASE_NODE_COUNT(ModuleLiteral)
+INCREASE_NODE_COUNT(ModuleVariable)
+INCREASE_NODE_COUNT(ModulePath)
+INCREASE_NODE_COUNT(ModuleUrl)
+INCREASE_NODE_COUNT(Block)
+INCREASE_NODE_COUNT(ExpressionStatement)
+INCREASE_NODE_COUNT(EmptyStatement)
+INCREASE_NODE_COUNT(IfStatement)
+INCREASE_NODE_COUNT(ContinueStatement)
+INCREASE_NODE_COUNT(BreakStatement)
+INCREASE_NODE_COUNT(ReturnStatement)
+INCREASE_NODE_COUNT(Conditional)
+INCREASE_NODE_COUNT(Literal)
+INCREASE_NODE_COUNT(Assignment)
+INCREASE_NODE_COUNT(Throw)
+INCREASE_NODE_COUNT(Property)
+INCREASE_NODE_COUNT(UnaryOperation)
+INCREASE_NODE_COUNT(CountOperation)
+INCREASE_NODE_COUNT(BinaryOperation)
+INCREASE_NODE_COUNT(CompareOperation)
+INCREASE_NODE_COUNT(ThisFunction)
+
+#undef INCREASE_NODE_COUNT
+
+
+void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
+  increase_node_count();
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitTryFinallyStatement(
+    TryFinallyStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
+    SharedFunctionInfoLiteral* node) {
+  increase_node_count();
+  add_flag(kDontOptimize);
+  add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
+  increase_node_count();
+  // In theory, we'd have to add:
+  // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
+  // However, node->var() is usually not bound yet at VariableProxy creation
+  // time, and LOOKUP variables only result from constructs that cannot
+  // be inlined anyway.
+}
+
+
+void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitObjectLiteral(ObjectLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
+  increase_node_count();
+  add_flag(kDontInline);  // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitCall(Call* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallNew(CallNew* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
+  increase_node_count();
+  add_flag(kDontSelfOptimize);
+  if (node->is_jsruntime()) {
+    // Don't try to inline JS runtime calls because we don't (currently) even
+    // optimize them.
+    add_flag(kDontInline);
+  } else if (node->function()->intrinsic_type == Runtime::INLINE &&
+      (node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
+       node->name()->IsEqualTo(CStrVector("_Arguments")))) {
+    // Don't inline the %_ArgumentsLength or %_Arguments because their
+    // implementation will not work.  There is no stack frame to get them
+    // from.
+    add_flag(kDontInline);
+  }
+}
+
+
 } }  // namespace v8::internal
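
Note: AstConstructionVisitor is the replacement for the per-node
IsInlineable() predicates deleted above. Instead of re-walking the AST to
decide whether a function may be inlined or self-optimized, the facts are
accumulated once while the tree is built. A condensed sketch of the pattern
(simplified standalone types, not the actual V8 classes):

    #include <bitset>

    enum AstFlag { kDontInline, kDontOptimize, kDontSelfOptimize, kNumAstFlags };

    // Sketch: collect per-function facts during AST construction.
    class ConstructionStats {
     public:
      // Every node bumps the count; some node kinds also veto later passes.
      void OnForInStatement() {
        ++node_count_;
        flags_.set(kDontOptimize);
        flags_.set(kDontInline);
        flags_.set(kDontSelfOptimize);
      }
      void OnLiteral() { ++node_count_; }  // plain nodes only count

      int node_count() const { return node_count_; }
      bool dont_inline() const { return flags_.test(kDontInline); }

     private:
      int node_count_ = 0;
      std::bitset<kNumAstFlags> flags_;
    };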

deps/v8/src/ast.h (1353)

File diff suppressed because it is too large

deps/v8/src/builtins.cc (2)

@@ -978,7 +978,7 @@ BUILTIN(ArrayConcat) {
       return CallJsBuiltin(isolate, "ArrayConcat", args);
     }

-    if (!JSArray::cast(arg)->HasFastElements()) {
+    if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
       elements_kind = FAST_ELEMENTS;
     }
   }
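
Note: with smi-only arrays, ArrayConcat starts from the narrowest elements
kind and widens as soon as any argument's array is not smi-only. The
decision, restated as a standalone C++ sketch (the function is illustrative;
the real code inspects JSArray maps):

    enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS };

    // Sketch: pick the result elements kind for a concat operation.
    ElementsKind ResultKind(const bool* arg_is_smi_only, int argc) {
      ElementsKind kind = FAST_SMI_ONLY_ELEMENTS;
      for (int i = 0; i < argc; ++i) {
        if (!arg_is_smi_only[i]) kind = FAST_ELEMENTS;  // widen, never narrow
      }
      return kind;
    }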

deps/v8/src/codegen.cc (13)

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -62,18 +62,15 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
 #ifdef DEBUG
   bool print_source = false;
   bool print_ast = false;
-  bool print_json_ast = false;
   const char* ftype;

   if (Isolate::Current()->bootstrapper()->IsActive()) {
     print_source = FLAG_print_builtin_source;
     print_ast = FLAG_print_builtin_ast;
-    print_json_ast = FLAG_print_builtin_json_ast;
     ftype = "builtin";
   } else {
     print_source = FLAG_print_source;
     print_ast = FLAG_print_ast;
-    print_json_ast = FLAG_print_json_ast;
     Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
     if (print_source && !filter.is_empty()) {
       print_source = info->function()->name()->IsEqualTo(filter);
@@ -81,9 +78,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
     if (print_ast && !filter.is_empty()) {
       print_ast = info->function()->name()->IsEqualTo(filter);
     }
-    if (print_json_ast && !filter.is_empty()) {
-      print_json_ast = info->function()->name()->IsEqualTo(filter);
-    }
     ftype = "user-defined";
   }
@@ -102,11 +96,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
     PrintF("--- AST ---\n%s\n",
            AstPrinter().PrintProgram(info->function()));
   }
-
-  if (print_json_ast) {
-    JsonAstBuilder builder;
-    PrintF("%s", builder.BuildProgram(info->function()));
-  }
 #endif  // DEBUG
 }

deps/v8/src/compiler.cc (20)

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -110,6 +110,18 @@ void CompilationInfo::DisableOptimization() {
 }

+// Primitive functions are unlikely to be picked up by the stack-walking
+// profiler, so they trigger their own optimization when they're called
+// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
+bool CompilationInfo::ShouldSelfOptimize() {
+  return FLAG_self_optimization &&
+      FLAG_crankshaft &&
+      !Serializer::enabled() &&
+      !function()->flags()->Contains(kDontSelfOptimize) &&
+      (shared_info().is_null() || !shared_info()->optimization_disabled());
+}
+
+
 void CompilationInfo::AbortOptimization() {
   Handle<Code> code(shared_info()->code());
   SetCode(code);
@@ -652,6 +664,9 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
     // Check the function has compiled code.
     ASSERT(shared->is_compiled());
     shared->set_code_age(0);
+    shared->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
+    shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+    shared->set_ast_node_count(lit->ast_node_count());

     if (info->AllowOptimize() && !shared->optimization_disabled()) {
       // If we're asked to always optimize, we compile the optimized
@@ -750,6 +765,9 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
   function_info->set_language_mode(lit->language_mode());
   function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
   function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
+  function_info->set_ast_node_count(lit->ast_node_count());
+  function_info->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
+  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
 }

deps/v8/src/compiler.h (6)

@@ -168,6 +168,9 @@ class CompilationInfo BASE_EMBEDDED {
     return V8::UseCrankshaft() && !closure_.is_null();
   }

+  // Determines whether or not to insert a self-optimization header.
+  bool ShouldSelfOptimize();
+
   // Disable all optimization attempts of this info for the rest of the
   // current compilation pipeline.
   void AbortOptimization();
@@ -280,6 +283,9 @@ class Compiler : public AllStatic {

   static const int kMaxInliningLevels = 3;

+  // Call count before primitive functions trigger their own optimization.
+  static const int kCallsUntilPrimitiveOpt = 200;
+
   // All routines return a SharedFunctionInfo.
   // If an error occurs an exception is raised and the return handle
   // contains NULL.

deps/v8/src/cpu-profiler.cc (4)

@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -42,7 +42,7 @@ namespace internal {
 static const int kEventsBufferSize = 256 * KB;
 static const int kTickSamplesBufferChunkSize = 64 * KB;
 static const int kTickSamplesBufferChunksCount = 16;
-static const int kProfilerStackSize = 32 * KB;
+static const int kProfilerStackSize = 64 * KB;

 ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)

deps/v8/src/d8.cc (2)

@@ -1288,7 +1288,7 @@ bool Shell::SetOptions(int argc, char* argv[]) {
       options.use_preemption = true;
       argv[i] = NULL;
 #endif  // V8_SHARED
-    } else if (strcmp(argv[i], "--no-preemption") == 0) {
+    } else if (strcmp(argv[i], "--nopreemption") == 0) {
 #ifdef V8_SHARED
       printf("D8 with shared library does not support multi-threading\n");
       return false;

deps/v8/src/flag-definitions.h (36)

@@ -109,11 +109,13 @@ private:
 // Flags for experimental language features.
 DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
 DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
+DEFINE_bool(harmony_modules, false, "enable harmony modules")
 DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
 DEFINE_bool(harmony_collections, false,
             "enable harmony collections (sets, maps, and weak maps)")
 DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
 DEFINE_implication(harmony, harmony_scoping)
+DEFINE_implication(harmony, harmony_modules)
 DEFINE_implication(harmony, harmony_proxies)
 DEFINE_implication(harmony, harmony_collections)
@@ -136,7 +138,6 @@ DEFINE_bool(use_gvn, true, "use hydrogen global value numbering")
 DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
 DEFINE_bool(use_inlining, true, "use function inlining")
 DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
-DEFINE_bool(eliminate_empty_blocks, true, "eliminate empty blocks")
 DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion")
 DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
             true,
@@ -164,12 +165,19 @@ DEFINE_int(stress_runs, 0, "number of stress runs")
 DEFINE_bool(optimize_closures, true, "optimize closures")
 DEFINE_int(loop_weight, 1, "loop weight for representation inference")

+// Experimental profiler changes.
+DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
+DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+DEFINE_bool(self_optimization, false,
+            "primitive functions trigger their own optimization")
+
+DEFINE_implication(experimental_profiler, watch_ic_patching)
+DEFINE_implication(experimental_profiler, self_optimization)
+
 // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
 DEFINE_bool(debug_code, false,
             "generate extra code (assertions) for debugging")
 DEFINE_bool(code_comments, false, "emit comments in code disassembly")
-DEFINE_bool(peephole_optimization, true,
-            "perform peephole optimizations in assembly code")
 DEFINE_bool(enable_sse2, true,
             "enable use of SSE2 instructions if available")
 DEFINE_bool(enable_sse3, true,
@@ -219,10 +227,8 @@ DEFINE_bool(lazy, true, "use lazy compilation")
 DEFINE_bool(trace_opt, false, "trace lazy optimization")
 DEFINE_bool(trace_opt_stats, false, "trace lazy optimization statistics")
 DEFINE_bool(opt, true, "use adaptive optimizations")
-DEFINE_bool(opt_eagerly, false, "be more eager when adaptively optimizing")
 DEFINE_bool(always_opt, false, "always try to optimize functions")
 DEFINE_bool(prepare_always_opt, false, "prepare for turning on always opt")
-DEFINE_bool(deopt, true, "support deoptimization")
 DEFINE_bool(trace_deopt, false, "trace deoptimization")

 // compiler.cc
@@ -303,11 +309,10 @@ DEFINE_bool(native_code_counters, false,
 DEFINE_bool(always_compact, false, "Perform compaction on every full GC")
 DEFINE_bool(lazy_sweeping, true,
             "Use lazy sweeping for old pointer and data spaces")
-DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
-            "Flush code caches in maps during mark compact cycle.")
 DEFINE_bool(never_compact, false,
             "Never perform compaction on full GC - testing only")
-DEFINE_bool(compact_code_space, false, "Compact code space")
+DEFINE_bool(compact_code_space, true,
+            "Compact code space on full non-incremental collections")
 DEFINE_bool(cleanup_code_caches_at_gc, true,
             "Flush inline caches prior to mark compact collection and "
             "flush code caches in maps during mark compact cycle.")
@@ -315,14 +320,6 @@ DEFINE_int(random_seed, 0,
            "Default seed for initializing random generator "
            "(0, the default, means to use system random).")

-DEFINE_bool(canonicalize_object_literal_maps, true,
-            "Canonicalize maps for object literals.")
-
-DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1,
-           "Maximum number of pages in map space which still allows to encode "
-           "forwarding pointers. That's actually a constant, but it's useful "
-           "to control it with a flag for better testing.")
-
 // objects.cc
 DEFINE_bool(use_verbose_printer, true, "allows verbose printing")
@@ -443,9 +440,6 @@ DEFINE_bool(print_builtin_source, false,
             "pretty print source code for builtins")
 DEFINE_bool(print_ast, false, "print source AST")
 DEFINE_bool(print_builtin_ast, false, "print source AST for builtins")
-DEFINE_bool(print_json_ast, false, "print source AST as JSON")
-DEFINE_bool(print_builtin_json_ast, false,
-            "print source AST for builtins as JSON")
 DEFINE_string(stop_at, "", "function name where to insert a breakpoint")

 // compiler.cc
@@ -475,10 +469,6 @@ DEFINE_bool(trace_normalization,
 // runtime.cc
 DEFINE_bool(trace_lazy, false, "trace lazy compilation")

-// serialize.cc
-DEFINE_bool(debug_serialization, false,
-            "write debug information into the snapshot.")
-
 // spaces.cc
 DEFINE_bool(collect_heap_spill_statistics, false,
             "report heap spill statistics along with heap_stats "

deps/v8/src/frames-inl.h (14)

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -77,18 +77,18 @@ inline StackHandler* StackHandler::FromAddress(Address address) {
 }

-inline bool StackHandler::is_entry() const {
-  return kind() == ENTRY;
+inline bool StackHandler::is_js_entry() const {
+  return kind() == JS_ENTRY;
 }

-inline bool StackHandler::is_try_catch() const {
-  return kind() == TRY_CATCH;
+inline bool StackHandler::is_catch() const {
+  return kind() == CATCH;
 }

-inline bool StackHandler::is_try_finally() const {
-  return kind() == TRY_FINALLY;
+inline bool StackHandler::is_finally() const {
+  return kind() == FINALLY;
 }

deps/v8/src/frames.cc (2)

@@ -1174,7 +1174,7 @@ void EntryFrame::Iterate(ObjectVisitor* v) const {
   StackHandlerIterator it(this, top_handler());
   ASSERT(!it.done());
   StackHandler* handler = it.handler();
-  ASSERT(handler->is_entry());
+  ASSERT(handler->is_js_entry());
   handler->Iterate(v, LookupCode());
 #ifdef DEBUG
   // Make sure that the entry frame does not contain more than one

deps/v8/src/frames.h (20)

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -85,15 +85,17 @@ class InnerPointerToCodeCache {
 class StackHandler BASE_EMBEDDED {
  public:
   enum Kind {
-    ENTRY,
-    TRY_CATCH,
-    TRY_FINALLY
+    JS_ENTRY,
+    CATCH,
+    FINALLY,
+    LAST_KIND = FINALLY
   };

   static const int kKindWidth = 2;
-  static const int kOffsetWidth = 32 - kKindWidth;
+  STATIC_ASSERT(LAST_KIND < (1 << kKindWidth));
+  static const int kIndexWidth = 32 - kKindWidth;
   class KindField: public BitField<StackHandler::Kind, 0, kKindWidth> {};
-  class OffsetField: public BitField<unsigned, kKindWidth, kOffsetWidth> {};
+  class IndexField: public BitField<unsigned, kKindWidth, kIndexWidth> {};

   // Get the address of this stack handler.
   inline Address address() const;
@@ -111,9 +113,9 @@ class StackHandler BASE_EMBEDDED {
   static inline StackHandler* FromAddress(Address address);

   // Testers
-  inline bool is_entry() const;
-  inline bool is_try_catch() const;
-  inline bool is_try_finally() const;
+  inline bool is_js_entry() const;
+  inline bool is_catch() const;
+  inline bool is_finally() const;

  private:
   // Accessors.

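A note on the StackHandler change above: the handler state word now packs the 2-bit Kind next to a 30-bit handler index, which is what the new STATIC_ASSERT(LAST_KIND < (1 << kKindWidth)) guards, and it is also why ThrowUncatchable (further down) can detect a JS_ENTRY handler, whose value is 0, by testing KindField::kMask alone. A minimal sketch of that packing, using plain shifts instead of V8's BitField template; the helper names here are invented for illustration:

    #include <cassert>
    #include <cstdint>

    enum Kind { JS_ENTRY, CATCH, FINALLY, LAST_KIND = FINALLY };

    const int kKindWidth = 2;                 // low bits hold the Kind
    const int kIndexWidth = 32 - kKindWidth;  // the rest holds the index

    uint32_t EncodeState(Kind kind, uint32_t index) {
      // Roughly KindField::encode(kind) | IndexField::encode(index).
      return static_cast<uint32_t>(kind) | (index << kKindWidth);
    }

    Kind DecodeKind(uint32_t state) {
      return static_cast<Kind>(state & ((1u << kKindWidth) - 1));
    }

    uint32_t DecodeIndex(uint32_t state) { return state >> kKindWidth; }

    int main() {
      uint32_t state = EncodeState(CATCH, 7);
      assert(DecodeKind(state) == CATCH);
      assert(DecodeIndex(state) == 7);
      // JS_ENTRY encodes as 0 in the kind bits, so masking with the kind
      // mask and comparing against zero identifies an entry handler.
      assert((EncodeState(JS_ENTRY, 7) & ((1u << kKindWidth) - 1)) == 0);
    }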
118
deps/v8/src/full-codegen.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -51,7 +51,25 @@ void BreakableStatementChecker::Check(Expression* expr) {
 }

-void BreakableStatementChecker::VisitDeclaration(Declaration* decl) {
+void BreakableStatementChecker::VisitVariableDeclaration(
+    VariableDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitModuleDeclaration(
+    ModuleDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
+}
+
+void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
+}
+
+void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
+}
+
+void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
 }
@@ -297,6 +315,9 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
   code->set_stack_check_table_offset(table_offset);
   CodeGenerator::PrintCode(code, info);
   info->SetCode(code);  // May be an empty handle.
+  if (!code.is_null()) {
+    isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size());
+  }
 #ifdef ENABLE_GDB_JIT_INTERFACE
   if (FLAG_gdbjit && !code.is_null()) {
     GDBJITLineInfo* lineinfo =
@@ -380,7 +401,7 @@ void FullCodeGenerator::RecordJSReturnSite(Call* call) {
 void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
   // There's no need to prepare this code for bailouts from already optimized
   // code or code that can't be optimized.
-  if (!FLAG_deopt || !info_->HasDeoptimizationSupport()) return;
+  if (!info_->HasDeoptimizationSupport()) return;
   unsigned pc_and_state =
       StateField::encode(state) | PcField::encode(masm_->pc_offset());
   BailoutEntry entry = { id, pc_and_state };
@@ -525,39 +546,40 @@ void FullCodeGenerator::DoTest(const TestContext* context) {
 void FullCodeGenerator::VisitDeclarations(
     ZoneList<Declaration*>* declarations) {
-  int length = declarations->length();
-  int global_count = 0;
-  for (int i = 0; i < length; i++) {
-    Declaration* decl = declarations->at(i);
-    EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count);
-  }
+  int save_global_count = global_count_;
+  global_count_ = 0;
+
+  AstVisitor::VisitDeclarations(declarations);

   // Batch declare global functions and variables.
-  if (global_count > 0) {
+  if (global_count_ > 0) {
     Handle<FixedArray> array =
-        isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
+        isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
+    int length = declarations->length();
     for (int j = 0, i = 0; i < length; i++) {
-      Declaration* decl = declarations->at(i);
-      Variable* var = decl->proxy()->var();
-      if (var->IsUnallocated()) {
-        array->set(j++, *(var->name()));
-        if (decl->fun() == NULL) {
-          if (var->binding_needs_init()) {
-            // In case this binding needs initialization use the hole.
-            array->set_the_hole(j++);
+      VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
+      if (decl != NULL) {
+        Variable* var = decl->proxy()->var();
+
+        if (var->IsUnallocated()) {
+          array->set(j++, *(var->name()));
+          if (decl->fun() == NULL) {
+            if (var->binding_needs_init()) {
+              // In case this binding needs initialization use the hole.
+              array->set_the_hole(j++);
+            } else {
+              array->set_undefined(j++);
+            }
           } else {
-            array->set_undefined(j++);
+            Handle<SharedFunctionInfo> function =
+                Compiler::BuildFunctionInfo(decl->fun(), script());
+            // Check for stack-overflow exception.
+            if (function.is_null()) {
+              SetStackOverflow();
+              return;
+            }
+            array->set(j++, *function);
           }
-        } else {
-          Handle<SharedFunctionInfo> function =
-              Compiler::BuildFunctionInfo(decl->fun(), script());
-          // Check for stack-overflow exception.
-          if (function.is_null()) {
-            SetStackOverflow();
-            return;
-          }
-          array->set(j++, *function);
         }
       }
     }
@@ -565,6 +587,38 @@ void FullCodeGenerator::VisitDeclarations(
     // Invoke the platform-dependent code generator to do the actual
     // declaration the global functions and variables.
     DeclareGlobals(array);
   }
+
+  global_count_ = save_global_count;
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
+  EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
+  // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
+  // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
+  // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModulePath(ModulePath* module) {
+  // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
+  // TODO(rossberg)
 }
@@ -1147,7 +1201,7 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
   // Try block code. Sets up the exception handler chain.
   __ bind(&try_entry);
-  __ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER, stmt->index());
+  __ PushTryHandler(StackHandler::CATCH, stmt->index());
   { TryCatch try_body(this);
     Visit(stmt->try_block());
   }
@@ -1204,7 +1258,7 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
   // Set up try handler.
   __ bind(&try_entry);
-  __ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER, stmt->index());
+  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
   { TryFinally try_body(this, &finally_entry);
     Visit(stmt->try_block());
   }

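For readers following the new VisitDeclarations flow: the batch array holds 2 * global_count_ slots of (name, initial value) pairs, where the value slot is the hole when the binding needs initialization, undefined for a plain variable, and a compiled SharedFunctionInfo for a function declaration. A toy model of that layout, with invented stand-in types (this is not V8 code; which modes need initialization follows binding_needs_init() as in the diff above):

    #include <cstdio>
    #include <string>
    #include <vector>

    struct Slot { std::string name; std::string value; };  // textual stand-ins

    int main() {
      // For top-level `var x; const y = ...; function f() {}` the pairs are:
      std::vector<Slot> pairs = {
        {"x", "undefined"},              // var: no initialization barrier
        {"y", "the_hole"},               // const: binding_needs_init() => hole
        {"f", "SharedFunctionInfo(f)"},  // function: compiled eagerly; a null
                                         // handle means SetStackOverflow()
      };
      for (const Slot& s : pairs)
        std::printf("%s -> %s\n", s.name.c_str(), s.value.c_str());
    }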
6
deps/v8/src/full-codegen.h

@@ -83,6 +83,7 @@ class FullCodeGenerator: public AstVisitor {
         scope_(NULL),
         nesting_stack_(NULL),
         loop_depth_(0),
+        global_count_(0),
         context_(NULL),
         bailout_entries_(0),
         stack_checks_(2),  // There's always at least one.
@@ -416,10 +417,10 @@ class FullCodeGenerator: public AstVisitor {
   // Platform-specific code for a variable, constant, or function
   // declaration. Functions have an initial value.
+  // Increments global_count_ for unallocated variables.
   void EmitDeclaration(VariableProxy* proxy,
                        VariableMode mode,
-                       FunctionLiteral* function,
-                       int* global_count);
+                       FunctionLiteral* function);

   // Platform-specific code for checking the stack limit at the back edge of
   // a loop.
@@ -767,6 +768,7 @@ class FullCodeGenerator: public AstVisitor {
   Label return_label_;
   NestedStatement* nesting_stack_;
   int loop_depth_;
+  int global_count_;
   const ExpressionContext* context_;
   ZoneList<BailoutEntry> bailout_entries_;
   ZoneList<BailoutEntry> stack_checks_;

4
deps/v8/src/handles.cc

@@ -711,7 +711,7 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                    isolate);
   }
   isolate->counters()->enum_cache_misses()->Increment();
-  int num_enum = object->NumberOfEnumProperties();
+  int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
   Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
   Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
   Handle<DescriptorArray> descs =
@@ -735,7 +735,7 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
     ASSERT(storage->length() == index);
     return storage;
   } else {
-    int num_enum = object->NumberOfEnumProperties();
+    int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
     Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
     Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
     object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);

13
deps/v8/src/heap.cc

@@ -1201,7 +1201,9 @@ void Heap::Scavenge() {
   promotion_queue_.Destroy();

   LiveObjectList::UpdateReferencesForScavengeGC();
-  isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+  if (!FLAG_watch_ic_patching) {
+    isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+  }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();

   ASSERT(new_space_front == new_space_.top());
@@ -2865,7 +2867,9 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
   share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
   share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
   share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
-  share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
+  share->set_deopt_counter(FLAG_deopt_every_n_times);
+  share->set_profiler_ticks(0);
+  share->set_ast_node_count(0);

   // Set integer fields (smi or int, depending on the architecture).
   share->set_length(0);
@@ -5839,10 +5843,7 @@ bool Heap::SetUp(bool create_heap_objects) {
   if (!code_space_->SetUp()) return false;

   // Initialize map space.
-  map_space_ = new MapSpace(this,
-                            max_old_generation_size_,
-                            FLAG_max_map_space_pages,
-                            MAP_SPACE);
+  map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
   if (map_space_ == NULL) return false;
   if (!map_space_->SetUp()) return false;

5
deps/v8/src/heap.h

@@ -241,9 +241,10 @@
   V(use_strict, "use strict")                                            \
   V(dot_symbol, ".")                                                     \
   V(anonymous_function_symbol, "(anonymous function)")                   \
   V(compare_ic_symbol, ".compare_ic")                                    \
   V(infinity_symbol, "Infinity")                                         \
-  V(minus_infinity_symbol, "-Infinity")
+  V(minus_infinity_symbol, "-Infinity")                                  \
+  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")

 // Forward declarations.
 class GCTracer;

7
deps/v8/src/hydrogen-instructions.cc

@@ -893,6 +893,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
 void HCheckMap::PrintDataTo(StringStream* stream) {
   value()->PrintNameTo(stream);
   stream->Add(" %p", *map());
+  if (mode() == REQUIRE_EXACT_MAP) {
+    stream->Add(" [EXACT]");
+  } else if (!has_element_transitions_) {
+    stream->Add(" [EXACT*]");
+  } else {
+    stream->Add(" [MATCH ELEMENTS]");
+  }
 }

42
deps/v8/src/hydrogen-instructions.h

@@ -186,6 +186,7 @@ class LChunkBuilder;
   V(InobjectFields)                            \
   V(BackingStoreFields)                        \
   V(ElementsKind)                              \
+  V(ElementsPointer)                           \
   V(ArrayElements)                             \
   V(DoubleArrayElements)                       \
   V(SpecializedArrayElements)                  \
@@ -646,6 +647,18 @@ class HValue: public ZoneObject {
     return gvn_flags_.ContainsAnyOf(AllObservableSideEffectsFlagSet());
   }

+  GVNFlagSet DependsOnFlags() const {
+    GVNFlagSet result = gvn_flags_;
+    result.Intersect(AllDependsOnFlagSet());
+    return result;
+  }
+
+  GVNFlagSet SideEffectFlags() const {
+    GVNFlagSet result = gvn_flags_;
+    result.Intersect(AllSideEffectsFlagSet());
+    return result;
+  }
+
   GVNFlagSet ChangesFlags() const {
     GVNFlagSet result = gvn_flags_;
     result.Intersect(AllChangesFlagSet());
@@ -722,6 +735,15 @@ class HValue: public ZoneObject {
     representation_ = r;
   }

+  static GVNFlagSet AllDependsOnFlagSet() {
+    GVNFlagSet result;
+    // Create changes mask.
+#define ADD_FLAG(type) result.Add(kDependsOn##type);
+    GVN_FLAG_LIST(ADD_FLAG)
+#undef ADD_FLAG
+    return result;
+  }
+
   static GVNFlagSet AllChangesFlagSet() {
     GVNFlagSet result;
     // Create changes mask.
@@ -743,6 +765,8 @@ class HValue: public ZoneObject {
   static GVNFlagSet AllObservableSideEffectsFlagSet() {
     GVNFlagSet result = AllChangesFlagSet();
     result.Remove(kChangesElementsKind);
+    result.Remove(kChangesElementsPointer);
+    result.Remove(kChangesMaps);
     return result;
   }
@@ -1920,8 +1944,7 @@ class HLoadElements: public HUnaryOperation {
   explicit HLoadElements(HValue* value) : HUnaryOperation(value) {
     set_representation(Representation::Tagged());
     SetFlag(kUseGVN);
-    SetGVNFlag(kDependsOnMaps);
-    SetGVNFlag(kDependsOnElementsKind);
+    SetGVNFlag(kDependsOnElementsPointer);
   }

   virtual Representation RequiredInputRepresentation(int index) {
@@ -1972,6 +1995,11 @@ class HCheckMap: public HTemplateInstruction<2> {
     set_representation(Representation::Tagged());
     SetFlag(kUseGVN);
     SetGVNFlag(kDependsOnMaps);
+    // If the map to check doesn't have the untransitioned elements, it must not
+    // be hoisted above TransitionElements instructions.
+    if (mode == REQUIRE_EXACT_MAP || !map->has_fast_smi_only_elements()) {
+      SetGVNFlag(kDependsOnElementsKind);
+    }
     has_element_transitions_ =
         map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL) != NULL ||
         map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL) != NULL;
@@ -4135,7 +4163,17 @@ class HTransitionElementsKind: public HTemplateInstruction<1> {
         transitioned_map_(transitioned_map) {
     SetOperandAt(0, object);
     SetFlag(kUseGVN);
+    SetGVNFlag(kDependsOnMaps);
     SetGVNFlag(kChangesElementsKind);
+    if (original_map->has_fast_double_elements()) {
+      SetGVNFlag(kChangesElementsPointer);
+      SetGVNFlag(kDependsOnElementsPointer);
+      SetGVNFlag(kDependsOnDoubleArrayElements);
+    } else if (transitioned_map->has_fast_double_elements()) {
+      SetGVNFlag(kChangesElementsPointer);
+      SetGVNFlag(kDependsOnElementsPointer);
+      SetGVNFlag(kDependsOnArrayElements);
+    }
     set_representation(Representation::Tagged());
   }

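The DependsOn/SideEffect accessors added above mirror an existing pattern: every GVN flag comes in a "changes" and a "depends-on" flavor, and hoisting decisions intersect an instruction's depends-on bits with a loop's accumulated changes bits after converting the latter to depends-on form (HValue::ConvertChangesToDependsFlags). A compact model of that split using std::bitset; the flag names and helpers are invented, and the real GVNFlagSet interleaves the two variants rather than splitting the set into halves:

    #include <bitset>
    #include <cassert>

    enum Flag { kMaps, kElementsKind, kElementsPointer, kNumFlags };

    // Low half models kChanges*, high half models kDependsOn*.
    typedef std::bitset<2 * kNumFlags> FlagSet;

    FlagSet Changes(Flag f) { return FlagSet().set(f); }
    FlagSet DependsOn(Flag f) { return FlagSet().set(kNumFlags + f); }

    // Shift changes into the depends-on half so the two can be intersected.
    FlagSet ConvertChangesToDepends(FlagSet changes) {
      return changes << kNumFlags;
    }

    int main() {
      // A loop body that transitions elements kinds "kills" kElementsKind.
      FlagSet loop_kills = Changes(kElementsKind);
      // After this patch HLoadElements depends only on the elements pointer,
      // not on kMaps/kElementsKind, so that kill no longer blocks hoisting.
      FlagSet load_elements = DependsOn(kElementsPointer);
      assert((ConvertChangesToDepends(loop_kills) & load_elements).none());
    }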
312
deps/v8/src/hydrogen.cc

@@ -70,7 +70,8 @@ HBasicBlock::HBasicBlock(HGraph* graph)
       deleted_phis_(4),
       parent_loop_header_(NULL),
       is_inline_return_target_(false),
-      is_deoptimizing_(false) { }
+      is_deoptimizing_(false),
+      dominates_loop_successors_(false) { }


 void HBasicBlock::AttachLoopInformation() {
@@ -315,6 +316,62 @@ void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
 }


+void HBasicBlock::AssignLoopSuccessorDominators() {
+  // Mark blocks that dominate all subsequent reachable blocks inside their
+  // loop. Exploit the fact that blocks are sorted in reverse post order. When
+  // the loop is visited in increasing block id order, if the number of
+  // non-loop-exiting successor edges at the dominator_candidate block doesn't
+  // exceed the number of previously encountered predecessor edges, there is no
+  // path from the loop header to any block with higher id that doesn't go
+  // through the dominator_candidate block. In this case, the
+  // dominator_candidate block is guaranteed to dominate all blocks reachable
+  // from it with higher ids.
+  HBasicBlock* last = loop_information()->GetLastBackEdge();
+  int outstanding_successors = 1;  // one edge from the pre-header
+  // Header always dominates everything.
+  MarkAsLoopSuccessorDominator();
+  for (int j = block_id(); j <= last->block_id(); ++j) {
+    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
+    for (HPredecessorIterator it(dominator_candidate); !it.Done();
+         it.Advance()) {
+      HBasicBlock* predecessor = it.Current();
+      // Don't count back edges.
+      if (predecessor->block_id() < dominator_candidate->block_id()) {
+        outstanding_successors--;
+      }
+    }
+
+    // If more successors than predecessors have been seen in the loop up to
+    // now, it's not possible to guarantee that the current block dominates
+    // all of the blocks with higher IDs. In this case, assume conservatively
+    // that those paths through loop that don't go through the current block
+    // contain all of the loop's dependencies. Also be careful to record
+    // dominator information about the current loop that's being processed,
+    // and not nested loops, which will be processed when
+    // AssignLoopSuccessorDominators gets called on their header.
+    ASSERT(outstanding_successors >= 0);
+    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
+    if (outstanding_successors == 0 &&
+        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
+      dominator_candidate->MarkAsLoopSuccessorDominator();
+    }
+    HControlInstruction* end = dominator_candidate->end();
+    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
+      HBasicBlock* successor = it.Current();
+      // Only count successors that remain inside the loop and don't loop back
+      // to a loop header.
+      if (successor->block_id() > dominator_candidate->block_id() &&
+          successor->block_id() <= last->block_id()) {
+        // Backwards edges must land on loop headers.
+        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
+               successor->IsLoopHeader());
+        outstanding_successors++;
+      }
+    }
+  }
+}
+
+
 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
   for (int i = 0; i < predecessors_.length(); ++i) {
     if (predecessors_[i] == predecessor) return i;
@@ -646,9 +703,7 @@ Handle<Code> HGraph::Compile(CompilationInfo* info) {
   MacroAssembler assembler(info->isolate(), NULL, 0);
   LCodeGen generator(chunk, &assembler, info);

-  if (FLAG_eliminate_empty_blocks) {
-    chunk->MarkEmptyBlocks();
-  }
+  chunk->MarkEmptyBlocks();

   if (generator.GenerateCode()) {
     if (FLAG_trace_codegen) {
@@ -752,10 +807,12 @@ void HGraph::Postorder(HBasicBlock* block,
 void HGraph::AssignDominators() {
   HPhase phase("Assign dominators", this);
   for (int i = 0; i < blocks_.length(); ++i) {
-    if (blocks_[i]->IsLoopHeader()) {
+    HBasicBlock* block = blocks_[i];
+    if (block->IsLoopHeader()) {
       // Only the first predecessor of a loop header is from outside the loop.
       // All others are back edges, and thus cannot dominate the loop header.
-      blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
+      block->AssignCommonDominator(block->predecessors()->first());
+      block->AssignLoopSuccessorDominators();
     } else {
       for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
         blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
@@ -1373,7 +1430,8 @@ class HGlobalValueNumberer BASE_EMBEDDED {
   void LoopInvariantCodeMotion();
   void ProcessLoopBlock(HBasicBlock* block,
                         HBasicBlock* before_loop,
-                        GVNFlagSet loop_kills);
+                        GVNFlagSet loop_kills,
+                        GVNFlagSet* accumulated_first_time_depends);
   bool AllowCodeMotion();
   bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
@@ -1398,6 +1456,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
 bool HGlobalValueNumberer::Analyze() {
+  removed_side_effects_ = false;
   ComputeBlockSideEffects();
   if (FLAG_loop_invariant_code_motion) {
     LoopInvariantCodeMotion();
@@ -1409,6 +1468,12 @@ bool HGlobalValueNumberer::Analyze() {
 void HGlobalValueNumberer::ComputeBlockSideEffects() {
+  // The Analyze phase of GVN can be called multiple times. Clear loop side
+  // effects before computing them to erase the contents from previous Analyze
+  // passes.
+  for (int i = 0; i < loop_side_effects_.length(); ++i) {
+    loop_side_effects_[i].RemoveAll();
+  }
   for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
     // Compute side effects for the block.
     HBasicBlock* block = graph_->blocks()->at(i);
@@ -1446,18 +1511,22 @@ void HGlobalValueNumberer::LoopInvariantCodeMotion() {
              block->block_id(),
              side_effects.ToIntegral());

+    GVNFlagSet accumulated_first_time_depends;
     HBasicBlock* last = block->loop_information()->GetLastBackEdge();
     for (int j = block->block_id(); j <= last->block_id(); ++j) {
-      ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
+      ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
+                       &accumulated_first_time_depends);
     }
   }
 }


-void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
-                                            HBasicBlock* loop_header,
-                                            GVNFlagSet loop_kills) {
+void HGlobalValueNumberer::ProcessLoopBlock(
+    HBasicBlock* block,
+    HBasicBlock* loop_header,
+    GVNFlagSet loop_kills,
+    GVNFlagSet* accumulated_first_time_depends) {
   HBasicBlock* pre_header = loop_header->predecessors()->at(0);
   GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
   TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
@@ -1466,25 +1535,65 @@ void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
   HInstruction* instr = block->first();
   while (instr != NULL) {
     HInstruction* next = instr->next();
-    if (instr->CheckFlag(HValue::kUseGVN) &&
-        !instr->gvn_flags().ContainsAnyOf(depends_flags)) {
-      TraceGVN("Checking instruction %d (%s)\n",
+    bool hoisted = false;
+    if (instr->CheckFlag(HValue::kUseGVN)) {
+      TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
+               "loop kills 0x%X\n",
                instr->id(),
-               instr->Mnemonic());
-      bool inputs_loop_invariant = true;
-      for (int i = 0; i < instr->OperandCount(); ++i) {
-        if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
-          inputs_loop_invariant = false;
-        }
+               instr->Mnemonic(),
+               instr->gvn_flags().ToIntegral(),
+               depends_flags.ToIntegral());
+      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
+      if (!can_hoist && instr->IsTransitionElementsKind()) {
+        // It's only possible to hoist one time side effects if there are no
+        // dependencies on their changes from the loop header to the current
+        // instruction.
+        GVNFlagSet converted_changes =
+            HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
+        TraceGVN("Checking dependencies on one-time instruction %d (%s) "
+                 "converted changes 0x%X, accumulated depends 0x%X\n",
+                 instr->id(),
+                 instr->Mnemonic(),
+                 converted_changes.ToIntegral(),
+                 accumulated_first_time_depends->ToIntegral());
+        // It's possible to hoist one-time side effects from the current loop
+        // loop only if they dominate all of the successor blocks in the same
+        // loop and there are not any instructions that have Changes/DependsOn
+        // that intervene between it and the beginning of the loop header.
+        bool in_nested_loop = block != loop_header &&
+            ((block->parent_loop_header() != loop_header) ||
+             block->IsLoopHeader());
+        can_hoist = !in_nested_loop &&
+            block->IsLoopSuccessorDominator() &&
+            !accumulated_first_time_depends->ContainsAnyOf(converted_changes);
       }
-      if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
-        TraceGVN("Found loop invariant instruction %d\n", instr->id());
-        // Move the instruction out of the loop.
-        instr->Unlink();
-        instr->InsertBefore(pre_header->end());
+
+      if (can_hoist) {
+        bool inputs_loop_invariant = true;
+        for (int i = 0; i < instr->OperandCount(); ++i) {
+          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
+            inputs_loop_invariant = false;
+          }
+        }
+
+        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
+          TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
+          // Move the instruction out of the loop.
+          instr->Unlink();
+          instr->InsertBefore(pre_header->end());
+          if (instr->HasSideEffects()) removed_side_effects_ = true;
+          hoisted = true;
+        }
       }
     }
+
+    if (!hoisted) {
+      // If an instruction is not hoisted, we have to account for its side
+      // effects when hoisting later HTransitionElementsKind instructions.
+      accumulated_first_time_depends->Add(instr->DependsOnFlags());
+      GVNFlagSet converted_changes =
+          HValue::ConvertChangesToDependsFlags(instr->SideEffectFlags());
+      accumulated_first_time_depends->Add(converted_changes);
+    }
     instr = next;
   }
 }
@@ -2334,7 +2443,7 @@ HGraph* HGraphBuilder::CreateGraph() {
     // Handle implicit declaration of the function name in named function
     // expressions before other declarations.
     if (scope->is_function_scope() && scope->function() != NULL) {
-      HandleDeclaration(scope->function(), CONST, NULL);
+      HandleVariableDeclaration(scope->function(), CONST, NULL);
     }
     VisitDeclarations(scope->declarations());
     AddSimulate(AstNode::kDeclarationsId);
@@ -2392,7 +2501,8 @@ HGraph* HGraphBuilder::CreateGraph() {
     // could only be discovered by removing side-effect-generating instructions
     // during the first pass.
     if (FLAG_smi_only_arrays && removed_side_effects) {
-      gvn.Analyze();
+      removed_side_effects = gvn.Analyze();
+      ASSERT(!removed_side_effects);
     }
   }
@@ -4796,8 +4906,8 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
   // Do a quick check on source code length to avoid parsing large
   // inlining candidates.
-  if ((FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize)
-      || target->shared()->SourceSize() > kUnlimitedMaxSourceSize) {
+  if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize)
+      || target_shared->SourceSize() > kUnlimitedMaxSourceSize) {
     TraceInline(target, caller, "target text too big");
     return false;
   }
@@ -4807,6 +4917,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
     TraceInline(target, caller, "target not inlineable");
     return false;
   }
+  if (target_shared->dont_inline() || target_shared->dont_crankshaft()) {
+    TraceInline(target, caller, "target contains unsupported syntax [early]");
+    return false;
+  }
+
+  int nodes_added = target_shared->ast_node_count();
+  if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
+      nodes_added > kUnlimitedMaxInlinedSize) {
+    TraceInline(target, caller, "target AST is too large [early]");
+    return false;
+  }

 #if !defined(V8_TARGET_ARCH_IA32)
   // Target must be able to use caller's context.
@@ -4851,8 +4972,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
     return false;
   }

-  int count_before = AstNode::Count();
-
   // Parse and allocate variables.
   CompilationInfo target_info(target);
   if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
@@ -4872,11 +4991,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
   }
   FunctionLiteral* function = target_info.function();

-  // Count the number of AST nodes added by inlining this call.
-  int nodes_added = AstNode::Count() - count_before;
+  // The following conditions must be checked again after re-parsing, because
+  // earlier the information might not have been complete due to lazy parsing.
+  nodes_added = function->ast_node_count();
   if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
       nodes_added > kUnlimitedMaxInlinedSize) {
-    TraceInline(target, caller, "target AST is too large");
+    TraceInline(target, caller, "target AST is too large [late]");
+    return false;
+  }
+  AstProperties::Flags* flags(function->flags());
+  if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
+    TraceInline(target, caller, "target contains unsupported syntax [late]");
     return false;
   }
@@ -4895,13 +5020,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
       return false;
     }
   }

-  // All statements in the body must be inlineable.
-  for (int i = 0, count = function->body()->length(); i < count; ++i) {
-    if (!function->body()->at(i)->IsInlineable()) {
-      TraceInline(target, caller, "target contains unsupported syntax");
-      return false;
-    }
-  }

   // Generate the deoptimization data for the unoptimized version of
   // the target function if we don't already have it.
@@ -5050,10 +5168,41 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
 }


-bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
-                                             HValue* receiver,
-                                             Handle<Map> receiver_map,
-                                             CheckType check_type) {
+bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
+  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
+  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
+  switch (id) {
+    case kMathRound:
+    case kMathFloor:
+    case kMathAbs:
+    case kMathSqrt:
+    case kMathLog:
+    case kMathSin:
+    case kMathCos:
+      if (expr->arguments()->length() == 1) {
+        HValue* argument = Pop();
+        HValue* context = environment()->LookupContext();
+        Drop(1);  // Receiver.
+        HUnaryMathOperation* op =
+            new(zone()) HUnaryMathOperation(context, argument, id);
+        op->set_position(expr->position());
+        if (drop_extra) Drop(1);  // Optionally drop the function.
+        ast_context()->ReturnInstruction(op, expr->id());
+        return true;
+      }
+      break;
+    default:
+      // Not supported for inlining yet.
+      break;
+  }
+  return false;
+}
+
+
+bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
+                                               HValue* receiver,
+                                               Handle<Map> receiver_map,
+                                               CheckType check_type) {
   ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
   // Try to inline calls like Math.* as operations in the calling function.
   if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
@@ -5147,7 +5296,7 @@ bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
     case kMathRandom:
       if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
         AddCheckConstantFunction(expr, receiver, receiver_map, true);
-        Drop(1);
+        Drop(1);  // Receiver.
         HValue* context = environment()->LookupContext();
         HGlobalObject* global_object = new(zone()) HGlobalObject(context);
         AddInstruction(global_object);
@@ -5315,10 +5464,15 @@ void HGraphBuilder::VisitCall(Call* expr) {
       Handle<Map> receiver_map = (types == NULL || types->is_empty())
           ? Handle<Map>::null()
           : types->first();
-      if (TryInlineBuiltinFunction(expr,
-                                   receiver,
-                                   receiver_map,
-                                   expr->check_type())) {
+      if (TryInlineBuiltinMethodCall(expr,
+                                     receiver,
+                                     receiver_map,
+                                     expr->check_type())) {
+        if (FLAG_trace_inlining) {
+          PrintF("Inlining builtin ");
+          expr->target()->ShortPrint();
+          PrintF("\n");
+        }
         return;
       }
@@ -5389,6 +5543,14 @@ void HGraphBuilder::VisitCall(Call* expr) {
                  IsGlobalObject());
       environment()->SetExpressionStackAt(receiver_index, global_receiver);

+      if (TryInlineBuiltinFunctionCall(expr, false)) {  // Nothing to drop.
+        if (FLAG_trace_inlining) {
+          PrintF("Inlining builtin ");
+          expr->target()->ShortPrint();
+          PrintF("\n");
+        }
+        return;
+      }
       if (TryInline(expr)) return;
       call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
                                                          argument_count));
@@ -5415,6 +5577,16 @@ void HGraphBuilder::VisitCall(Call* expr) {
       PushAndAdd(receiver);
       CHECK_ALIVE(VisitExpressions(expr->arguments()));
       AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
+
+      if (TryInlineBuiltinFunctionCall(expr, true)) {  // Drop the function.
+        if (FLAG_trace_inlining) {
+          PrintF("Inlining builtin ");
+          expr->target()->ShortPrint();
+          PrintF("\n");
+        }
+        return;
+      }
+
       if (TryInline(expr, true)) {   // Drop function from environment.
         return;
       } else {
@@ -6368,14 +6540,14 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
 }


-void HGraphBuilder::VisitDeclaration(Declaration* decl) {
-  HandleDeclaration(decl->proxy(), decl->mode(), decl->fun());
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
+  HandleVariableDeclaration(decl->proxy(), decl->mode(), decl->fun());
 }


-void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
-                                      VariableMode mode,
-                                      FunctionLiteral* function) {
+void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
+                                              VariableMode mode,
+                                              FunctionLiteral* function) {
   Variable* var = proxy->var();
   bool binding_needs_init =
       (mode == CONST || mode == CONST_HARMONY || mode == LET);
@@ -6410,6 +6582,31 @@ void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
 }


+void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
+  // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
+  // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
+  // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModulePath(ModulePath* module) {
+  // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
+  // TODO(rossberg)
+}
+
+
 // Generators for inline runtime functions.
 // Support for types.
 void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
@@ -7200,7 +7397,10 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
     }

     PrintEmptyProperty("xhandlers");
-    PrintEmptyProperty("flags");
+    const char* flags = current->IsLoopSuccessorDominator()
+        ? "dom-loop-succ"
+        : "";
+    PrintStringProperty("flags", flags);

     if (current->dominator() != NULL) {
       PrintBlockProperty("dominator", current->dominator()->block_id());

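The edge-counting trick described in the comment block above can be seen on a tiny example. The toy below (not V8 code) walks a four-block loop in reverse post order, consuming one outstanding edge per forward predecessor and producing one per in-loop forward successor; a block dominates everything later in the loop exactly when the count hits zero. It ignores the nested-loop and header special cases the real code handles:

    #include <cstdio>
    #include <vector>

    int main() {
      // Block 0 is the loop header, blocks 1 and 2 form a diamond, block 3
      // carries the back edge. edges[b] lists in-loop successors of b.
      std::vector<std::vector<int>> edges = {{1, 2}, {3}, {3}, {}};
      std::vector<std::vector<int>> preds(4);
      preds[0].push_back(-1);  // the single edge from the pre-header
      for (int b = 0; b < 4; ++b)
        for (int s : edges[b]) preds[s].push_back(b);

      int outstanding = 1;  // one edge from the pre-header
      for (int b = 0; b < 4; ++b) {
        for (int p : preds[b])
          if (p < b) --outstanding;  // consume forward predecessor edges
        if (outstanding == 0)
          std::printf("block %d dominates the rest of the loop\n", b);
        for (int s : edges[b])
          if (s > b) ++outstanding;  // produce forward successor edges
      }
      // Prints blocks 0 and 3: every path to a later block passes through
      // them, while the diamond blocks 1 and 2 can each be bypassed.
    }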
33
deps/v8/src/hydrogen.h

@@ -126,6 +126,7 @@ class HBasicBlock: public ZoneObject {
   int PredecessorIndexOf(HBasicBlock* predecessor) const;
   void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
   void AssignCommonDominator(HBasicBlock* other);
+  void AssignLoopSuccessorDominators();

   void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses) {
     FinishExit(CreateDeoptimize(has_uses));
@@ -149,6 +150,13 @@ class HBasicBlock: public ZoneObject {
   bool IsDeoptimizing() const { return is_deoptimizing_; }
   void MarkAsDeoptimizing() { is_deoptimizing_ = true; }

+  bool IsLoopSuccessorDominator() const {
+    return dominates_loop_successors_;
+  }
+  void MarkAsLoopSuccessorDominator() {
+    dominates_loop_successors_ = true;
+  }
+
   inline Zone* zone();

 #ifdef DEBUG
@@ -182,6 +190,22 @@ class HBasicBlock: public ZoneObject {
   HBasicBlock* parent_loop_header_;
   bool is_inline_return_target_;
   bool is_deoptimizing_;
+  bool dominates_loop_successors_;
+};
+
+
+class HPredecessorIterator BASE_EMBEDDED {
+ public:
+  explicit HPredecessorIterator(HBasicBlock* block)
+      : predecessor_list_(block->predecessors()), current_(0) { }
+
+  bool Done() { return current_ >= predecessor_list_->length(); }
+  HBasicBlock* Current() { return predecessor_list_->at(current_); }
+  void Advance() { current_++; }
+
+ private:
+  const ZoneList<HBasicBlock*>* predecessor_list_;
+  int current_;
 };
@@ -815,9 +839,9 @@ class HGraphBuilder: public AstVisitor {
   INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
 #undef INLINE_FUNCTION_GENERATOR_DECLARATION

-  void HandleDeclaration(VariableProxy* proxy,
-                         VariableMode mode,
-                         FunctionLiteral* function);
+  void HandleVariableDeclaration(VariableProxy* proxy,
+                                 VariableMode mode,
+                                 FunctionLiteral* function);

   void VisitDelete(UnaryOperation* expr);
   void VisitVoid(UnaryOperation* expr);
@@ -918,10 +942,11 @@ class HGraphBuilder: public AstVisitor {
   bool TryCallApply(Call* expr);
   bool TryInline(Call* expr, bool drop_extra = false);
-  bool TryInlineBuiltinFunction(Call* expr,
-                                HValue* receiver,
-                                Handle<Map> receiver_map,
-                                CheckType check_type);
+  bool TryInlineBuiltinMethodCall(Call* expr,
+                                  HValue* receiver,
+                                  Handle<Map> receiver_map,
+                                  CheckType check_type);
+  bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);

   // If --trace-inlining, print a line of the inlining trace. Inlining
   // succeeded if the reason string is NULL and failed if there is a

4
deps/v8/src/ia32/code-stubs-ia32.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -5022,7 +5022,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Invoke: Link this frame into the handler chain. There's only one
   // handler block in this code object, so its index is 0.
   __ bind(&invoke);
-  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

   // Clear any pending exceptions.
   __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));

34
deps/v8/src/ia32/full-codegen-ia32.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -129,6 +129,26 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   }
 #endif

+  // We can optionally optimize based on counters rather than statistical
+  // sampling.
+  if (info->ShouldSelfOptimize()) {
+    if (FLAG_trace_opt) {
+      PrintF("[adding self-optimization header to %s]\n",
+             *info->function()->debug_name()->ToCString());
+    }
+    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+    JSGlobalPropertyCell* cell;
+    if (maybe_cell->To(&cell)) {
+      __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
+             Immediate(Smi::FromInt(1)));
+      Handle<Code> compile_stub(
+          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+      STATIC_ASSERT(kSmiTag == 0);
+      __ j(zero, compile_stub);
+    }
+  }
+
   // Strict mode functions and builtins need to replace the receiver
   // with undefined when called as functions (without an explicit
   // receiver object). ecx is zero for method calls and non-zero for
@@ -261,11 +281,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
     // For named function expressions, declare the function name as a
     // constant.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      int ignored = 0;
       VariableProxy* proxy = scope()->function();
       ASSERT(proxy->var()->mode() == CONST ||
              proxy->var()->mode() == CONST_HARMONY);
-      EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+      ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+      EmitDeclaration(proxy, proxy->var()->mode(), NULL);
     }
     VisitDeclarations(scope()->declarations());
   }
@@ -681,8 +701,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                         VariableMode mode,
-                                        FunctionLiteral* function,
-                                        int* global_count) {
+                                        FunctionLiteral* function) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
@@ -691,7 +710,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       (mode == CONST || mode == CONST_HARMONY || mode == LET);
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++(*global_count);
+      ++global_count_;
       break;

     case Variable::PARAMETER:
@@ -771,9 +790,6 @@
 }


-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   __ push(esi);  // The context is the first argument.

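The self-optimization header emitted above amounts to a per-function call counter: a JSGlobalPropertyCell starts at Compiler::kCallsUntilPrimitiveOpt, each entry subtracts one, and hitting zero jumps to the LazyRecompile builtin. A simplified model in plain C++; the names are invented, the starting value of 100 is assumed for illustration only, and the real stub hands control to the optimizing recompiler rather than setting a flag:

    #include <cstdio>

    struct Function {
      int counter;     // stands in for the cell's Smi payload
      bool optimized;
    };

    void LazyRecompile(Function* f) { f->optimized = true; }

    void Enter(Function* f) {
      // Mirrors: __ sub(cell, Immediate(Smi::FromInt(1))); __ j(zero, stub);
      if (!f->optimized && --f->counter == 0) LazyRecompile(f);
    }

    int main() {
      Function f = {100, false};  // assume kCallsUntilPrimitiveOpt == 100
      for (int i = 0; i < 100; ++i) Enter(&f);
      std::printf("optimized after 100 calls: %s\n",
                  f.optimized ? "yes" : "no");
    }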
1
deps/v8/src/ia32/lithium-codegen-ia32.cc

@@ -622,7 +622,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
-  ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);

25
deps/v8/src/ia32/macro-assembler-ia32.cc

@@ -764,8 +764,7 @@ void MacroAssembler::LeaveApiExitFrame() {
 }


-void MacroAssembler::PushTryHandler(CodeLocation try_location,
-                                    HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -776,25 +775,21 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
   STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

   // We will build up the handler from the bottom by pushing on the stack.
-  // First compute the state and push the frame pointer and context.
-  unsigned state = StackHandler::OffsetField::encode(handler_index);
-  if (try_location == IN_JAVASCRIPT) {
-    push(ebp);
-    push(esi);
-    state |= (type == TRY_CATCH_HANDLER)
-        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
-        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
-  } else {
-    ASSERT(try_location == IN_JS_ENTRY);
+  // First push the frame pointer and context.
+  if (kind == StackHandler::JS_ENTRY) {
     // The frame pointer does not point to a JS frame so we save NULL for
     // ebp. We expect the code throwing an exception to check ebp before
     // dereferencing it to restore the context.
     push(Immediate(0));  // NULL frame pointer.
     push(Immediate(Smi::FromInt(0)));  // No context.
-    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
+  } else {
+    push(ebp);
+    push(esi);
   }

   // Push the state and the code object.
+  unsigned state =
+      StackHandler::IndexField::encode(handler_index) |
+      StackHandler::KindField::encode(kind);
   push(Immediate(state));
   Push(CodeObject());
@@ -904,7 +899,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

   bind(&check_kind);
-  STATIC_ASSERT(StackHandler::ENTRY == 0);
+  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
   test(Operand(esp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
   j(not_zero, &fetch_next);

4
deps/v8/src/ia32/macro-assembler-ia32.h

@@ -491,9 +491,7 @@ class MacroAssembler: public Assembler {
   // Exception handling

   // Push a new try handler and link it into try handler chain.
-  void PushTryHandler(CodeLocation try_location,
-                      HandlerType type,
-                      int handler_index);
+  void PushTryHandler(StackHandler::Kind kind, int handler_index);

   // Unlink the stack handler on top of the stack from the try handler chain.
   void PopTryHandler();

75
deps/v8/src/ia32/stub-cache-ia32.cc

@@ -1345,25 +1345,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
   } else {
     Label call_builtin;

-    // Get the elements array of the object.
-    __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
-
-    // Check that the elements are in fast mode and writable.
-    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-           Immediate(factory()->fixed_array_map()));
-    __ j(not_equal, &call_builtin);
-
     if (argc == 1) {  // Otherwise fall through to call builtin.
       Label attempt_to_grow_elements, with_write_barrier;

+      // Get the elements array of the object.
+      __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+
+      // Check that the elements are in fast mode and writable.
+      __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
+             Immediate(factory()->fixed_array_map()));
+      __ j(not_equal, &call_builtin);
+
       // Get the array's length into eax and calculate new length.
       __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
       STATIC_ASSERT(kSmiTagSize == 1);
       STATIC_ASSERT(kSmiTag == 0);
       __ add(eax, Immediate(Smi::FromInt(argc)));

-      // Get the element's length into ecx.
-      __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
+      // Get the elements' length into ecx.
+      __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));

       // Check if we could survive without allocation.
       __ cmp(eax, ecx);
@@ -1376,29 +1376,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       // Save new length.
       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

-      // Push the element.
-      __ lea(edx, FieldOperand(ebx,
-                               eax, times_half_pointer_size,
-                               FixedArray::kHeaderSize - argc * kPointerSize));
-      __ mov(Operand(edx, 0), ecx);
+      // Store the value.
+      __ mov(FieldOperand(edi,
+                          eax,
+                          times_half_pointer_size,
+                          FixedArray::kHeaderSize - argc * kPointerSize),
+             ecx);

       __ ret((argc + 1) * kPointerSize);

       __ bind(&with_write_barrier);

-      __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(edi, &call_builtin);
+      __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+
+      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+        Label fast_object, not_fast_object;
+        __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
+        __ jmp(&fast_object);
+        // In case of fast smi-only, convert to fast object, otherwise bail out.
+        __ bind(&not_fast_object);
+        __ CheckFastSmiOnlyElements(ebx, &call_builtin);
+        // edi: elements array
+        // edx: receiver
+        // ebx: map
+        __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                               FAST_ELEMENTS,
+                                               ebx,
+                                               edi,
+                                               &call_builtin);
+        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+        // Restore edi.
+        __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+        __ bind(&fast_object);
+      } else {
+        __ CheckFastObjectElements(ebx, &call_builtin);
+      }

       // Save new length.
       __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);

-      // Push the element.
-      __ lea(edx, FieldOperand(ebx,
+      // Store the value.
+      __ lea(edx, FieldOperand(edi,
                                eax, times_half_pointer_size,
                                FixedArray::kHeaderSize - argc * kPointerSize));
       __ mov(Operand(edx, 0), ecx);

-      __ RecordWrite(ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+      __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

       __ ret((argc + 1) * kPointerSize);
@ -1408,11 +1431,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&call_builtin); __ jmp(&call_builtin);
} }
__ mov(edi, Operand(esp, argc * kPointerSize)); __ mov(ebx, Operand(esp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case // Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin. // the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check; Label no_fast_elements_check;
__ JumpIfSmi(edi, &no_fast_elements_check); __ JumpIfSmi(ebx, &no_fast_elements_check);
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
__ CheckFastObjectElements(ecx, &call_builtin, Label::kFar); __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check); __ bind(&no_fast_elements_check);
@ -1431,7 +1454,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
// Check if it's the end of elements. // Check if it's the end of elements.
__ lea(edx, FieldOperand(ebx, __ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size, eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize)); FixedArray::kHeaderSize - argc * kPointerSize));
__ cmp(edx, ecx); __ cmp(edx, ecx);
@ -1444,7 +1467,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx); __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Push the argument... // Push the argument...
__ mov(Operand(edx, 0), edi); __ mov(Operand(edx, 0), ebx);
// ... and fill the rest with holes. // ... and fill the rest with holes.
for (int i = 1; i < kAllocationDelta; i++) { for (int i = 1; i < kAllocationDelta; i++) {
__ mov(Operand(edx, i * kPointerSize), __ mov(Operand(edx, i * kPointerSize),
@ -1456,13 +1479,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// tell the incremental marker to rescan the object that we just grew. We // tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and // don't need to worry about the holes because they are in old space and
// already marked black. // already marked black.
__ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET); __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to edx as finish sequence assumes it's here. // Restore receiver to edx as finish sequence assumes it's here.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes. // Increment element's and array's sizes.
__ add(FieldOperand(ebx, FixedArray::kLengthOffset), __ add(FieldOperand(edi, FixedArray::kLengthOffset),
Immediate(Smi::FromInt(kAllocationDelta))); Immediate(Smi::FromInt(kAllocationDelta)));
// NOTE: This only happen in new-space, where we don't // NOTE: This only happen in new-space, where we don't
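
The block added above teaches the push stub about V8's smi-only arrays: an
array that has only ever held small integers stays in FAST_SMI_ONLY_ELEMENTS
until a non-smi is stored, at which point its map is transitioned to
FAST_ELEMENTS before the store completes. A minimal JavaScript illustration of
the behavior this stub now handles inline (the elements kinds themselves are
internal and not scriptable):

    // `a` starts out smi-only.
    var a = [1, 2, 3];
    a.push(4);    // still smi-only: the fast path stores directly
    a.push({});   // non-smi: the stub transitions to FAST_ELEMENTS,
                  // then stores with a write barrier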

5  deps/v8/src/ic-inl.h

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -29,6 +29,8 @@
 #define V8_IC_INL_H_

 #include "ic.h"
+
+#include "compiler.h"
 #include "debug.h"
 #include "macro-assembler.h"
@@ -89,6 +91,7 @@ void IC::SetTargetAtAddress(Address address, Code* target) {
   Assembler::set_target_address_at(address, target->instruction_start());
   target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
                                                                   target);
+  PostPatching();
 }

25  deps/v8/src/ic.cc

@@ -292,6 +292,31 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
 }

+
+void IC::PostPatching() {
+  if (FLAG_watch_ic_patching) {
+    Isolate::Current()->runtime_profiler()->NotifyICChanged();
+    // We do not want to optimize until the ICs have settled down,
+    // so when they are patched, we postpone optimization for the
+    // current function and the functions above it on the stack that
+    // might want to inline this one.
+    StackFrameIterator it;
+    if (it.done()) return;
+    it.Advance();
+    static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1;
+    for (int i = 0; i < kStackFramesToMark; ++i) {
+      if (it.done()) return;
+      StackFrame* raw_frame = it.frame();
+      if (raw_frame->is_java_script()) {
+        JSFunction* function =
+            JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
+        function->shared()->set_profiler_ticks(0);
+      }
+      it.Advance();
+    }
+  }
+}
+

 void IC::Clear(Address address) {
   Code* target = GetTargetAtAddress(address);
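
PostPatching implements the policy in the comment: every IC patch resets
profiler_ticks on the functions nearest the top of the stack, so a function
only becomes an optimization candidate once its call sites stop changing.
A hedged JavaScript sketch of code that keeps re-patching an IC and would
therefore keep deferring optimization (illustration only; assumes the usual
FLAG-to-command-line mapping, i.e. d8 --watch_ic_patching):

    function getX(o) { return o.x; }
    for (var i = 0; i < 100000; i++) {
      getX({ x: i });                  // one shape: the load IC settles
      if (i % 1000 === 0) {
        getX({ x: i, y: i });          // new shape: IC patched again,
      }                                // profiler ticks reset up the stack
    }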

1  deps/v8/src/ic.h

@@ -165,6 +165,7 @@ class IC {
   // Access the target code for the given IC address.
   static inline Code* GetTargetAtAddress(Address address);
   static inline void SetTargetAtAddress(Address address, Code* target);
+  static void PostPatching();

  private:
   // Frame pointer for the frame that uses (calls) the IC.

3  deps/v8/src/incremental-marking.cc

@@ -505,7 +505,8 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
   }

   is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
-      heap_->mark_compact_collector()->StartCompaction();
+      heap_->mark_compact_collector()->StartCompaction(
+          MarkCompactCollector::INCREMENTAL_COMPACTION);

   state_ = MARKING;

114  deps/v8/src/isolate.cc

@@ -542,6 +542,18 @@ Handle<String> Isolate::StackTraceString() {
 }

+
+void Isolate::CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object) {
+  if (capture_stack_trace_for_uncaught_exceptions_) {
+    // Capture stack trace for a detailed exception message.
+    Handle<String> key = factory()->hidden_stack_trace_symbol();
+    Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
+        stack_trace_for_uncaught_exceptions_frame_limit_,
+        stack_trace_for_uncaught_exceptions_options_);
+    JSObject::SetHiddenProperty(error_object, key, stack_trace);
+  }
+}
+

 Handle<JSArray> Isolate::CaptureCurrentStackTrace(
     int frame_limit, StackTrace::StackTraceOptions options) {
   // Ensure no negative values.
@@ -1011,7 +1023,7 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
   // Find the top-most try-catch handler.
   StackHandler* handler =
      StackHandler::FromAddress(Isolate::handler(thread_local_top()));
-  while (handler != NULL && !handler->is_try_catch()) {
+  while (handler != NULL && !handler->is_catch()) {
     handler = handler->next();
   }
@@ -1037,22 +1049,39 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
 }

-void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
+bool Isolate::IsErrorObject(Handle<Object> obj) {
+  if (!obj->IsJSObject()) return false;
+
+  String* error_key = *(factory()->LookupAsciiSymbol("$Error"));
+  Object* error_constructor =
+      js_builtins_object()->GetPropertyNoExceptionThrown(error_key);
+
+  for (Object* prototype = *obj; !prototype->IsNull();
+       prototype = prototype->GetPrototype()) {
+    if (!prototype->IsJSObject()) return false;
+    if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
+      return true;
+    }
+  }
+  return false;
+}
+
+
+void Isolate::DoThrow(Object* exception, MessageLocation* location) {
   ASSERT(!has_pending_exception());

   HandleScope scope;
-  Object* exception_object = Smi::FromInt(0);
-  bool is_object = exception->ToObject(&exception_object);
-  Handle<Object> exception_handle(exception_object);
+  Handle<Object> exception_handle(exception);

   // Determine reporting and whether the exception is caught externally.
   bool catchable_by_javascript = is_catchable_by_javascript(exception);
-  // Only real objects can be caught by JS.
-  ASSERT(!catchable_by_javascript || is_object);
   bool can_be_caught_externally = false;
   bool should_report_exception =
       ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
   bool report_exception = catchable_by_javascript && should_report_exception;
+  bool try_catch_needs_message =
+      can_be_caught_externally && try_catch_handler()->capture_message_;
+  bool bootstrapping = bootstrapper()->IsActive();

 #ifdef ENABLE_DEBUGGER_SUPPORT
   // Notify debugger of exception.
@@ -1061,34 +1090,52 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
   }
 #endif

-  // Generate the message.
-  Handle<Object> message_obj;
-  MessageLocation potential_computed_location;
-  bool try_catch_needs_message =
-      can_be_caught_externally &&
-      try_catch_handler()->capture_message_;
+  // Generate the message if required.
   if (report_exception || try_catch_needs_message) {
+    MessageLocation potential_computed_location;
     if (location == NULL) {
-      // If no location was specified we use a computed one instead
+      // If no location was specified we use a computed one instead.
       ComputeLocation(&potential_computed_location);
       location = &potential_computed_location;
     }
-    if (!bootstrapper()->IsActive()) {
-      // It's not safe to try to make message objects or collect stack
-      // traces while the bootstrapper is active since the infrastructure
-      // may not have been properly initialized.
+    // It's not safe to try to make message objects or collect stack traces
+    // while the bootstrapper is active since the infrastructure may not have
+    // been properly initialized.
+    if (!bootstrapping) {
      Handle<String> stack_trace;
      if (FLAG_trace_exception) stack_trace = StackTraceString();
      Handle<JSArray> stack_trace_object;
-      if (report_exception && capture_stack_trace_for_uncaught_exceptions_) {
+      if (capture_stack_trace_for_uncaught_exceptions_) {
+        if (IsErrorObject(exception_handle)) {
+          // We fetch the stack trace that corresponds to this error object.
+          String* key = heap()->hidden_stack_trace_symbol();
+          Object* stack_property =
+              JSObject::cast(*exception_handle)->GetHiddenProperty(key);
+          // Property lookup may have failed.  In this case it's probably not
+          // a valid Error object.
+          if (stack_property->IsJSArray()) {
+            stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
+          }
+        }
+        if (stack_trace_object.is_null()) {
+          // Not an error object, we capture at throw site.
          stack_trace_object = CaptureCurrentStackTrace(
              stack_trace_for_uncaught_exceptions_frame_limit_,
              stack_trace_for_uncaught_exceptions_options_);
+        }
      }
-      ASSERT(is_object);  // Can't use the handle unless there's a real object.
-      message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
-          location, HandleVector<Object>(&exception_handle, 1), stack_trace,
+      Handle<Object> message_obj = MessageHandler::MakeMessageObject(
+          "uncaught_exception",
+          location,
+          HandleVector<Object>(&exception_handle, 1),
+          stack_trace,
          stack_trace_object);
+      thread_local_top()->pending_message_obj_ = *message_obj;
+      if (location != NULL) {
+        thread_local_top()->pending_message_script_ = *location->script();
+        thread_local_top()->pending_message_start_pos_ = location->start_pos();
+        thread_local_top()->pending_message_end_pos_ = location->end_pos();
+      }
    } else if (location != NULL && !location->script().is_null()) {
      // We are bootstrapping and caught an error where the location is set
      // and we have a script for the location.
@@ -1104,30 +1151,13 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
   // Save the message for reporting if the the exception remains uncaught.
   thread_local_top()->has_pending_message_ = report_exception;
-  if (!message_obj.is_null()) {
-    thread_local_top()->pending_message_obj_ = *message_obj;
-    if (location != NULL) {
-      thread_local_top()->pending_message_script_ = *location->script();
-      thread_local_top()->pending_message_start_pos_ = location->start_pos();
-      thread_local_top()->pending_message_end_pos_ = location->end_pos();
-    }
-  }

   // Do not forget to clean catcher_ if currently thrown exception cannot
   // be caught.  If necessary, ReThrow will update the catcher.
   thread_local_top()->catcher_ = can_be_caught_externally ?
       try_catch_handler() : NULL;

-  // NOTE: Notifying the debugger or generating the message
-  // may have caused new exceptions. For now, we just ignore
-  // that and set the pending exception to the original one.
-  if (is_object) {
-    set_pending_exception(*exception_handle);
-  } else {
-    // Failures are not on the heap so they neither need nor work with handles.
-    ASSERT(exception_handle->IsFailure());
-    set_pending_exception(exception);
-  }
+  set_pending_exception(*exception_handle);
 }
@@ -1163,8 +1193,8 @@ bool Isolate::IsExternallyCaught() {
   StackHandler* handler =
       StackHandler::FromAddress(Isolate::handler(thread_local_top()));
   while (handler != NULL && handler->address() < external_handler_address) {
-    ASSERT(!handler->is_try_catch());
-    if (handler->is_try_finally()) return false;
+    ASSERT(!handler->is_catch());
+    if (handler->is_finally()) return false;
     handler = handler->next();
   }
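
The DoThrow changes build on the hidden-property plumbing added above: with
uncaught-exception stack capture enabled, an Error object already carries a
stack trace recorded at construction (CaptureAndSetCurrentStackTraceFor stores
it under hidden_stack_trace_symbol), and DoThrow now prefers that over
capturing at the throw site. The JS-visible consequence, sketched (d8's print
assumed):

    var e = new Error("boom");          // stack recorded here...
    function rethrower() { throw e; }   // ...not here
    try { rethrower(); } catch (err) {
      print(err.stack);                 // top frame is the `new Error` line
    }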

10  deps/v8/src/isolate.h

@@ -362,7 +362,7 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
   /* Serializer state. */                                                     \
   V(ExternalReferenceTable*, external_reference_table, NULL)                  \
   /* AstNode state. */                                                        \
-  V(unsigned, ast_node_id, 0)                                                 \
+  V(int, ast_node_id, 0)                                                      \
   V(unsigned, ast_node_count, 0)                                              \
   /* SafeStackFrameIterator activations count. */                             \
   V(int, safe_stack_iterator_counter, 0)                                      \
@@ -703,6 +703,8 @@ class Isolate {
       int frame_limit,
       StackTrace::StackTraceOptions options);

+  void CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object);
+
   // Returns if the top context may access the given global object. If
   // the result is false, the pending exception is guaranteed to be
   // set.
@@ -729,7 +731,7 @@ class Isolate {
   // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
   Failure* PromoteScheduledException();
-  void DoThrow(MaybeObject* exception, MessageLocation* location);
+  void DoThrow(Object* exception, MessageLocation* location);
   // Checks if exception should be reported and finds out if it's
   // caught externally.
   bool ShouldReportException(bool* can_be_caught_externally,
@@ -1141,6 +1143,10 @@ class Isolate {
   void InitializeDebugger();

+  // Traverse prototype chain to find out whether the object is derived from
+  // the Error object.
+  bool IsErrorObject(Handle<Object> obj);
+
   int stack_trace_nesting_level_;
   StringStream* incomplete_message_;
   // The preallocated memory thread singleton.

4  deps/v8/src/list-inl.h

@@ -72,9 +72,9 @@ void List<T, P>::ResizeAdd(const T& element) {
 template<typename T, class P>
 void List<T, P>::ResizeAddInternal(const T& element) {
   ASSERT(length_ >= capacity_);
-  // Grow the list capacity by 50%, but make sure to let it grow
+  // Grow the list capacity by 100%, but make sure to let it grow
   // even when the capacity is zero (possible initial case).
-  int new_capacity = 1 + capacity_ + (capacity_ >> 1);
+  int new_capacity = 1 + 2 * capacity_;
   // Since the element reference could be an element of the list, copy
   // it out of the old backing storage before resizing.
   T temp = element;
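
The effect of the growth-factor change is easiest to see numerically; a quick
JavaScript check of the capacity sequence each formula produces from an empty
list (illustration only):

    function growth(step, n) {
      var caps = [], c = 0;
      for (var i = 0; i < n; i++) { c = step(c); caps.push(c); }
      return caps.join(" ");
    }
    print(growth(function (c) { return 1 + c + (c >> 1); }, 8));
    // old, +50%:  1 2 4 7 11 17 26 40
    print(growth(function (c) { return 1 + 2 * c; }, 8));
    // new, +100%: 1 3 7 15 31 63 127 255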

16  deps/v8/src/macro-assembler.h

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -36,20 +36,6 @@ enum InvokeFlag {
 };

-
-enum CodeLocation {
-  IN_JAVASCRIPT,
-  IN_JS_ENTRY,
-  IN_C_ENTRY
-};
-
-
-enum HandlerType {
-  TRY_CATCH_HANDLER,
-  TRY_FINALLY_HANDLER,
-  JS_ENTRY_HANDLER
-};
-

 // Types of uncatchable exceptions.
 enum UncatchableExceptionType {
   OUT_OF_MEMORY,

41  deps/v8/src/mark-compact.cc

@@ -242,14 +242,14 @@ static void TraceFragmentation(PagedSpace* space) {
 }

-bool MarkCompactCollector::StartCompaction() {
+bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
   if (!compacting_) {
     ASSERT(evacuation_candidates_.length() == 0);

     CollectEvacuationCandidates(heap()->old_pointer_space());
     CollectEvacuationCandidates(heap()->old_data_space());

-    if (FLAG_compact_code_space) {
+    if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
       CollectEvacuationCandidates(heap()->code_space());
     } else if (FLAG_trace_fragmentation) {
       TraceFragmentation(heap()->code_space());
@@ -697,7 +697,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
   // Don't start compaction if we are in the middle of incremental
   // marking cycle. We did not collect any slots.
   if (!FLAG_never_compact && !was_marked_incrementally_) {
-    StartCompaction();
+    StartCompaction(NON_INCREMENTAL_COMPACTION);
   }

   PagedSpaces spaces;
@@ -809,6 +809,8 @@ class CodeFlusher {
       isolate_->heap()->mark_compact_collector()->
           RecordCodeEntrySlot(slot, target);

+      RecordSharedFunctionInfoCodeSlot(shared);
+
       candidate = next_candidate;
     }
@@ -831,12 +833,21 @@ class CodeFlusher {
         candidate->set_code(lazy_compile);
       }

+      RecordSharedFunctionInfoCodeSlot(candidate);
+
       candidate = next_candidate;
     }

     shared_function_info_candidates_head_ = NULL;
   }

+  void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
+    Object** slot = HeapObject::RawField(shared,
+                                         SharedFunctionInfo::kCodeOffset);
+    isolate_->heap()->mark_compact_collector()->
+        RecordSlot(slot, slot, HeapObject::cast(*slot));
+  }
+
   static JSFunction** GetNextCandidateField(JSFunction* candidate) {
     return reinterpret_cast<JSFunction**>(
         candidate->address() + JSFunction::kCodeEntryOffset);
@@ -1314,6 +1325,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
       re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
                              code,
                              heap);
+
+      // Saving a copy might create a pointer into compaction candidate
+      // that was not observed by marker.  This might happen if JSRegExp data
+      // was marked through the compilation cache before marker reached JSRegExp
+      // object.
+      FixedArray* data = FixedArray::cast(re->data());
+      Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii);
+      heap->mark_compact_collector()->
+          RecordSlot(slot, slot, code);
+
       // Set a number in the 0-255 range to guarantee no smi overflow.
       re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
                              Smi::FromInt(heap->sweep_generation() & 0xff),
@@ -2352,8 +2373,10 @@ void MarkCompactCollector::AfterMarking() {
     code_flusher_->ProcessCandidates();
   }

-  // Clean up dead objects from the runtime profiler.
-  heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+  if (!FLAG_watch_ic_patching) {
+    // Clean up dead objects from the runtime profiler.
+    heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+  }
 }
@@ -3360,9 +3383,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);

-  // Update JSFunction pointers from the runtime profiler.
-  heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
-      &updating_visitor);
+  if (!FLAG_watch_ic_patching) {
+    // Update JSFunction pointers from the runtime profiler.
+    heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+        &updating_visitor);
+  }

   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessWeakReferences(&evacuation_object_retainer);

7  deps/v8/src/mark-compact.h

@@ -441,7 +441,12 @@ class MarkCompactCollector {
   // Performs a global garbage collection.
   void CollectGarbage();

-  bool StartCompaction();
+  enum CompactionMode {
+    INCREMENTAL_COMPACTION,
+    NON_INCREMENTAL_COMPACTION
+  };
+
+  bool StartCompaction(CompactionMode mode);

   void AbortCompaction();

6  deps/v8/src/messages.js

@@ -1078,9 +1078,9 @@ function captureStackTrace(obj, cons_opt) {
   if (stackTraceLimit < 0 || stackTraceLimit > 10000) {
     stackTraceLimit = 10000;
   }
-  var raw_stack = %CollectStackTrace(cons_opt
-                                     ? cons_opt
-                                     : captureStackTrace, stackTraceLimit);
+  var raw_stack = %CollectStackTrace(obj,
+                                     cons_opt ? cons_opt : captureStackTrace,
+                                     stackTraceLimit);
   DefineOneShotAccessor(obj, 'stack', function (obj) {
     return FormatRawStackTrace(obj, raw_stack);
   });
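
The runtime call now receives the object being decorated as an explicit first
argument; previously only the constructor-or-caller cutoff and the frame limit
were passed. JS-visible usage of the API is unchanged:

    function MyError(msg) {
      this.message = msg;
      // Frames above and including MyError are omitted from this.stack.
      Error.captureStackTrace(this, MyError);
    }
    var err = new MyError("nope");
    print(err.stack);   // starts at MyError's caller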

4  deps/v8/src/mips/assembler-mips.cc

@@ -1245,6 +1245,7 @@ void Assembler::and_(Register rd, Register rs, Register rt) {
 void Assembler::andi(Register rt, Register rs, int32_t j) {
+  ASSERT(is_uint16(j));
   GenInstrImmediate(ANDI, rs, rt, j);
 }
@@ -1255,6 +1256,7 @@ void Assembler::or_(Register rd, Register rs, Register rt) {
 void Assembler::ori(Register rt, Register rs, int32_t j) {
+  ASSERT(is_uint16(j));
   GenInstrImmediate(ORI, rs, rt, j);
 }
@@ -1265,6 +1267,7 @@ void Assembler::xor_(Register rd, Register rs, Register rt) {
 void Assembler::xori(Register rt, Register rs, int32_t j) {
+  ASSERT(is_uint16(j));
   GenInstrImmediate(XORI, rs, rt, j);
 }
@@ -1445,6 +1448,7 @@ void Assembler::swr(Register rd, const MemOperand& rs) {
 void Assembler::lui(Register rd, int32_t j) {
+  ASSERT(is_uint16(j));
   GenInstrImmediate(LUI, zero_reg, rd, j);
 }

39  deps/v8/src/mips/builtins-mips.cc

@@ -116,7 +116,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
                                  Label* gc_required) {
   const int initial_capacity = JSArray::kPreallocatedArrayElements;
   STATIC_ASSERT(initial_capacity >= 0);
-  __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2, scratch1);
+  __ LoadInitialArrayMap(array_function, scratch2, scratch1);

   // Allocate the JSArray object together with space for a fixed array with the
   // requested elements.
@@ -212,8 +212,7 @@ static void AllocateJSArray(MacroAssembler* masm,
                             bool fill_with_hole,
                             Label* gc_required) {
   // Load the initial map from the array function.
-  __ LoadGlobalInitialConstructedArrayMap(array_function, scratch2,
-                                          elements_array_storage);
+  __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);

   if (FLAG_debug_code) {  // Assert that array size is not zero.
     __ Assert(
@@ -924,22 +923,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // t4: JSObject
     __ bind(&allocated);
     __ push(t4);
+    __ push(t4);

-    // Push the function and the allocated receiver from the stack.
-    // sp[0]: receiver (newly allocated object)
-    // sp[1]: constructor function
-    // sp[2]: number of arguments (smi-tagged)
-    __ lw(a1, MemOperand(sp, kPointerSize));
-    __ MultiPushReversed(a1.bit() | t4.bit());
-
     // Reload the number of arguments from the stack.
-    // a1: constructor function
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ lw(a3, MemOperand(sp, 4 * kPointerSize));
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
+    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
+    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

     // Set up pointer to last argument.
     __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@@ -953,10 +945,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
     // a2: address of last argument (caller sp)
     // a3: number of arguments (smi-tagged)
     // sp[0]: receiver
-    // sp[1]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
+    // sp[1]: receiver
+    // sp[2]: constructor function
+    // sp[3]: number of arguments (smi-tagged)
     Label loop, entry;
     __ jmp(&entry);
     __ bind(&loop);
@@ -984,14 +975,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                         NullCallWrapper(), CALL_AS_METHOD);
     }

-    // Pop the function from the stack.
-    // v0: result
-    // sp[0]: constructor function
-    // sp[2]: receiver
-    // sp[3]: constructor function
-    // sp[4]: number of arguments (smi-tagged)
-    __ Pop();
-
     // Restore context from the frame.
     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

2  deps/v8/src/mips/code-stubs-mips.cc

@@ -4140,7 +4140,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Invoke: Link this frame into the handler chain.  There's only one
   // handler block in this code object, so its index is 0.
   __ bind(&invoke);
-  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+  __ PushTryHandler(StackHandler::JS_ENTRY, 0);
   // If an exception not caught by another handler occurs, this handler
   // returns control to the code after the bal(&invoke) above, which
   // restores all kCalleeSaved registers (including cp and fp) to their

62  deps/v8/src/mips/ic-mips.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -1198,14 +1198,16 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   Label slow, array, extra, check_if_double_array;
   Label fast_object_with_map_check, fast_object_without_map_check;
   Label fast_double_with_map_check, fast_double_without_map_check;
+  Label transition_smi_elements, finish_object_store, non_double_value;
+  Label transition_double_elements;

   // Register usage.
   Register value = a0;
   Register key = a1;
   Register receiver = a2;
-  Register elements = a3;  // Elements array of the receiver.
+  Register receiver_map = a3;
   Register elements_map = t2;
-  Register receiver_map = t3;
+  Register elements = t3;  // Elements array of the receiver.
   // t0 and t1 are used as general scratch registers.

   // Check that the key is a smi.
@@ -1298,9 +1300,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   __ mov(v0, value);

   __ bind(&non_smi_value);
-  // Escape to slow case when writing non-smi into smi-only array.
-  __ CheckFastObjectElements(receiver_map, scratch_value, &slow);
+  // Escape to elements kind transition case.
+  __ CheckFastObjectElements(receiver_map, scratch_value,
+                             &transition_smi_elements);
   // Fast elements array, store the value to the elements backing store.
+  __ bind(&finish_object_store);
   __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(address, address, scratch_value);
@@ -1326,13 +1330,57 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                  key,
                                  receiver,
                                  elements,
-                                 a3,
                                  t0,
                                  t1,
                                  t2,
-                                 t3,
-                                 &slow);
+                                 &transition_double_elements);
   __ Ret(USE_DELAY_SLOT);
   __ mov(v0, value);
+
+  __ bind(&transition_smi_elements);
+  // Transition the array appropriately depending on the value type.
+  __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
+  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+  __ Branch(&non_double_value, ne, t0, Operand(at));
+
+  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+  // FAST_DOUBLE_ELEMENTS and complete the store.
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                         FAST_DOUBLE_ELEMENTS,
+                                         receiver_map,
+                                         t0,
+                                         &slow);
+  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
+  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+  __ jmp(&fast_double_without_map_check);
+
+  __ bind(&non_double_value);
+  // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                         FAST_ELEMENTS,
+                                         receiver_map,
+                                         t0,
+                                         &slow);
+  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
+  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+  __ jmp(&finish_object_store);
+
+  __ bind(&transition_double_elements);
+  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+                                         FAST_ELEMENTS,
+                                         receiver_map,
+                                         t0,
+                                         &slow);
+  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
+  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+  __ jmp(&finish_object_store);
 }
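
The three new transition paths let the generic keyed store handle elements-kind
changes inline rather than falling back to the runtime. In JavaScript terms
(the kinds are internal; only performance is observable):

    var a = [1, 2, 3];    // FAST_SMI_ONLY_ELEMENTS
    a[0] = 1.5;           // smi-only -> FAST_DOUBLE_ELEMENTS (first path)

    var b = [1, 2, 3];
    b[0] = "str";         // smi-only -> FAST_ELEMENTS        (second path)

    var c = [1.5, 2.5];   // FAST_DOUBLE_ELEMENTS
    c[0] = {};            // double   -> FAST_ELEMENTS        (third path)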

1  deps/v8/src/mips/lithium-codegen-mips.cc

@@ -640,7 +640,6 @@ void LCodeGen::DeoptimizeIf(Condition cc,
 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
-  ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);

77  deps/v8/src/mips/macro-assembler-mips.cc

@@ -771,18 +771,18 @@ void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
     } else if (!(j.imm32_ & kHiMask)) {
       ori(rd, zero_reg, j.imm32_);
     } else if (!(j.imm32_ & kImm16Mask)) {
-      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
     } else {
-      lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
       ori(rd, rd, (j.imm32_ & kImm16Mask));
     }
   } else if (MustUseReg(j.rmode_) || gen2instr) {
     if (MustUseReg(j.rmode_)) {
       RecordRelocInfo(j.rmode_, j.imm32_);
     }
-    // We need always the same number of instructions as we may need to patch
+    // We always need the same number of instructions as we may need to patch
     // this code to load another value which may need 2 instructions to load.
-    lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+    lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
     ori(rd, rd, (j.imm32_ & kImm16Mask));
   }
 }
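
The old lui operand computation masked first and shifted second; on a signed
32-bit immediate the arithmetic shift then sign-extends, producing a value
that is not a valid 16-bit operand (exactly what the new ASSERT(is_uint16(j))
in lui would trip). Shifting first and masking after fixes it. A worked
example using JavaScript's 32-bit operators, values printed unsigned:

    var imm = 0xFFFF8000 | 0;   // i.e. int32 -32768
    var kHiMask = 0xFFFF0000 | 0, kImm16Mask = 0xFFFF, kLuiShift = 16;

    var oldOperand = (imm & kHiMask) >> kLuiShift;    // sign-extended
    print((oldOperand >>> 0).toString(16));           // "ffffffff" - too wide

    var newOperand = (imm >> kLuiShift) & kImm16Mask;
    print(newOperand.toString(16));                   // "ffff" - fits 16 bits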
@@ -2576,8 +2576,7 @@ void MacroAssembler::DebugBreak() {
 // ---------------------------------------------------------------------------
 // Exception handling.

-void MacroAssembler::PushTryHandler(CodeLocation try_location,
-                                    HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -2589,30 +2588,23 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
   // For the JSEntry handler, we must preserve a0-a3 and s0.
   // t1-t3 are available. We will build up the handler from the bottom by
-  // pushing on the stack. First compute the state.
-  unsigned state = StackHandler::OffsetField::encode(handler_index);
-  if (try_location == IN_JAVASCRIPT) {
-    state |= (type == TRY_CATCH_HANDLER)
-        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
-        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
-  } else {
-    ASSERT(try_location == IN_JS_ENTRY);
-    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
-  }
+  // pushing on the stack.

   // Set up the code object (t1) and the state (t2) for pushing.
+  unsigned state =
+      StackHandler::IndexField::encode(handler_index) |
+      StackHandler::KindField::encode(kind);
   li(t1, Operand(CodeObject()));
   li(t2, Operand(state));

   // Push the frame pointer, context, state, and code object.
-  if (try_location == IN_JAVASCRIPT) {
-    MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
-  } else {
+  if (kind == StackHandler::JS_ENTRY) {
     ASSERT_EQ(Smi::FromInt(0), 0);
     // The second zero_reg indicates no context.
     // The first zero_reg is the NULL frame pointer.
     // The operands are reversed to match the order of MultiPush/Pop.
     Push(zero_reg, zero_reg, t2, t1);
+  } else {
+    MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
   }

   // Link the current handler as the next handler.
@@ -2727,7 +2719,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
   bind(&check_kind);
-  STATIC_ASSERT(StackHandler::ENTRY == 0);
+  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
   lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
   And(a2, a2, Operand(StackHandler::KindField::kMask));
   Branch(&fetch_next, ne, a2, Operand(zero_reg));
@@ -4279,26 +4271,41 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
 }

-void MacroAssembler::LoadGlobalInitialConstructedArrayMap(
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+    ElementsKind expected_kind,
+    ElementsKind transitioned_kind,
+    Register map_in_out,
+    Register scratch,
+    Label* no_map_match) {
+  // Load the global or builtins object from the current context.
+  lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+  lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+  // Check that the function's map is the same as the expected cached map.
+  int expected_index =
+      Context::GetContextMapIndexFromElementsKind(expected_kind);
+  lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
+  Branch(no_map_match, ne, map_in_out, Operand(at));
+
+  // Use the transitioned cached map.
+  int trans_index =
+      Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+  lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
     Register function_in, Register scratch, Register map_out) {
   ASSERT(!function_in.is(map_out));
   Label done;
   lw(map_out, FieldMemOperand(function_in,
                               JSFunction::kPrototypeOrInitialMapOffset));
   if (!FLAG_smi_only_arrays) {
-    // Load the global or builtins object from the current context.
-    lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
-    lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
-
-    // Check that the function's map is same as the cached map.
-    lw(at, MemOperand(
-        scratch, Context::SlotOffset(Context::SMI_JS_ARRAY_MAP_INDEX)));
-    Branch(&done, ne, map_out, Operand(at));
-
-    // Use the cached transitioned map.
-    lw(map_out,
-       MemOperand(scratch,
-                  Context::SlotOffset(Context::OBJECT_JS_ARRAY_MAP_INDEX)));
+    LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                        FAST_ELEMENTS,
+                                        map_out,
+                                        scratch,
+                                        &done);
   }
   bind(&done);
 }

23  deps/v8/src/mips/macro-assembler-mips.h

@@ -772,10 +772,21 @@ class MacroAssembler: public Assembler {
   void LoadContext(Register dst, int context_chain_length);

-  // Load the initial map for new Arrays of a given type.
-  void LoadGlobalInitialConstructedArrayMap(Register function_in,
-                                            Register scratch,
-                                            Register map_out);
+  // Conditionally load the cached Array transitioned map of type
+  // transitioned_kind from the global context if the map in register
+  // map_in_out is the cached Array map in the global context of
+  // expected_kind.
+  void LoadTransitionedArrayMapConditional(
+      ElementsKind expected_kind,
+      ElementsKind transitioned_kind,
+      Register map_in_out,
+      Register scratch,
+      Label* no_map_match);
+
+  // Load the initial map for new Arrays from a JSFunction.
+  void LoadInitialArrayMap(Register function_in,
+                           Register scratch,
+                           Register map_out);

   void LoadGlobalFunction(int index, Register function);
@@ -854,9 +865,7 @@ class MacroAssembler: public Assembler {
   // Exception handling.

   // Push a new try handler and link into try handler chain.
-  void PushTryHandler(CodeLocation try_location,
-                      HandlerType type,
-                      int handler_index);
+  void PushTryHandler(StackHandler::Kind kind, int handler_index);

   // Unlink the stack handler on top of the stack from the try handler chain.
   // Must preserve the result register.

68  deps/v8/src/mips/stub-cache-mips.cc

@@ -1468,28 +1468,28 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
     __ Ret();
   } else {
     Label call_builtin;
-    Register elements = a3;
-    Register end_elements = t1;
-    // Get the elements array of the object.
-    __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-    // Check that the elements are in fast mode and writable.
-    __ CheckMap(elements,
-                v0,
-                Heap::kFixedArrayMapRootIndex,
-                &call_builtin,
-                DONT_DO_SMI_CHECK);

     if (argc == 1) {  // Otherwise fall through to call the builtin.
       Label attempt_to_grow_elements;

+      Register elements = t2;
+      Register end_elements = t1;
+      // Get the elements array of the object.
+      __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+      // Check that the elements are in fast mode and writable.
+      __ CheckMap(elements,
+                  v0,
+                  Heap::kFixedArrayMapRootIndex,
+                  &call_builtin,
+                  DONT_DO_SMI_CHECK);
+
       // Get the array's length into v0 and calculate new length.
       __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
       STATIC_ASSERT(kSmiTagSize == 1);
       STATIC_ASSERT(kSmiTag == 0);
       __ Addu(v0, v0, Operand(Smi::FromInt(argc)));

-      // Get the element's length.
+      // Get the elements' length.
       __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));

       // Check if we could survive without allocation.
@@ -1503,7 +1503,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       // Save new length.
       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-      // Push the element.
+      // Store the value.
       // We may need a register containing the address end_elements below,
       // so write back the value in end_elements.
       __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1519,13 +1519,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(

       __ bind(&with_write_barrier);

-      __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(t2, t2, &call_builtin);
+      __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+        Label fast_object, not_fast_object;
+        __ CheckFastObjectElements(a3, t3, &not_fast_object);
+        __ jmp(&fast_object);
+        // In case of fast smi-only, convert to fast object, otherwise bail out.
+        __ bind(&not_fast_object);
+        __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
+        // edx: receiver
+        // r3: map
+        __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                               FAST_ELEMENTS,
+                                               a3,
+                                               t3,
+                                               &call_builtin);
+        __ mov(a2, receiver);
+        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+        __ bind(&fast_object);
+      } else {
+        __ CheckFastObjectElements(a3, a3, &call_builtin);
+      }

       // Save new length.
       __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));

-      // Push the element.
+      // Store the value.
       // We may need a register containing the address end_elements below,
       // so write back the value in end_elements.
       __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1573,23 +1593,23 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       __ Addu(end_elements, elements, end_elements);
       __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
       __ li(t3, Operand(new_space_allocation_top));
-      __ lw(t2, MemOperand(t3));
-      __ Branch(&call_builtin, ne, end_elements, Operand(t2));
+      __ lw(a3, MemOperand(t3));
+      __ Branch(&call_builtin, ne, end_elements, Operand(a3));

       __ li(t5, Operand(new_space_allocation_limit));
       __ lw(t5, MemOperand(t5));
-      __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
-      __ Branch(&call_builtin, hi, t2, Operand(t5));
+      __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
+      __ Branch(&call_builtin, hi, a3, Operand(t5));

       // We fit and could grow elements.
       // Update new_space_allocation_top.
-      __ sw(t2, MemOperand(t3));
+      __ sw(a3, MemOperand(t3));
       // Push the argument.
       __ sw(a2, MemOperand(end_elements));
       // Fill the rest with holes.
-      __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
+      __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
       for (int i = 1; i < kAllocationDelta; i++) {
-        __ sw(t2, MemOperand(end_elements, i * kPointerSize));
+        __ sw(a3, MemOperand(end_elements, i * kPointerSize));
       }

       // Update elements' and array's sizes.

20  deps/v8/src/objects-inl.h

@@ -3530,6 +3530,8 @@ ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
           kThisPropertyAssignmentsOffset)

+SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
+
 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
                kHiddenPrototypeBit)
 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
@@ -3576,6 +3578,8 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
 SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
               kThisPropertyAssignmentsCountOffset)
 SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)

 #else

 #define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                         \
@@ -3626,6 +3630,9 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                         this_property_assignments_count,
                         kThisPropertyAssignmentsCountOffset)
 PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
+
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
 #endif
@@ -3708,6 +3715,9 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
                kNameShouldPrintAsAnonymous)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
+               kDontCrankshaft)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)

 ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
 ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
@@ -3777,16 +3787,6 @@ void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
 }

-
-Smi* SharedFunctionInfo::deopt_counter() {
-  return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
-}
-
-
-void SharedFunctionInfo::set_deopt_counter(Smi* value) {
-  WRITE_FIELD(this, kDeoptCounterOffset, value);
-}
-

 bool SharedFunctionInfo::is_compiled() {
   return code() !=
       Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);

116  deps/v8/src/objects.cc

@@ -3773,12 +3773,15 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
     // code zero) it will always occupy the first entry if present.
     DescriptorArray* descriptors = this->map()->instance_descriptors();
     if ((descriptors->number_of_descriptors() > 0) &&
-        (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
-        descriptors->IsProperty(0)) {
-      ASSERT(descriptors->GetType(0) == FIELD);
-      Object* hidden_store =
-          this->FastPropertyAt(descriptors->GetFieldIndex(0));
-      return StringDictionary::cast(hidden_store);
+        (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+      if (descriptors->GetType(0) == FIELD) {
+        Object* hidden_store =
+            this->FastPropertyAt(descriptors->GetFieldIndex(0));
+        return StringDictionary::cast(hidden_store);
+      } else {
+        ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+               descriptors->GetType(0) == MAP_TRANSITION);
+      }
     }
   } else {
     PropertyAttributes attributes;
@@ -3819,11 +3822,14 @@ MaybeObject* JSObject::SetHiddenPropertiesDictionary(
     // code zero) it will always occupy the first entry if present.
     DescriptorArray* descriptors = this->map()->instance_descriptors();
     if ((descriptors->number_of_descriptors() > 0) &&
-        (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
-        descriptors->IsProperty(0)) {
-      ASSERT(descriptors->GetType(0) == FIELD);
-      this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
-      return this;
+        (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+      if (descriptors->GetType(0) == FIELD) {
+        this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
+        return this;
+      } else {
+        ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+               descriptors->GetType(0) == MAP_TRANSITION);
+      }
     }
   }
   MaybeObject* store_result =
@@ -4247,11 +4253,14 @@ bool JSReceiver::IsSimpleEnum() {
 }

-int Map::NumberOfDescribedProperties() {
+int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
   int result = 0;
   DescriptorArray* descs = instance_descriptors();
   for (int i = 0; i < descs->number_of_descriptors(); i++) {
-    if (descs->IsProperty(i)) result++;
+    PropertyDetails details(descs->GetDetails(i));
+    if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
+      result++;
+    }
   }
   return result;
 }
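
NumberOfDescribedProperties now honors the same attribute filter that the
slow-properties counter already took, which is what lets
NumberOfLocalProperties (further down in this file) collapse to a single
ternary. The behavior being preserved, illustrated in JavaScript:

    var o = { a: 1, b: 2 };
    Object.defineProperty(o, "hidden", { value: 3, enumerable: false });
    print(Object.keys(o).length);                  // 2: DONT_ENUM filtered out
    print(Object.getOwnPropertyNames(o).length);   // 3: no filter applied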
@ -5502,7 +5511,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey {
for (int i = 0; i < maps_->length(); ++i) { for (int i = 0; i < maps_->length(); ++i) {
bool match_found = false; bool match_found = false;
for (int j = 0; j < other_maps.length(); ++j) { for (int j = 0; j < other_maps.length(); ++j) {
if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) { if (*(maps_->at(i)) == *(other_maps.at(j))) {
match_found = true; match_found = true;
break; break;
} }
@@ -5721,6 +5730,11 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
 }


+static bool InsertionPointFound(String* key1, String* key2) {
+  return key1->Hash() > key2->Hash() || key1 == key2;
+}
+
+
 MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
                                          TransitionFlag transition_flag) {
   // Transitions are only kept when inserting another transition.
@@ -5793,28 +5807,24 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,

   // Copy the descriptors, filtering out transitions and null descriptors,
   // and inserting or replacing a descriptor.
-  uint32_t descriptor_hash = descriptor->GetKey()->Hash();
-  int from_index = 0;
   int to_index = 0;
-
-  for (; from_index < number_of_descriptors(); from_index++) {
-    String* key = GetKey(from_index);
-    if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) {
-      break;
+  int insertion_index = -1;
+  int from_index = 0;
+  while (from_index < number_of_descriptors()) {
+    if (insertion_index < 0 &&
+        InsertionPointFound(GetKey(from_index), descriptor->GetKey())) {
+      insertion_index = to_index++;
+      if (replacing) from_index++;
+    } else {
+      if (!(IsNullDescriptor(from_index) ||
+            (remove_transitions && IsTransitionOnly(from_index)))) {
+        new_descriptors->CopyFrom(to_index++, this, from_index, witness);
+      }
+      from_index++;
     }
-    if (IsNullDescriptor(from_index)) continue;
-    if (remove_transitions && IsTransitionOnly(from_index)) continue;
-    new_descriptors->CopyFrom(to_index++, this, from_index, witness);
   }
-
-  new_descriptors->Set(to_index++, descriptor, witness);
-  if (replacing) from_index++;
-
-  for (; from_index < number_of_descriptors(); from_index++) {
-    if (IsNullDescriptor(from_index)) continue;
-    if (remove_transitions && IsTransitionOnly(from_index)) continue;
-    new_descriptors->CopyFrom(to_index++, this, from_index, witness);
-  }
+  if (insertion_index < 0) insertion_index = to_index++;
+  new_descriptors->Set(insertion_index, descriptor, witness);

   ASSERT(to_index == new_descriptors->number_of_descriptors());
   SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
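The rewrite folds CopyInsert's three loops (copy-before, insert, copy-after) into a single pass that remembers the insertion point via InsertionPointFound. A self-contained sketch of the same single-pass scheme over a sorted vector of (hash, key) pairs; the types and the replacing test are simplified stand-ins:

#include <cassert>
#include <string>
#include <vector>

struct Entry { unsigned hash; std::string key; };

// Mirrors InsertionPointFound: the new entry goes before the first existing
// key with a larger hash, or replaces an equal key.
static bool InsertionPointFound(const Entry& existing, const Entry& inserted) {
  return existing.hash > inserted.hash || existing.key == inserted.key;
}

// Single-pass copy-and-insert, as in the rewritten CopyInsert: walk the old
// array once, placing the new entry as soon as its slot is found instead of
// splitting the copy into a "before" and an "after" loop.
static std::vector<Entry> CopyInsert(const std::vector<Entry>& old,
                                     const Entry& descriptor) {
  std::vector<Entry> result;
  int insertion_index = -1;
  size_t from = 0;
  while (from < old.size()) {
    if (insertion_index < 0 && InsertionPointFound(old[from], descriptor)) {
      insertion_index = static_cast<int>(result.size());
      result.push_back(descriptor);
      if (old[from].key == descriptor.key) from++;  // replacing: skip old entry
    } else {
      result.push_back(old[from++]);
    }
  }
  if (insertion_index < 0) result.push_back(descriptor);  // append at end
  return result;
}

int main() {
  std::vector<Entry> old = {{1, "a"}, {3, "b"}, {7, "c"}};
  std::vector<Entry> out = CopyInsert(old, {5, "x"});
  assert(out.size() == 4 && out[2].key == "x");  // inserted before hash 7
  return 0;
}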
@@ -5829,14 +5839,14 @@ MaybeObject* DescriptorArray::RemoveTransitions() {
   // not be allocated.

   // Compute the size of the map transition entries to be removed.
-  int num_removed = 0;
+  int new_number_of_descriptors = 0;
   for (int i = 0; i < number_of_descriptors(); i++) {
-    if (!IsProperty(i)) num_removed++;
+    if (IsProperty(i)) new_number_of_descriptors++;
   }

   // Allocate the new descriptor array.
   DescriptorArray* new_descriptors;
-  { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed);
+  { MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
     if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
       return maybe_result;
     }
@@ -7606,13 +7616,10 @@ bool SharedFunctionInfo::HasSourceCode() {
 }


-Object* SharedFunctionInfo::GetSourceCode() {
-  Isolate* isolate = GetIsolate();
-  if (!HasSourceCode()) return isolate->heap()->undefined_value();
-  HandleScope scope(isolate);
-  Object* source = Script::cast(script())->source();
-  return *SubString(Handle<String>(String::cast(source), isolate),
-                    start_position(), end_position());
+Handle<Object> SharedFunctionInfo::GetSourceCode() {
+  if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
+  Handle<String> source(String::cast(Script::cast(script())->source()));
+  return SubString(source, start_position(), end_position());
 }
@@ -10355,24 +10362,9 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) {

 int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
-  if (HasFastProperties()) {
-    DescriptorArray* descs = map()->instance_descriptors();
-    int result = 0;
-    for (int i = 0; i < descs->number_of_descriptors(); i++) {
-      PropertyDetails details(descs->GetDetails(i));
-      if (details.IsProperty() && (details.attributes() & filter) == 0) {
-        result++;
-      }
-    }
-    return result;
-  } else {
-    return property_dictionary()->NumberOfElementsFilterAttributes(filter);
-  }
-}
-
-
-int JSObject::NumberOfEnumProperties() {
-  return NumberOfLocalProperties(static_cast<PropertyAttributes>(DONT_ENUM));
+  return HasFastProperties() ?
+      map()->NumberOfDescribedProperties(filter) :
+      property_dictionary()->NumberOfElementsFilterAttributes(filter);
 }
@@ -10493,7 +10485,7 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
 // purpose of this function is to provide reflection information for the object
 // mirrors.
 void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
-  ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index));
+  ASSERT(storage->length() >= (NumberOfLocalProperties() - index));
   if (HasFastProperties()) {
     DescriptorArray* descs = map()->instance_descriptors();
     for (int i = 0; i < descs->number_of_descriptors(); i++) {

53
deps/v8/src/objects.h

@@ -1638,7 +1638,7 @@ class JSObject: public JSReceiver {
                                      Handle<String> key,
                                      Handle<Object> value);
   // Returns a failure if a GC is required.
-  MaybeObject* SetHiddenProperty(String* key, Object* value);
+  MUST_USE_RESULT MaybeObject* SetHiddenProperty(String* key, Object* value);
   // Gets the value of a hidden property with the given key. Returns undefined
   // if the property doesn't exist (or if called on a detached proxy),
   // otherwise returns the value set for the key.
@@ -1807,9 +1807,7 @@ class JSObject: public JSReceiver {
   // Returns the number of properties on this object filtering out properties
   // with the specified attributes (ignoring interceptors).
-  int NumberOfLocalProperties(PropertyAttributes filter);
-  // Returns the number of enumerable properties (ignoring interceptors).
-  int NumberOfEnumProperties();
+  int NumberOfLocalProperties(PropertyAttributes filter = NONE);
   // Fill in details for properties into storage starting at the specified
   // index.
   void GetLocalPropertyNames(FixedArray* storage, int index);
@@ -4638,8 +4636,9 @@ class Map: public HeapObject {
   // Returns the next free property index (only valid for FAST MODE).
   int NextFreePropertyIndex();

-  // Returns the number of properties described in instance_descriptors.
-  int NumberOfDescribedProperties();
+  // Returns the number of properties described in instance_descriptors
+  // filtering out properties with the specified attributes.
+  int NumberOfDescribedProperties(PropertyAttributes filter = NONE);

   // Casting.
   static inline Map* cast(Object* obj);
@@ -4697,12 +4696,6 @@ class Map: public HeapObject {
   // The "shared" flags of both this map and |other| are ignored.
   bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);

-  // Returns true if this map and |other| describe equivalent objects.
-  // The "shared" flags of both this map and |other| are ignored.
-  bool EquivalentTo(Map* other) {
-    return EquivalentToForNormalization(other, KEEP_INOBJECT_PROPERTIES);
-  }
-
   // Returns the contents of this map's descriptor array for the given string.
   // May return NULL. |safe_to_add_transition| is set to false and NULL
   // is returned if adding transitions is not allowed.
@@ -5204,8 +5197,14 @@ class SharedFunctionInfo: public HeapObject {
   // A counter used to determine when to stress the deoptimizer with a
   // deopt.
-  inline Smi* deopt_counter();
-  inline void set_deopt_counter(Smi* counter);
+  inline int deopt_counter();
+  inline void set_deopt_counter(int counter);
+
+  inline int profiler_ticks();
+  inline void set_profiler_ticks(int ticks);
+
+  inline int ast_node_count();
+  inline void set_ast_node_count(int count);

   // Add information on assignments of the form this.x = ...;
   void SetThisPropertyAssignmentsInfo(
@@ -5279,6 +5278,12 @@ class SharedFunctionInfo: public HeapObject {
   // through the API, which does not change this flag).
   DECL_BOOLEAN_ACCESSORS(is_anonymous)

+  // Indicates that the function cannot be crankshafted.
+  DECL_BOOLEAN_ACCESSORS(dont_crankshaft)
+
+  // Indicates that the function cannot be inlined.
+  DECL_BOOLEAN_ACCESSORS(dont_inline)
+
   // Indicates whether or not the code in the shared function support
   // deoptimization.
   inline bool has_deoptimization_support();
@@ -5316,7 +5321,7 @@ class SharedFunctionInfo: public HeapObject {
   // [source code]: Source code for the function.
   bool HasSourceCode();
-  Object* GetSourceCode();
+  Handle<Object> GetSourceCode();

   inline int opt_count();
   inline void set_opt_count(int opt_count);
@@ -5373,12 +5378,12 @@ class SharedFunctionInfo: public HeapObject {
       kInferredNameOffset + kPointerSize;
   static const int kThisPropertyAssignmentsOffset =
       kInitialMapOffset + kPointerSize;
-  static const int kDeoptCounterOffset =
+  static const int kProfilerTicksOffset =
       kThisPropertyAssignmentsOffset + kPointerSize;
 #if V8_HOST_ARCH_32_BIT
   // Smi fields.
   static const int kLengthOffset =
-      kDeoptCounterOffset + kPointerSize;
+      kProfilerTicksOffset + kPointerSize;
   static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
   static const int kExpectedNofPropertiesOffset =
       kFormalParameterCountOffset + kPointerSize;
@@ -5396,8 +5401,11 @@ class SharedFunctionInfo: public HeapObject {
       kCompilerHintsOffset + kPointerSize;
   static const int kOptCountOffset =
       kThisPropertyAssignmentsCountOffset + kPointerSize;
+  static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
+  static const int kDeoptCounterOffset =
+      kAstNodeCountOffset + kPointerSize;
   // Total size.
-  static const int kSize = kOptCountOffset + kPointerSize;
+  static const int kSize = kDeoptCounterOffset + kPointerSize;
 #else
   // The only reason to use smi fields instead of int fields
   // is to allow iteration without maps decoding during
@@ -5409,7 +5417,7 @@ class SharedFunctionInfo: public HeapObject {
   // word is not set and thus this word cannot be treated as pointer
   // to HeapObject during old space traversal.
   static const int kLengthOffset =
-      kDeoptCounterOffset + kPointerSize;
+      kProfilerTicksOffset + kPointerSize;
   static const int kFormalParameterCountOffset =
       kLengthOffset + kIntSize;
@@ -5433,8 +5441,11 @@ class SharedFunctionInfo: public HeapObject {
   static const int kOptCountOffset =
       kThisPropertyAssignmentsCountOffset + kIntSize;
+  static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
+  static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
   // Total size.
-  static const int kSize = kOptCountOffset + kIntSize;
+  static const int kSize = kDeoptCounterOffset + kIntSize;
 #endif
@@ -5481,6 +5492,8 @@ class SharedFunctionInfo: public HeapObject {
     kBoundFunction,
     kIsAnonymous,
     kNameShouldPrintAsAnonymous,
+    kDontCrankshaft,
+    kDontInline,
     kCompilerHintsCount  // Pseudo entry
   };

458
deps/v8/src/parser.cc

File diff suppressed because it is too large

96
deps/v8/src/parser.h

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -435,9 +435,8 @@ class Parser {
          v8::Extension* extension,
          ScriptDataImpl* pre_data);
   virtual ~Parser() {
-    if (reusable_preparser_ != NULL) {
-      delete reusable_preparser_;
-    }
+    delete reusable_preparser_;
+    reusable_preparser_ = NULL;
   }

   // Returns NULL if parsing failed.
@@ -477,7 +476,69 @@ class Parser {
   };

   class BlockState;
-  class FunctionState;
+
+  class FunctionState BASE_EMBEDDED {
+   public:
+    FunctionState(Parser* parser,
+                  Scope* scope,
+                  Isolate* isolate);
+    ~FunctionState();
+
+    int NextMaterializedLiteralIndex() {
+      return next_materialized_literal_index_++;
+    }
+    int materialized_literal_count() {
+      return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
+    }
+
+    int NextHandlerIndex() { return next_handler_index_++; }
+    int handler_count() { return next_handler_index_; }
+
+    void SetThisPropertyAssignmentInfo(
+        bool only_simple_this_property_assignments,
+        Handle<FixedArray> this_property_assignments) {
+      only_simple_this_property_assignments_ =
+          only_simple_this_property_assignments;
+      this_property_assignments_ = this_property_assignments;
+    }
+    bool only_simple_this_property_assignments() {
+      return only_simple_this_property_assignments_;
+    }
+    Handle<FixedArray> this_property_assignments() {
+      return this_property_assignments_;
+    }
+
+    void AddProperty() { expected_property_count_++; }
+    int expected_property_count() { return expected_property_count_; }
+
+    AstNodeFactory<AstConstructionVisitor>* factory() { return &factory_; }
+
+   private:
+    // Used to assign an index to each literal that needs materialization in
+    // the function. Includes regexp literals, and boilerplate for object and
+    // array literals.
+    int next_materialized_literal_index_;
+
+    // Used to assign a per-function index to try and catch handlers.
+    int next_handler_index_;
+
+    // Properties count estimation.
+    int expected_property_count_;
+
+    // Keeps track of assignments to properties of this. Used for
+    // optimizing constructors.
+    bool only_simple_this_property_assignments_;
+    Handle<FixedArray> this_property_assignments_;
+
+    Parser* parser_;
+    FunctionState* outer_function_state_;
+    Scope* outer_scope_;
+    int saved_ast_node_id_;
+    AstNodeFactory<AstConstructionVisitor> factory_;
+  };
+

   FunctionLiteral* ParseLazy(CompilationInfo* info,
                              UC16CharacterStream* source,
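The FunctionState constructor and destructor are defined in parser.cc; the shape of the class suggests a RAII stack: entering a function literal pushes a fresh state onto the parser, and leaving it restores the enclosing one. A reduced model of that push/pop discipline, with most fields omitted and types simplified:

#include <cassert>

struct Parser;

// Reduced model of the FunctionState discipline: the constructor installs
// itself as the parser's current state and the destructor restores the
// outer one, so nesting follows C++ scope automatically.
struct FunctionState {
  explicit FunctionState(Parser* parser);
  ~FunctionState();

  int NextMaterializedLiteralIndex() {
    return next_materialized_literal_index_++;
  }

  int next_materialized_literal_index_ = 0;
  Parser* parser_;
  FunctionState* outer_function_state_;
};

struct Parser {
  FunctionState* current_function_state_ = nullptr;
};

FunctionState::FunctionState(Parser* parser)
    : parser_(parser), outer_function_state_(parser->current_function_state_) {
  parser->current_function_state_ = this;
}

FunctionState::~FunctionState() {
  parser_->current_function_state_ = outer_function_state_;
}

int main() {
  Parser p;
  {
    FunctionState outer(&p);
    {
      FunctionState inner(&p);                    // nested function literal
      assert(p.current_function_state_ == &inner);
    }
    assert(p.current_function_state_ == &outer);  // restored on scope exit
  }
  assert(p.current_function_state_ == nullptr);
  return 0;
}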
@@ -651,7 +712,6 @@ class Parser {
   // Get odd-ball literals.
   Literal* GetLiteralUndefined();
   Literal* GetLiteralTheHole();
-  Literal* GetLiteralNumber(double value);

   Handle<String> ParseIdentifier(bool* ok);
   Handle<String> ParseIdentifierOrStrictReservedWord(
@@ -699,31 +759,12 @@ class Parser {

   // Factory methods.

-  Statement* EmptyStatement() {
-    static v8::internal::EmptyStatement* empty =
-        ::new v8::internal::EmptyStatement();
-    return empty;
-  }
-
   Scope* NewScope(Scope* parent, ScopeType type);

   Handle<String> LookupSymbol(int symbol_id);

   Handle<String> LookupCachedSymbol(int symbol_id);

-  Expression* NewCall(Expression* expression,
-                      ZoneList<Expression*>* arguments,
-                      int pos) {
-    return new(zone()) Call(isolate(), expression, arguments, pos);
-  }
-
-  inline Literal* NewLiteral(Handle<Object> handle) {
-    return new(zone()) Literal(isolate(), handle);
-  }
-
-  // Create a number literal.
-  Literal* NewNumberLiteral(double value);
-
   // Generate AST node that throw a ReferenceError with the given type.
   Expression* NewThrowReferenceError(Handle<String> type);
@@ -746,6 +787,10 @@ class Parser {
   preparser::PreParser::PreParseResult LazyParseFunctionLiteral(
       SingletonLogger* logger);

+  AstNodeFactory<AstConstructionVisitor>* factory() {
+    return current_function_state_->factory();
+  }
+
   Isolate* isolate_;
   ZoneList<Handle<String> > symbol_cache_;
@@ -762,6 +807,7 @@ class Parser {
   Mode mode_;
   bool allow_natives_syntax_;
   bool allow_lazy_;
+  bool allow_modules_;
   bool stack_overflow_;
   // If true, the next (and immediately following) function literal is
   // preceded by a parenthesis.

2
deps/v8/src/platform-freebsd.cc

@@ -710,7 +710,7 @@ class SignalSender : public Thread {
     FULL_INTERVAL
   };

-  static const int kSignalSenderStackSize = 32 * KB;
+  static const int kSignalSenderStackSize = 64 * KB;

   explicit SignalSender(int interval)
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-linux.cc

@@ -1060,7 +1060,7 @@ class SignalSender : public Thread {
     FULL_INTERVAL
   };

-  static const int kSignalSenderStackSize = 32 * KB;
+  static const int kSignalSenderStackSize = 64 * KB;

   explicit SignalSender(int interval)
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-macos.cc

@@ -733,7 +733,7 @@ class Sampler::PlatformData : public Malloced {
 class SamplerThread : public Thread {
  public:
-  static const int kSamplerThreadStackSize = 32 * KB;
+  static const int kSamplerThreadStackSize = 64 * KB;

   explicit SamplerThread(int interval)
       : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),

2
deps/v8/src/platform-openbsd.cc

@@ -782,7 +782,7 @@ class SignalSender : public Thread {
     FULL_INTERVAL
   };

-  static const int kSignalSenderStackSize = 32 * KB;
+  static const int kSignalSenderStackSize = 64 * KB;

   explicit SignalSender(int interval)
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-solaris.cc

@@ -704,7 +704,7 @@ class SignalSender : public Thread {
     FULL_INTERVAL
   };

-  static const int kSignalSenderStackSize = 32 * KB;
+  static const int kSignalSenderStackSize = 64 * KB;

   explicit SignalSender(int interval)
       : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-win32.cc

@@ -1894,7 +1894,7 @@ class Sampler::PlatformData : public Malloced {
 class SamplerThread : public Thread {
  public:
-  static const int kSamplerThreadStackSize = 32 * KB;
+  static const int kSamplerThreadStackSize = 64 * KB;

   explicit SamplerThread(int interval)
       : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),

10
deps/v8/src/preparser.h

@@ -115,7 +115,8 @@ class PreParser {
             i::ParserRecorder* log,
             uintptr_t stack_limit,
             bool allow_lazy,
-            bool allow_natives_syntax)
+            bool allow_natives_syntax,
+            bool allow_modules)
       : scanner_(scanner),
         log_(log),
         scope_(NULL),
@@ -124,6 +125,7 @@ class PreParser {
         strict_mode_violation_type_(NULL),
         stack_overflow_(false),
         allow_lazy_(allow_lazy),
+        allow_modules_(allow_modules),
         allow_natives_syntax_(allow_natives_syntax),
         parenthesized_function_(false),
         harmony_scoping_(scanner->HarmonyScoping()) { }
@@ -140,8 +142,9 @@ class PreParser {
                                       uintptr_t stack_limit) {
     bool allow_lazy = (flags & i::kAllowLazy) != 0;
     bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0;
-    return PreParser(scanner, log, stack_limit,
-                     allow_lazy, allow_natives_syntax).PreParse();
+    bool allow_modules = (flags & i::kAllowModules) != 0;
+    return PreParser(scanner, log, stack_limit, allow_lazy,
+                     allow_natives_syntax, allow_modules).PreParse();
   }

   // Parses a single function literal, from the opening parentheses before
@@ -647,6 +650,7 @@ class PreParser {
   const char* strict_mode_violation_type_;
   bool stack_overflow_;
   bool allow_lazy_;
+  bool allow_modules_;
   bool allow_natives_syntax_;
   bool parenthesized_function_;
   bool harmony_scoping_;
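The new allow_modules flag follows the existing pattern in PreParseProgram: each capability is one bit of the flags word, decoded with a mask test before the PreParser is constructed. A standalone sketch of that decoding; the flag values below are stand-ins for the i::kAllow* constants:

#include <cassert>

// Stand-ins for the i::kAllow* bit flags (values illustrative).
enum ParserFlags {
  kAllowLazy          = 1 << 0,
  kAllowNativesSyntax = 1 << 1,
  kAllowModules       = 1 << 2
};

struct PreParserConfig {
  bool allow_lazy;
  bool allow_natives_syntax;
  bool allow_modules;
};

// Unpacks the flags word the way PreParseProgram does before constructing
// the PreParser: one mask test per capability bit.
static PreParserConfig Decode(int flags) {
  return PreParserConfig{
      (flags & kAllowLazy) != 0,
      (flags & kAllowNativesSyntax) != 0,
      (flags & kAllowModules) != 0};
}

int main() {
  PreParserConfig c = Decode(kAllowLazy | kAllowModules);
  assert(c.allow_lazy && c.allow_modules && !c.allow_natives_syntax);
  return 0;
}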

454
deps/v8/src/prettyprinter.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -58,7 +58,7 @@ void PrettyPrinter::VisitBlock(Block* node) {
 }


-void PrettyPrinter::VisitDeclaration(Declaration* node) {
+void PrettyPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
   Print("var ");
   PrintLiteral(node->proxy()->name(), false);
   if (node->fun() != NULL) {
@@ -69,6 +69,38 @@ void PrettyPrinter::VisitDeclaration(Declaration* node) {
 }


+void PrettyPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+  Print("module ");
+  PrintLiteral(node->proxy()->name(), false);
+  Print(" = ");
+  Visit(node->module());
+  Print(";");
+}
+
+
+void PrettyPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+  VisitBlock(node->body());
+}
+
+
+void PrettyPrinter::VisitModuleVariable(ModuleVariable* node) {
+  PrintLiteral(node->var()->name(), false);
+}
+
+
+void PrettyPrinter::VisitModulePath(ModulePath* node) {
+  Visit(node->module());
+  Print(".");
+  PrintLiteral(node->name(), false);
+}
+
+
+void PrettyPrinter::VisitModuleUrl(ModuleUrl* node) {
+  Print("at ");
+  PrintLiteral(node->url(), true);
+}
+
+
 void PrettyPrinter::VisitExpressionStatement(ExpressionStatement* node) {
   Visit(node->expression());
   Print(";");
@@ -711,7 +743,7 @@ void AstPrinter::VisitBlock(Block* node) {
 }


-void AstPrinter::VisitDeclaration(Declaration* node) {
+void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
   if (node->fun() == NULL) {
     // var or const declarations
     PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
@@ -728,6 +760,35 @@ void AstPrinter::VisitDeclaration(Declaration* node) {
 }


+void AstPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+  IndentedScope indent(this, "MODULE");
+  PrintLiteralIndented("NAME", node->proxy()->name(), true);
+  Visit(node->module());
+}
+
+
+void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+  VisitBlock(node->body());
+}
+
+
+void AstPrinter::VisitModuleVariable(ModuleVariable* node) {
+  PrintLiteralIndented("VARIABLE", node->var()->name(), false);
+}
+
+
+void AstPrinter::VisitModulePath(ModulePath* node) {
+  IndentedScope indent(this, "PATH");
+  PrintIndentedVisit("MODULE", node->module());
+  PrintLiteralIndented("NAME", node->name(), false);
+}
+
+
+void AstPrinter::VisitModuleUrl(ModuleUrl* node) {
+  PrintLiteralIndented("URL", node->url(), true);
+}
+
+
 void AstPrinter::VisitExpressionStatement(ExpressionStatement* node) {
   Visit(node->expression());
 }
@@ -1018,393 +1079,6 @@ void AstPrinter::VisitThisFunction(ThisFunction* node) {
   IndentedScope indent(this, "THIS-FUNCTION");
 }

-TagScope::TagScope(JsonAstBuilder* builder, const char* name)
-    : builder_(builder), next_(builder->tag()), has_body_(false) {
-  if (next_ != NULL) {
-    next_->use();
-    builder->Print(",\n");
-  }
-  builder->set_tag(this);
-  builder->PrintIndented("[");
-  builder->Print("\"%s\"", name);
-  builder->increase_indent(JsonAstBuilder::kTagIndentSize);
-}
-
-TagScope::~TagScope() {
-  builder_->decrease_indent(JsonAstBuilder::kTagIndentSize);
-  if (has_body_) {
-    builder_->Print("\n");
-    builder_->PrintIndented("]");
-  } else {
-    builder_->Print("]");
-  }
-  builder_->set_tag(next_);
-}
-
-AttributesScope::AttributesScope(JsonAstBuilder* builder)
-    : builder_(builder), attribute_count_(0) {
-  builder->set_attributes(this);
-  builder->tag()->use();
-  builder->Print(",\n");
-  builder->PrintIndented("{");
-  builder->increase_indent(JsonAstBuilder::kAttributesIndentSize);
-}
-
-AttributesScope::~AttributesScope() {
-  builder_->decrease_indent(JsonAstBuilder::kAttributesIndentSize);
-  if (attribute_count_ > 1) {
-    builder_->Print("\n");
-    builder_->PrintIndented("}");
-  } else {
-    builder_->Print("}");
-  }
-  builder_->set_attributes(NULL);
-}
-
-const char* JsonAstBuilder::BuildProgram(FunctionLiteral* program) {
-  Init();
-  Visit(program);
-  Print("\n");
-  return Output();
-}
-
-void JsonAstBuilder::AddAttributePrefix(const char* name) {
-  if (attributes()->is_used()) {
-    Print(",\n");
-    PrintIndented("\"");
-  } else {
-    Print("\"");
-  }
-  Print("%s\":", name);
-  attributes()->use();
-}
-
-void JsonAstBuilder::AddAttribute(const char* name, Handle<String> value) {
-  SmartArrayPointer<char> value_string = value->ToCString();
-  AddAttributePrefix(name);
-  Print("\"%s\"", *value_string);
-}
-
-void JsonAstBuilder::AddAttribute(const char* name, const char* value) {
-  AddAttributePrefix(name);
-  Print("\"%s\"", value);
-}
-
-void JsonAstBuilder::AddAttribute(const char* name, int value) {
-  AddAttributePrefix(name);
-  Print("%d", value);
-}
-
-void JsonAstBuilder::AddAttribute(const char* name, bool value) {
-  AddAttributePrefix(name);
-  Print(value ? "true" : "false");
-}
-
-void JsonAstBuilder::VisitBlock(Block* stmt) {
-  TagScope tag(this, "Block");
-  VisitStatements(stmt->statements());
-}
-
-void JsonAstBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
-  TagScope tag(this, "ExpressionStatement");
-  Visit(stmt->expression());
-}
-
-void JsonAstBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
-  TagScope tag(this, "EmptyStatement");
-}
-
-void JsonAstBuilder::VisitIfStatement(IfStatement* stmt) {
-  TagScope tag(this, "IfStatement");
-  Visit(stmt->condition());
-  Visit(stmt->then_statement());
-  Visit(stmt->else_statement());
-}
-
-void JsonAstBuilder::VisitContinueStatement(ContinueStatement* stmt) {
-  TagScope tag(this, "ContinueStatement");
-}
-
-void JsonAstBuilder::VisitBreakStatement(BreakStatement* stmt) {
-  TagScope tag(this, "BreakStatement");
-}
-
-void JsonAstBuilder::VisitReturnStatement(ReturnStatement* stmt) {
-  TagScope tag(this, "ReturnStatement");
-  Visit(stmt->expression());
-}
-
-void JsonAstBuilder::VisitWithStatement(WithStatement* stmt) {
-  TagScope tag(this, "WithStatement");
-  Visit(stmt->expression());
-  Visit(stmt->statement());
-}
-
-void JsonAstBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
-  TagScope tag(this, "SwitchStatement");
-}
-
-void JsonAstBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
-  TagScope tag(this, "DoWhileStatement");
-  Visit(stmt->body());
-  Visit(stmt->cond());
-}
-
-void JsonAstBuilder::VisitWhileStatement(WhileStatement* stmt) {
-  TagScope tag(this, "WhileStatement");
-  Visit(stmt->cond());
-  Visit(stmt->body());
-}
-
-void JsonAstBuilder::VisitForStatement(ForStatement* stmt) {
-  TagScope tag(this, "ForStatement");
-  if (stmt->init() != NULL) Visit(stmt->init());
-  if (stmt->cond() != NULL) Visit(stmt->cond());
-  Visit(stmt->body());
-  if (stmt->next() != NULL) Visit(stmt->next());
-}
-
-void JsonAstBuilder::VisitForInStatement(ForInStatement* stmt) {
-  TagScope tag(this, "ForInStatement");
-  Visit(stmt->each());
-  Visit(stmt->enumerable());
-  Visit(stmt->body());
-}
-
-void JsonAstBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
-  TagScope tag(this, "TryCatchStatement");
-  { AttributesScope attributes(this);
-    AddAttribute("variable", stmt->variable()->name());
-  }
-  Visit(stmt->try_block());
-  Visit(stmt->catch_block());
-}
-
-void JsonAstBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
-  TagScope tag(this, "TryFinallyStatement");
-  Visit(stmt->try_block());
-  Visit(stmt->finally_block());
-}
-
-void JsonAstBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
-  TagScope tag(this, "DebuggerStatement");
-}
-
-void JsonAstBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
-  TagScope tag(this, "FunctionLiteral");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("name", expr->name());
-  }
-  VisitDeclarations(expr->scope()->declarations());
-  VisitStatements(expr->body());
-}
-
-void JsonAstBuilder::VisitSharedFunctionInfoLiteral(
-    SharedFunctionInfoLiteral* expr) {
-  TagScope tag(this, "SharedFunctionInfoLiteral");
-}
-
-void JsonAstBuilder::VisitConditional(Conditional* expr) {
-  TagScope tag(this, "Conditional");
-}
-
-void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
-  TagScope tag(this, "Variable");
-  {
-    AttributesScope attributes(this);
-    Variable* var = expr->var();
-    AddAttribute("name", var->name());
-    switch (var->location()) {
-      case Variable::UNALLOCATED:
-        AddAttribute("location", "UNALLOCATED");
-        break;
-      case Variable::PARAMETER:
-        AddAttribute("location", "PARAMETER");
-        AddAttribute("index", var->index());
-        break;
-      case Variable::LOCAL:
-        AddAttribute("location", "LOCAL");
-        AddAttribute("index", var->index());
-        break;
-      case Variable::CONTEXT:
-        AddAttribute("location", "CONTEXT");
-        AddAttribute("index", var->index());
-        break;
-      case Variable::LOOKUP:
-        AddAttribute("location", "LOOKUP");
-        break;
-    }
-  }
-}
-
-void JsonAstBuilder::VisitLiteral(Literal* expr) {
-  TagScope tag(this, "Literal");
-  {
-    AttributesScope attributes(this);
-    Handle<Object> handle = expr->handle();
-    if (handle->IsString()) {
-      AddAttribute("handle", Handle<String>(String::cast(*handle)));
-    } else if (handle->IsSmi()) {
-      AddAttribute("handle", Smi::cast(*handle)->value());
-    }
-  }
-}
-
-void JsonAstBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
-  TagScope tag(this, "RegExpLiteral");
-}
-
-void JsonAstBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
-  TagScope tag(this, "ObjectLiteral");
-}
-
-void JsonAstBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
-  TagScope tag(this, "ArrayLiteral");
-}
-
-void JsonAstBuilder::VisitAssignment(Assignment* expr) {
-  TagScope tag(this, "Assignment");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("op", Token::Name(expr->op()));
-  }
-  Visit(expr->target());
-  Visit(expr->value());
-}
-
-void JsonAstBuilder::VisitThrow(Throw* expr) {
-  TagScope tag(this, "Throw");
-  Visit(expr->exception());
-}
-
-void JsonAstBuilder::VisitProperty(Property* expr) {
-  TagScope tag(this, "Property");
-  Visit(expr->obj());
-  Visit(expr->key());
-}
-
-void JsonAstBuilder::VisitCall(Call* expr) {
-  TagScope tag(this, "Call");
-  Visit(expr->expression());
-  VisitExpressions(expr->arguments());
-}
-
-void JsonAstBuilder::VisitCallNew(CallNew* expr) {
-  TagScope tag(this, "CallNew");
-  Visit(expr->expression());
-  VisitExpressions(expr->arguments());
-}
-
-void JsonAstBuilder::VisitCallRuntime(CallRuntime* expr) {
-  TagScope tag(this, "CallRuntime");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("name", expr->name());
-  }
-  VisitExpressions(expr->arguments());
-}
-
-void JsonAstBuilder::VisitUnaryOperation(UnaryOperation* expr) {
-  TagScope tag(this, "UnaryOperation");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("op", Token::Name(expr->op()));
-  }
-  Visit(expr->expression());
-}
-
-void JsonAstBuilder::VisitCountOperation(CountOperation* expr) {
-  TagScope tag(this, "CountOperation");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("is_prefix", expr->is_prefix());
-    AddAttribute("op", Token::Name(expr->op()));
-  }
-  Visit(expr->expression());
-}
-
-void JsonAstBuilder::VisitBinaryOperation(BinaryOperation* expr) {
-  TagScope tag(this, "BinaryOperation");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("op", Token::Name(expr->op()));
-  }
-  Visit(expr->left());
-  Visit(expr->right());
-}
-
-void JsonAstBuilder::VisitCompareOperation(CompareOperation* expr) {
-  TagScope tag(this, "CompareOperation");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("op", Token::Name(expr->op()));
-  }
-  Visit(expr->left());
-  Visit(expr->right());
-}
-
-void JsonAstBuilder::VisitThisFunction(ThisFunction* expr) {
-  TagScope tag(this, "ThisFunction");
-}
-
-void JsonAstBuilder::VisitDeclaration(Declaration* decl) {
-  TagScope tag(this, "Declaration");
-  {
-    AttributesScope attributes(this);
-    AddAttribute("mode", Variable::Mode2String(decl->mode()));
-  }
-  Visit(decl->proxy());
-  if (decl->fun() != NULL) Visit(decl->fun());
-}
-
 #endif  // DEBUG

 } }  // namespace v8::internal

103
deps/v8/src/prettyprinter.h

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -112,107 +112,6 @@ class AstPrinter: public PrettyPrinter {
   int indent_;
 };

-// Forward declaration of helper classes.
-class TagScope;
-class AttributesScope;
-
-// Build a C string containing a JSON representation of a function's
-// AST. The representation is based on JsonML (www.jsonml.org).
-class JsonAstBuilder: public PrettyPrinter {
- public:
-  JsonAstBuilder()
-      : indent_(0), top_tag_scope_(NULL), attributes_scope_(NULL) {
-  }
-  virtual ~JsonAstBuilder() {}
-
-  // Controls the indentation of subsequent lines of a tag body after
-  // the first line.
-  static const int kTagIndentSize = 2;
-
-  // Controls the indentation of subsequent lines of an attributes
-  // blocks's body after the first line.
-  static const int kAttributesIndentSize = 1;
-
-  // Construct a JSON representation of a function literal.
-  const char* BuildProgram(FunctionLiteral* program);
-
-  // Print text indented by the current indentation level.
-  void PrintIndented(const char* text) { Print("%*s%s", indent_, "", text); }
-
-  // Change the indentation level.
-  void increase_indent(int amount) { indent_ += amount; }
-  void decrease_indent(int amount) { indent_ -= amount; }
-
-  // The builder maintains a stack of opened AST node constructors.
-  // Each node constructor corresponds to a JsonML tag.
-  TagScope* tag() { return top_tag_scope_; }
-  void set_tag(TagScope* scope) { top_tag_scope_ = scope; }
-
-  // The builder maintains a pointer to the currently opened attributes
-  // of current AST node or NULL if the attributes are not opened.
-  AttributesScope* attributes() { return attributes_scope_; }
-  void set_attributes(AttributesScope* scope) { attributes_scope_ = scope; }
-
-  // Add an attribute to the currently opened attributes.
-  void AddAttribute(const char* name, Handle<String> value);
-  void AddAttribute(const char* name, const char* value);
-  void AddAttribute(const char* name, int value);
-  void AddAttribute(const char* name, bool value);
-
-  // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
-  AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
- private:
-  int indent_;
-  TagScope* top_tag_scope_;
-  AttributesScope* attributes_scope_;
-
-  // Utility function used by AddAttribute implementations.
-  void AddAttributePrefix(const char* name);
-};
-
-// The JSON AST builder keeps a stack of open element tags (AST node
-// constructors from the current iteration point to the root of the
-// AST). TagScope is a helper class to manage the opening and closing
-// of tags, the indentation of their bodies, and comma separating their
-// contents.
-class TagScope BASE_EMBEDDED {
- public:
-  TagScope(JsonAstBuilder* builder, const char* name);
-  ~TagScope();
-
-  void use() { has_body_ = true; }
-
- private:
-  JsonAstBuilder* builder_;
-  TagScope* next_;
-  bool has_body_;
-};
-
-// AttributesScope is a helper class to manage the opening and closing
-// of attribute blocks, the indentation of their bodies, and comma
-// separating their contents. JsonAstBuilder::AddAttribute adds an
-// attribute to the currently open AttributesScope. They cannot be
-// nested so the builder keeps an optional single scope rather than a
-// stack.
-class AttributesScope BASE_EMBEDDED {
- public:
-  explicit AttributesScope(JsonAstBuilder* builder);
-  ~AttributesScope();
-
-  bool is_used() { return attribute_count_ > 0; }
-  void use() { ++attribute_count_; }
-
- private:
-  JsonAstBuilder* builder_;
-  int attribute_count_;
-};
-
 #endif  // DEBUG

 } }  // namespace v8::internal

88
deps/v8/src/profile-generator.cc

@@ -1131,6 +1131,7 @@ const char* HeapEntry::TypeAsString() {
     case kRegExp: return "/regexp/";
     case kHeapNumber: return "/number/";
     case kNative: return "/native/";
+    case kSynthetic: return "/synthetic/";
     default: return "???";
   }
 }
@@ -2698,6 +2699,45 @@ class GlobalHandlesExtractor : public ObjectVisitor {
   NativeObjectsExplorer* explorer_;
 };

+class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
+ public:
+  BasicHeapEntriesAllocator(
+      HeapSnapshot* snapshot,
+      HeapEntry::Type entries_type)
+    : snapshot_(snapshot),
+      collection_(snapshot_->collection()),
+      entries_type_(entries_type) {
+  }
+  virtual HeapEntry* AllocateEntry(
+      HeapThing ptr, int children_count, int retainers_count);
+ private:
+  HeapSnapshot* snapshot_;
+  HeapSnapshotsCollection* collection_;
+  HeapEntry::Type entries_type_;
+};
+
+
+HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
+    HeapThing ptr, int children_count, int retainers_count) {
+  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
+  intptr_t elements = info->GetElementCount();
+  intptr_t size = info->GetSizeInBytes();
+  return snapshot_->AddEntry(
+      entries_type_,
+      elements != -1 ?
+          collection_->names()->GetFormatted(
+              "%s / %" V8_PTR_PREFIX "d entries",
+              info->GetLabel(),
+              info->GetElementCount()) :
+          collection_->names()->GetCopy(info->GetLabel()),
+      HeapObjectsMap::GenerateId(info),
+      size != -1 ? static_cast<int>(size) : 0,
+      children_count,
+      retainers_count);
+}
+
+
 NativeObjectsExplorer::NativeObjectsExplorer(
     HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
     : snapshot_(snapshot),
@@ -2707,6 +2747,10 @@ NativeObjectsExplorer::NativeObjectsExplorer(
       objects_by_info_(RetainedInfosMatch),
       native_groups_(StringsMatch),
       filler_(NULL) {
+  synthetic_entries_allocator_ =
+      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
+  native_entries_allocator_ =
+      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
 }
@@ -2728,27 +2772,8 @@ NativeObjectsExplorer::~NativeObjectsExplorer() {
         reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
     info->Dispose();
   }
-}
-
-
-HeapEntry* NativeObjectsExplorer::AllocateEntry(
-    HeapThing ptr, int children_count, int retainers_count) {
-  v8::RetainedObjectInfo* info =
-      reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
-  intptr_t elements = info->GetElementCount();
-  intptr_t size = info->GetSizeInBytes();
-  return snapshot_->AddEntry(
-      HeapEntry::kNative,
-      elements != -1 ?
-          collection_->names()->GetFormatted(
-              "%s / %" V8_PTR_PREFIX "d entries",
-              info->GetLabel(),
-              info->GetElementCount()) :
-          collection_->names()->GetCopy(info->GetLabel()),
-      HeapObjectsMap::GenerateId(info),
-      size != -1 ? static_cast<int>(size) : 0,
-      children_count,
-      retainers_count);
+  delete synthetic_entries_allocator_;
+  delete native_entries_allocator_;
 }
@@ -2790,12 +2815,14 @@ void NativeObjectsExplorer::FillImplicitReferences() {
     for (int i = 0; i < groups->length(); ++i) {
       ImplicitRefGroup* group = groups->at(i);
       HeapObject* parent = *group->parent_;
-      HeapEntry* parent_entry = filler_->FindOrAddEntry(parent, this);
+      HeapEntry* parent_entry =
+          filler_->FindOrAddEntry(parent, native_entries_allocator_);
       ASSERT(parent_entry != NULL);
       Object*** children = group->children_;
       for (size_t j = 0; j < group->length_; ++j) {
         Object* child = *children[j];
-        HeapEntry* child_entry = filler_->FindOrAddEntry(child, this);
+        HeapEntry* child_entry =
+            filler_->FindOrAddEntry(child, native_entries_allocator_);
         filler_->SetNamedReference(
             HeapGraphEdge::kInternal,
             parent, parent_entry,
@@ -2886,11 +2913,13 @@ NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(

 void NativeObjectsExplorer::SetNativeRootReference(
     v8::RetainedObjectInfo* info) {
-  HeapEntry* child_entry = filler_->FindOrAddEntry(info, this);
+  HeapEntry* child_entry =
+      filler_->FindOrAddEntry(info, native_entries_allocator_);
   ASSERT(child_entry != NULL);
   NativeGroupRetainedObjectInfo* group_info =
       FindOrAddGroupInfo(info->GetGroupLabel());
-  HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
+  HeapEntry* group_entry =
+      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
   filler_->SetNamedAutoIndexReference(
       HeapGraphEdge::kInternal,
       group_info, group_entry,
@@ -2902,7 +2931,8 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
   ASSERT(wrapper_entry != NULL);
-  HeapEntry* info_entry = filler_->FindOrAddEntry(info, this);
+  HeapEntry* info_entry =
+      filler_->FindOrAddEntry(info, native_entries_allocator_);
   ASSERT(info_entry != NULL);
   filler_->SetNamedReference(HeapGraphEdge::kInternal,
                              wrapper, wrapper_entry,
@@ -2920,7 +2950,8 @@ void NativeObjectsExplorer::SetRootNativeRootsReference() {
        entry = native_groups_.Next(entry)) {
     NativeGroupRetainedObjectInfo* group_info =
         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
-    HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
+    HeapEntry* group_entry =
+        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
     ASSERT(group_entry != NULL);
     filler_->SetIndexedAutoIndexReference(
         HeapGraphEdge::kElement,
@@ -3547,7 +3578,8 @@ void HeapSnapshotJSONSerializer::SerializeNodes() {
             "," JSON_S("closure")
             "," JSON_S("regexp")
             "," JSON_S("number")
-            "," JSON_S("native"))
+            "," JSON_S("native")
+            "," JSON_S("synthetic"))
         "," JSON_S("string")
         "," JSON_S("number")
         "," JSON_S("number")

11
deps/v8/src/profile-generator.h

@@ -525,7 +525,8 @@ class HeapEntry BASE_EMBEDDED {
     kClosure = v8::HeapGraphNode::kClosure,
     kRegExp = v8::HeapGraphNode::kRegExp,
     kHeapNumber = v8::HeapGraphNode::kHeapNumber,
-    kNative = v8::HeapGraphNode::kNative
+    kNative = v8::HeapGraphNode::kNative,
+    kSynthetic = v8::HeapGraphNode::kSynthetic
   };

   HeapEntry() { }
@@ -1026,16 +1027,16 @@ class V8HeapExplorer : public HeapEntriesAllocator {
   DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
 };

 class NativeGroupRetainedObjectInfo;

 // An implementation of retained native objects extractor.
-class NativeObjectsExplorer : public HeapEntriesAllocator {
+class NativeObjectsExplorer {
  public:
   NativeObjectsExplorer(HeapSnapshot* snapshot,
                         SnapshottingProgressReportingInterface* progress);
   virtual ~NativeObjectsExplorer();
-  virtual HeapEntry* AllocateEntry(
-      HeapThing ptr, int children_count, int retainers_count);
   void AddRootEntries(SnapshotFillerInterface* filler);
   int EstimateObjectsCount();
   bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
@@ -1074,6 +1075,8 @@ class NativeObjectsExplorer : public HeapEntriesAllocator {
   // RetainedObjectInfo* -> List<HeapObject*>*
   HashMap objects_by_info_;
   HashMap native_groups_;
+  HeapEntriesAllocator* synthetic_entries_allocator_;
+  HeapEntriesAllocator* native_entries_allocator_;
   // Used during references extraction.
   SnapshotFillerInterface* filler_;

4
deps/v8/src/property-details.h

@@ -119,10 +119,6 @@ class PropertyDetails BASE_EMBEDDED {

   PropertyType type() { return TypeField::decode(value_); }

-  bool IsProperty() {
-    return IsRealProperty(type());
-  }
-
   PropertyAttributes attributes() { return AttributesField::decode(value_); }

   int index() { return StorageField::decode(value_); }

2
deps/v8/src/property.h

@@ -264,7 +264,7 @@ class LookupResult BASE_EMBEDDED {
   // Is the result is a property excluding transitions and the null
   // descriptor?
   bool IsProperty() {
-    return IsFound() && GetPropertyDetails().IsProperty();
+    return IsFound() && IsRealProperty(GetPropertyDetails().type());
   }
   bool IsCacheable() { return cacheable_; }

40
deps/v8/src/rewriter.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -42,12 +42,18 @@ class Processor: public AstVisitor {
       : result_(result),
         result_assigned_(false),
         is_set_(false),
-        in_try_(false) {
-  }
+        in_try_(false),
+        factory_(isolate()) { }
+
+  virtual ~Processor() { }

   void Process(ZoneList<Statement*>* statements);
   bool result_assigned() const { return result_assigned_; }

+  AstNodeFactory<AstNullVisitor>* factory() {
+    return &factory_;
+  }
+
  private:
   Variable* result_;
@@ -64,15 +70,13 @@ class Processor: public AstVisitor {
   bool is_set_;
   bool in_try_;
+  AstNodeFactory<AstNullVisitor> factory_;

   Expression* SetResult(Expression* value) {
     result_assigned_ = true;
-    Zone* zone = isolate()->zone();
-    VariableProxy* result_proxy = new(zone) VariableProxy(isolate(), result_);
-    return new(zone) Assignment(isolate(),
-                                Token::ASSIGN,
-                                result_proxy,
-                                value,
-                                RelocInfo::kNoPosition);
+    VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
+    return factory()->NewAssignment(
+        Token::ASSIGN, result_proxy, value, RelocInfo::kNoPosition);
   }

   // Node visitors.
// Node visitors. // Node visitors.
@ -205,7 +209,12 @@ void Processor::VisitWithStatement(WithStatement* node) {
// Do nothing: // Do nothing:
void Processor::VisitDeclaration(Declaration* node) {} void Processor::VisitVariableDeclaration(VariableDeclaration* node) {}
void Processor::VisitModuleDeclaration(ModuleDeclaration* node) {}
void Processor::VisitModuleLiteral(ModuleLiteral* node) {}
void Processor::VisitModuleVariable(ModuleVariable* node) {}
void Processor::VisitModulePath(ModulePath* node) {}
void Processor::VisitModuleUrl(ModuleUrl* node) {}
void Processor::VisitEmptyStatement(EmptyStatement* node) {} void Processor::VisitEmptyStatement(EmptyStatement* node) {}
void Processor::VisitReturnStatement(ReturnStatement* node) {} void Processor::VisitReturnStatement(ReturnStatement* node) {}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {} void Processor::VisitDebuggerStatement(DebuggerStatement* node) {}
@@ -237,8 +246,6 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
     if (processor.result_assigned()) {
       ASSERT(function->end_position() != RelocInfo::kNoPosition);
-      Isolate* isolate = info->isolate();
-      Zone* zone = isolate->zone();
       // Set the position of the assignment statement one character past the
       // source code, such that it definitely is not in the source code range
       // of an immediate inner scope. For example in
@@ -246,10 +253,11 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
       // the end position of the function generated for executing the eval code
       // coincides with the end of the with scope which is the position of '1'.
       int position = function->end_position();
-      VariableProxy* result_proxy = new(zone) VariableProxy(
-          isolate, result->name(), false, position);
+      VariableProxy* result_proxy = processor.factory()->NewVariableProxy(
+          result->name(), false, position);
       result_proxy->BindTo(result);
-      Statement* result_statement = new(zone) ReturnStatement(result_proxy);
+      Statement* result_statement =
+          processor.factory()->NewReturnStatement(result_proxy);
       result_statement->set_statement_pos(position);
       body->Add(result_statement);
     }

116
deps/v8/src/runtime-profiler.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -46,6 +46,8 @@ namespace internal {
 // Optimization sampler constants.
 static const int kSamplerFrameCount = 2;
+
+// Constants for statistical profiler.
 static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

 static const int kSamplerTicksBetweenThresholdAdjustment = 32;
@@ -58,6 +60,16 @@ static const int kSamplerThresholdSizeFactorInit = 3;

 static const int kSizeLimit = 1500;

+// Constants for counter based profiler.
+
+// Number of times a function has to be seen on the stack before it is
+// optimized.
+static const int kProfilerTicksBeforeOptimization = 2;
+
+// Maximum size in bytes of generated code for a function to be optimized
+// the very first time it is seen on the stack.
+static const int kMaxSizeEarlyOpt = 500;
+
 Atomic32 RuntimeProfiler::state_ = 0;

 // TODO(isolates): Create the semaphore lazily and clean it up when no
@@ -90,13 +102,13 @@ void RuntimeProfiler::GlobalSetup() {
 }


-void RuntimeProfiler::Optimize(JSFunction* function) {
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
   ASSERT(function->IsOptimizable());
   if (FLAG_trace_opt) {
     PrintF("[marking ");
     function->PrintName();
     PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
-    PrintF(" for recompilation");
+    PrintF(" for recompilation, reason: %s", reason);
     PrintF("]\n");
   }
@@ -192,17 +204,19 @@ void RuntimeProfiler::OptimizeNow() {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = JSFunction::cast(frame->function());

-    // Adjust threshold each time we have processed
-    // a certain number of ticks.
-    if (sampler_ticks_until_threshold_adjustment_ > 0) {
-      sampler_ticks_until_threshold_adjustment_--;
-      if (sampler_ticks_until_threshold_adjustment_ <= 0) {
-        // If the threshold is not already at the minimum
-        // modify and reset the ticks until next adjustment.
-        if (sampler_threshold_ > kSamplerThresholdMin) {
-          sampler_threshold_ -= kSamplerThresholdDelta;
-          sampler_ticks_until_threshold_adjustment_ =
-              kSamplerTicksBetweenThresholdAdjustment;
+    if (!FLAG_watch_ic_patching) {
+      // Adjust threshold each time we have processed
+      // a certain number of ticks.
+      if (sampler_ticks_until_threshold_adjustment_ > 0) {
+        sampler_ticks_until_threshold_adjustment_--;
+        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
+          // If the threshold is not already at the minimum
+          // modify and reset the ticks until next adjustment.
+          if (sampler_threshold_ > kSamplerThresholdMin) {
+            sampler_threshold_ -= kSamplerThresholdDelta;
+            sampler_ticks_until_threshold_adjustment_ =
+                kSamplerTicksBetweenThresholdAdjustment;
+          }
         }
       }
     }
@@ -217,25 +231,55 @@ void RuntimeProfiler::OptimizeNow() {
     // Do not record non-optimizable functions.
     if (!function->IsOptimizable()) continue;
-    samples[sample_count++] = function;

-    int function_size = function->shared()->SourceSize();
-    int threshold_size_factor = (function_size > kSizeLimit)
-        ? sampler_threshold_size_factor_
-        : 1;
+    if (FLAG_watch_ic_patching) {
+      int ticks = function->shared()->profiler_ticks();
+
+      if (ticks >= kProfilerTicksBeforeOptimization) {
+        // If this particular function hasn't had any ICs patched for enough
+        // ticks, optimize it now.
+        Optimize(function, "hot and stable");
+      } else if (!any_ic_changed_ &&
+          function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
+        // If no IC was patched since the last tick and this function is very
+        // small, optimistically optimize it now.
+        Optimize(function, "small function");
+      } else if (!code_generated_ &&
+          !any_ic_changed_ &&
+          total_code_generated_ > 0 &&
+          total_code_generated_ < 2000) {
+        // If no code was generated and no IC was patched since the last tick,
+        // but a little code has already been generated since last Reset(),
+        // then type info might already be stable and we can optimize now.
+        Optimize(function, "stable on startup");
+      } else {
+        function->shared()->set_profiler_ticks(ticks + 1);
+      }
+    } else {  // !FLAG_counting_profiler
+      samples[sample_count++] = function;

-    int threshold = sampler_threshold_ * threshold_size_factor;
+      int function_size = function->shared()->SourceSize();
+      int threshold_size_factor = (function_size > kSizeLimit)
+          ? sampler_threshold_size_factor_
+          : 1;

-    if (LookupSample(function) >= threshold) {
-      Optimize(function);
+      int threshold = sampler_threshold_ * threshold_size_factor;
+
+      if (LookupSample(function) >= threshold) {
+        Optimize(function, "sampler window lookup");
+      }
     }
   }
-
-  // Add the collected functions as samples. It's important not to do
-  // this as part of collecting them because this will interfere with
-  // the sample lookup in case of recursive functions.
-  for (int i = 0; i < sample_count; i++) {
-    AddSample(samples[i], kSamplerFrameWeight[i]);
+  if (FLAG_watch_ic_patching) {
+    any_ic_changed_ = false;
+    code_generated_ = false;
+  } else {  // !FLAG_counting_profiler
+    // Add the collected functions as samples. It's important not to do
+    // this as part of collecting them because this will interfere with
+    // the sample lookup in case of recursive functions.
+    for (int i = 0; i < sample_count; i++) {
+      AddSample(samples[i], kSamplerFrameWeight[i]);
+    }
   }
 }
@ -247,7 +291,9 @@ void RuntimeProfiler::NotifyTick() {
void RuntimeProfiler::SetUp() { void RuntimeProfiler::SetUp() {
ASSERT(has_been_globally_set_up_); ASSERT(has_been_globally_set_up_);
ClearSampleBuffer(); if (!FLAG_watch_ic_patching) {
ClearSampleBuffer();
}
// If the ticker hasn't already started, make sure to do so to get // If the ticker hasn't already started, make sure to do so to get
// the ticks for the runtime profiler. // the ticks for the runtime profiler.
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
@ -255,10 +301,14 @@ void RuntimeProfiler::SetUp() {
void RuntimeProfiler::Reset() { void RuntimeProfiler::Reset() {
sampler_threshold_ = kSamplerThresholdInit; if (FLAG_watch_ic_patching) {
sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit; total_code_generated_ = 0;
sampler_ticks_until_threshold_adjustment_ = } else { // !FLAG_counting_profiler
kSamplerTicksBetweenThresholdAdjustment; sampler_threshold_ = kSamplerThresholdInit;
sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
sampler_ticks_until_threshold_adjustment_ =
kSamplerTicksBetweenThresholdAdjustment;
}
} }
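Editor's note: the counter-based path above replaces stack sampling with per-function tick counts. A minimal standalone sketch of that decision logic, with plain ints and bools standing in for the real JSFunction and flag machinery (all names below are illustrative, not V8 API):

#include <cstdio>

// Illustrative constants mirroring the diff above.
const int kProfilerTicksBeforeOptimization = 2;
const int kMaxSizeEarlyOpt = 500;

// Returns the reason a function would be optimized, or NULL to keep waiting.
// 'ticks' is how long the function has been seen hot, 'code_size' its
// unoptimized code size; the bools mirror the any_ic_changed_ /
// code_generated_ bookkeeping in RuntimeProfiler.
const char* OptimizeReason(int ticks, int code_size,
                           bool any_ic_changed, bool code_generated,
                           int total_code_generated) {
  if (ticks >= kProfilerTicksBeforeOptimization) return "hot and stable";
  if (!any_ic_changed && code_size < kMaxSizeEarlyOpt) return "small function";
  if (!code_generated && !any_ic_changed &&
      total_code_generated > 0 && total_code_generated < 2000) {
    return "stable on startup";
  }
  return NULL;  // Not yet: bump profiler_ticks and retry on the next tick.
}

int main() {
  // A tiny function with no IC churn is optimized on first sight.
  printf("%s\n", OptimizeReason(0, 120, false, false, 0));  // small function
  // A large function must accumulate ticks first.
  printf("%s\n", OptimizeReason(2, 4000, true, true, 0));   // hot and stable
  return 0;
}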

17
deps/v8/src/runtime-profiler.h

@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -61,6 +61,15 @@ class RuntimeProfiler {
   Object** SamplerWindowAddress();
   int SamplerWindowSize();

+  void NotifyICChanged() { any_ic_changed_ = true; }
+
+  void NotifyCodeGenerated(int generated_code_size) {
+    if (FLAG_watch_ic_patching) {
+      code_generated_ = true;
+      total_code_generated_ += generated_code_size;
+    }
+  }
+
   // Rate limiting support.

   // VM thread interface.
@@ -97,7 +106,7 @@ class RuntimeProfiler {
   static void HandleWakeUp(Isolate* isolate);

-  void Optimize(JSFunction* function);
+  void Optimize(JSFunction* function, const char* reason);

   void AttemptOnStackReplacement(JSFunction* function);
@@ -119,6 +128,10 @@ class RuntimeProfiler {
   int sampler_window_position_;
   int sampler_window_weight_[kSamplerWindowSize];

+  bool any_ic_changed_;
+  bool code_generated_;
+  int total_code_generated_;
+
   // Possible state values:
   // -1 => the profiler thread is waiting on the semaphore
   // 0 or positive => the number of isolates running JavaScript code.

90
deps/v8/src/runtime.cc

@@ -165,7 +165,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
     }
   } else {
     { MaybeObject* maybe_result =
-          heap->AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
+          heap->AllocateFixedArray(copy->NumberOfLocalProperties());
       if (!maybe_result->ToObject(&result)) return maybe_result;
     }
     FixedArray* names = FixedArray::cast(result);
@@ -273,45 +273,43 @@ static Handle<Map> ComputeObjectLiteralMap(
   Isolate* isolate = context->GetIsolate();
   int properties_length = constant_properties->length();
   int number_of_properties = properties_length / 2;
-  if (FLAG_canonicalize_object_literal_maps) {
-    // Check that there are only symbols and array indices among keys.
-    int number_of_symbol_keys = 0;
-    for (int p = 0; p != properties_length; p += 2) {
-      Object* key = constant_properties->get(p);
-      uint32_t element_index = 0;
-      if (key->IsSymbol()) {
-        number_of_symbol_keys++;
-      } else if (key->ToArrayIndex(&element_index)) {
-        // An index key does not require space in the property backing store.
-        number_of_properties--;
-      } else {
-        // Bail out as a non-symbol non-index key makes caching impossible.
-        // ASSERT to make sure that the if condition after the loop is false.
-        ASSERT(number_of_symbol_keys != number_of_properties);
-        break;
-      }
-    }
-    // If we only have symbols and array indices among keys then we can
-    // use the map cache in the global context.
-    const int kMaxKeys = 10;
-    if ((number_of_symbol_keys == number_of_properties) &&
-        (number_of_symbol_keys < kMaxKeys)) {
-      // Create the fixed array with the key.
-      Handle<FixedArray> keys =
-          isolate->factory()->NewFixedArray(number_of_symbol_keys);
-      if (number_of_symbol_keys > 0) {
-        int index = 0;
-        for (int p = 0; p < properties_length; p += 2) {
-          Object* key = constant_properties->get(p);
-          if (key->IsSymbol()) {
-            keys->set(index++, key);
-          }
-        }
-        ASSERT(index == number_of_symbol_keys);
-      }
-      *is_result_from_cache = true;
-      return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
-    }
+  // Check that there are only symbols and array indices among keys.
+  int number_of_symbol_keys = 0;
+  for (int p = 0; p != properties_length; p += 2) {
+    Object* key = constant_properties->get(p);
+    uint32_t element_index = 0;
+    if (key->IsSymbol()) {
+      number_of_symbol_keys++;
+    } else if (key->ToArrayIndex(&element_index)) {
+      // An index key does not require space in the property backing store.
+      number_of_properties--;
+    } else {
+      // Bail out as a non-symbol non-index key makes caching impossible.
+      // ASSERT to make sure that the if condition after the loop is false.
+      ASSERT(number_of_symbol_keys != number_of_properties);
+      break;
+    }
+  }
+  // If we only have symbols and array indices among keys then we can
+  // use the map cache in the global context.
+  const int kMaxKeys = 10;
+  if ((number_of_symbol_keys == number_of_properties) &&
+      (number_of_symbol_keys < kMaxKeys)) {
+    // Create the fixed array with the key.
+    Handle<FixedArray> keys =
+        isolate->factory()->NewFixedArray(number_of_symbol_keys);
+    if (number_of_symbol_keys > 0) {
+      int index = 0;
+      for (int p = 0; p < properties_length; p += 2) {
+        Object* key = constant_properties->get(p);
+        if (key->IsSymbol()) {
+          keys->set(index++, key);
+        }
+      }
+      ASSERT(index == number_of_symbol_keys);
+    }
+    *is_result_from_cache = true;
+    return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
   }
   *is_result_from_cache = false;
   return isolate->factory()->CopyMap(
@@ -2003,11 +2001,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) {
 RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) {
-  NoHandleAllocation ha;
+  HandleScope scope(isolate);
   ASSERT(args.length() == 1);
-  CONVERT_CHECKED(JSFunction, f, args[0]);
-  return f->shared()->GetSourceCode();
+  CONVERT_ARG_CHECKED(JSFunction, f, 0);
+  Handle<SharedFunctionInfo> shared(f->shared());
+  return *shared->GetSourceCode();
 }
@@ -5010,7 +5009,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
       return *isolate->factory()->NewJSArray(0);
     }
     int n;
-    n = jsproto->NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE));
+    n = jsproto->NumberOfLocalProperties();
     local_property_count[i] = n;
     total_property_count += n;
     if (i < length - 1) {
@@ -8419,6 +8418,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
   ASSERT(args.length() == 1);
   Handle<JSFunction> function = args.at<JSFunction>(0);

+  function->shared()->set_profiler_ticks(0);
+
   // If the function is not compiled ignore the lazy
   // recompilation. This can happen if the debugger is activated and
   // the function is returned to the not compiled state.
@@ -13263,9 +13264,10 @@ static bool ShowFrameInStackTrace(StackFrame* raw_frame,
 // element segments each containing a receiver, function, code and
 // native code offset.
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
-  ASSERT_EQ(args.length(), 2);
-  Handle<Object> caller = args.at<Object>(0);
-  CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
+  ASSERT_EQ(args.length(), 3);
+  CONVERT_ARG_CHECKED(JSObject, error_object, 0);
+  Handle<Object> caller = args.at<Object>(1);
+  CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[2]);

   HandleScope scope(isolate);
   Factory* factory = isolate->factory();
@@ -13315,6 +13317,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
     iter.Advance();
   }
   Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
+  // Capture and attach a more detailed stack trace if necessary.
+  isolate->CaptureAndSetCurrentStackTraceFor(error_object);
   result->set_length(Smi::FromInt(cursor));
   return *result;
 }
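Editor's note: the key-counting loop above decides whether an object literal's map is cacheable. Array-index keys live in the elements backing store, so they are subtracted from the property count, and caching is possible only when every remaining key is a name. A hedged standalone sketch of that accounting, with strings standing in for V8 symbols (this is an illustration, not the real heap logic):

#include <cassert>
#include <string>
#include <vector>

// True when every character of the key is a decimal digit, i.e. the key
// would be treated as an array index and stored as an element rather than
// in the property backing store.
static bool IsArrayIndexKey(const std::string& key) {
  if (key.empty()) return false;
  for (size_t i = 0; i < key.size(); i++) {
    if (key[i] < '0' || key[i] > '9') return false;
  }
  return true;
}

// Mirrors the accounting above: index keys are subtracted from the property
// count, and the literal map is cacheable only if all remaining keys are
// names and there are fewer than kMaxKeys of them.
bool CacheableLiteral(const std::vector<std::string>& keys) {
  const int kMaxKeys = 10;
  int number_of_properties = static_cast<int>(keys.size());
  int number_of_name_keys = 0;
  for (size_t i = 0; i < keys.size(); i++) {
    if (IsArrayIndexKey(keys[i])) {
      number_of_properties--;  // Lives in the elements store.
    } else {
      number_of_name_keys++;
    }
  }
  return number_of_name_keys == number_of_properties &&
         number_of_name_keys < kMaxKeys;
}

int main() {
  std::vector<std::string> keys;
  keys.push_back("name");
  keys.push_back("0");  // Array index: consumes no property slot.
  assert(CacheableLiteral(keys));
  return 0;
}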

2
deps/v8/src/runtime.h

@@ -229,7 +229,7 @@ namespace internal {
   F(FunctionIsAPIFunction, 1, 1) \
   F(FunctionIsBuiltin, 1, 1) \
   F(GetScript, 1, 1) \
-  F(CollectStackTrace, 2, 1) \
+  F(CollectStackTrace, 3, 1) \
   F(GetV8Version, 0, 1) \
   \
   F(ClassOf, 1, 1) \

18
deps/v8/src/scanner.cc

@ -41,7 +41,8 @@ namespace internal {
Scanner::Scanner(UnicodeCache* unicode_cache) Scanner::Scanner(UnicodeCache* unicode_cache)
: unicode_cache_(unicode_cache), : unicode_cache_(unicode_cache),
octal_pos_(Location::invalid()), octal_pos_(Location::invalid()),
harmony_scoping_(false) { } harmony_scoping_(false),
harmony_modules_(false) { }
void Scanner::Initialize(UC16CharacterStream* source) { void Scanner::Initialize(UC16CharacterStream* source) {
@ -830,7 +831,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('e') \ KEYWORD_GROUP('e') \
KEYWORD("else", Token::ELSE) \ KEYWORD("else", Token::ELSE) \
KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \ KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \
KEYWORD("export", Token::FUTURE_RESERVED_WORD) \ KEYWORD("export", harmony_modules \
? Token::EXPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \ KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \
KEYWORD_GROUP('f') \ KEYWORD_GROUP('f') \
KEYWORD("false", Token::FALSE_LITERAL) \ KEYWORD("false", Token::FALSE_LITERAL) \
@ -840,13 +842,17 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('i') \ KEYWORD_GROUP('i') \
KEYWORD("if", Token::IF) \ KEYWORD("if", Token::IF) \
KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \ KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("import", Token::FUTURE_RESERVED_WORD) \ KEYWORD("import", harmony_modules \
? Token::IMPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("in", Token::IN) \ KEYWORD("in", Token::IN) \
KEYWORD("instanceof", Token::INSTANCEOF) \ KEYWORD("instanceof", Token::INSTANCEOF) \
KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \ KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('l') \ KEYWORD_GROUP('l') \
KEYWORD("let", harmony_scoping \ KEYWORD("let", harmony_scoping \
? Token::LET : Token::FUTURE_STRICT_RESERVED_WORD) \ ? Token::LET : Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('m') \
KEYWORD("module", harmony_modules \
? Token::MODULE : Token::IDENTIFIER) \
KEYWORD_GROUP('n') \ KEYWORD_GROUP('n') \
KEYWORD("new", Token::NEW) \ KEYWORD("new", Token::NEW) \
KEYWORD("null", Token::NULL_LITERAL) \ KEYWORD("null", Token::NULL_LITERAL) \
@ -879,7 +885,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
static Token::Value KeywordOrIdentifierToken(const char* input, static Token::Value KeywordOrIdentifierToken(const char* input,
int input_length, int input_length,
bool harmony_scoping) { bool harmony_scoping,
bool harmony_modules) {
ASSERT(input_length >= 1); ASSERT(input_length >= 1);
const int kMinLength = 2; const int kMinLength = 2;
const int kMaxLength = 10; const int kMaxLength = 10;
@ -955,7 +962,8 @@ Token::Value Scanner::ScanIdentifierOrKeyword() {
Vector<const char> chars = next_.literal_chars->ascii_literal(); Vector<const char> chars = next_.literal_chars->ascii_literal();
return KeywordOrIdentifierToken(chars.start(), return KeywordOrIdentifierToken(chars.start(),
chars.length(), chars.length(),
harmony_scoping_); harmony_scoping_,
harmony_modules_);
} }
return Token::IDENTIFIER; return Token::IDENTIFIER;

20
deps/v8/src/scanner.h

@@ -51,8 +51,9 @@ enum ParsingFlags {
   // STRICT_MODE,
   // EXTENDED_MODE,
   kLanguageModeMask = 0x03,
-  kAllowLazy = 4,
-  kAllowNativesSyntax = 8
+  kAllowLazy = 0x04,
+  kAllowNativesSyntax = 0x08,
+  kAllowModules = 0x10
 };

 STATIC_ASSERT((kLanguageModeMask & CLASSIC_MODE) == CLASSIC_MODE);
@@ -403,8 +404,14 @@ class Scanner {
   bool HarmonyScoping() const {
     return harmony_scoping_;
   }
-  void SetHarmonyScoping(bool block_scoping) {
-    harmony_scoping_ = block_scoping;
+  void SetHarmonyScoping(bool scoping) {
+    harmony_scoping_ = scoping;
+  }
+  bool HarmonyModules() const {
+    return harmony_modules_;
+  }
+  void SetHarmonyModules(bool modules) {
+    harmony_modules_ = modules;
   }
@@ -552,9 +559,10 @@ class Scanner {
   // Whether there is a multi-line comment that contains a
   // line-terminator after the current token, and before the next.
   bool has_multiline_comment_before_next_;
-  // Whether we scan 'let' as a keyword for harmony block scoped
-  // let bindings.
+  // Whether we scan 'let' as a keyword for harmony block-scoped let bindings.
   bool harmony_scoping_;
+  // Whether we scan 'module', 'import', 'export' as keywords.
+  bool harmony_modules_;
 };

 } }  // namespace v8::internal
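Editor's note: the flag values above move to explicit powers of two so that independent options can be OR-ed into one int and tested with a mask, alongside the two-bit language-mode field in bits 0-1. A minimal sketch of that encoding (the enum mirrors the diff; the combining caller is hypothetical):

#include <cassert>

// Mirrors the ParsingFlags layout: bits 0-1 hold the language mode,
// the remaining bits are independent boolean options.
enum ParsingFlags {
  kLanguageModeMask = 0x03,
  kAllowLazy = 0x04,
  kAllowNativesSyntax = 0x08,
  kAllowModules = 0x10
};

int main() {
  // Hypothetical caller: combine a language mode (say 1 for strict mode)
  // with two independent options in a single flags word.
  int flags = 1 | kAllowLazy | kAllowModules;

  assert((flags & kLanguageModeMask) == 1);  // Extract the mode field.
  assert(flags & kAllowLazy);                // Option set.
  assert(!(flags & kAllowNativesSyntax));    // Option not set.
  return 0;
}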

60
deps/v8/src/scopes.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -272,8 +272,11 @@ bool Scope::Analyze(CompilationInfo* info) {
     top = top->outer_scope();
   }

-  // Allocated the variables.
-  top->AllocateVariables(info->global_scope());
+  // Allocate the variables.
+  {
+    AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
+    top->AllocateVariables(info->global_scope(), &ast_node_factory);
+  }

 #ifdef DEBUG
   if (info->isolate()->bootstrapper()->IsActive()
@@ -415,7 +418,8 @@ Variable* Scope::LocalLookup(Handle<String> name) {
 }

-Variable* Scope::LookupFunctionVar(Handle<String> name) {
+Variable* Scope::LookupFunctionVar(Handle<String> name,
+                                   AstNodeFactory<AstNullVisitor>* factory) {
   if (function_ != NULL && function_->name().is_identical_to(name)) {
     return function_->var();
   } else if (!scope_info_.is_null()) {
@@ -423,7 +427,7 @@ Variable* Scope::LookupFunctionVar(Handle<String> name) {
     VariableMode mode;
     int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
     if (index < 0) return NULL;
-    Variable* var = DeclareFunctionVar(name, mode);
+    Variable* var = DeclareFunctionVar(name, mode, factory);
     var->AllocateTo(Variable::CONTEXT, index);
     return var;
   } else {
@@ -443,15 +447,6 @@ Variable* Scope::Lookup(Handle<String> name) {
 }

-Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) {
-  ASSERT(is_function_scope() && function_ == NULL);
-  Variable* function_var = new Variable(
-      this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
-  function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
-  return function_var;
-}
-
 void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
   ASSERT(!already_resolved());
   ASSERT(is_function_scope());
@@ -489,18 +484,6 @@ Variable* Scope::DeclareGlobal(Handle<String> name) {
 }

-VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) {
-  // Note that we must not share the unresolved variables with
-  // the same name because they may be removed selectively via
-  // RemoveUnresolved().
-  ASSERT(!already_resolved());
-  VariableProxy* proxy = new(isolate_->zone()) VariableProxy(
-      isolate_, name, false, position);
-  unresolved_.Add(proxy);
-  return proxy;
-}
-
 void Scope::RemoveUnresolved(VariableProxy* var) {
   // Most likely (always?) any variable we want to remove
   // was just added before, so we search backwards.
@@ -623,7 +606,8 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
 }

-void Scope::AllocateVariables(Scope* global_scope) {
+void Scope::AllocateVariables(Scope* global_scope,
+                              AstNodeFactory<AstNullVisitor>* factory) {
   // 1) Propagate scope information.
   bool outer_scope_calls_non_strict_eval = false;
   if (outer_scope_ != NULL) {
@@ -634,7 +618,7 @@ void Scope::AllocateVariables(Scope* global_scope) {
   PropagateScopeInfo(outer_scope_calls_non_strict_eval);

   // 2) Resolve variables.
-  ResolveVariablesRecursively(global_scope);
+  ResolveVariablesRecursively(global_scope, factory);

   // 3) Allocate variables.
   AllocateVariablesRecursively();
@@ -897,7 +881,8 @@ Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {

 Variable* Scope::LookupRecursive(Handle<String> name,
-                                 BindingKind* binding_kind) {
+                                 BindingKind* binding_kind,
+                                 AstNodeFactory<AstNullVisitor>* factory) {
   ASSERT(binding_kind != NULL);
   // Try to find the variable in this scope.
   Variable* var = LocalLookup(name);
@@ -914,11 +899,11 @@ Variable* Scope::LookupRecursive(Handle<String> name,
   // if any. We can do this for all scopes, since the function variable is
   // only present - if at all - for function scopes.
   *binding_kind = UNBOUND;
-  var = LookupFunctionVar(name);
+  var = LookupFunctionVar(name, factory);
   if (var != NULL) {
     *binding_kind = BOUND;
   } else if (outer_scope_ != NULL) {
-    var = outer_scope_->LookupRecursive(name, binding_kind);
+    var = outer_scope_->LookupRecursive(name, binding_kind, factory);
     if (*binding_kind == BOUND && (is_function_scope() || is_with_scope())) {
       var->ForceContextAllocation();
     }
@@ -951,7 +936,8 @@ Variable* Scope::LookupRecursive(Handle<String> name,

 void Scope::ResolveVariable(Scope* global_scope,
-                            VariableProxy* proxy) {
+                            VariableProxy* proxy,
+                            AstNodeFactory<AstNullVisitor>* factory) {
   ASSERT(global_scope == NULL || global_scope->is_global_scope());

   // If the proxy is already resolved there's nothing to do
@@ -960,7 +946,7 @@ void Scope::ResolveVariable(Scope* global_scope,

   // Otherwise, try to resolve the variable.
   BindingKind binding_kind;
-  Variable* var = LookupRecursive(proxy->name(), &binding_kind);
+  Variable* var = LookupRecursive(proxy->name(), &binding_kind, factory);
   switch (binding_kind) {
     case BOUND:
       // We found a variable binding.
@@ -1001,17 +987,19 @@ void Scope::ResolveVariable(Scope* global_scope,
 }

-void Scope::ResolveVariablesRecursively(Scope* global_scope) {
+void Scope::ResolveVariablesRecursively(
+    Scope* global_scope,
+    AstNodeFactory<AstNullVisitor>* factory) {
   ASSERT(global_scope == NULL || global_scope->is_global_scope());

   // Resolve unresolved variables for this scope.
   for (int i = 0; i < unresolved_.length(); i++) {
-    ResolveVariable(global_scope, unresolved_[i]);
+    ResolveVariable(global_scope, unresolved_[i], factory);
   }

   // Resolve unresolved variables for inner scopes.
   for (int i = 0; i < inner_scopes_.length(); i++) {
-    inner_scopes_[i]->ResolveVariablesRecursively(global_scope);
+    inner_scopes_[i]->ResolveVariablesRecursively(global_scope, factory);
   }
 }

42
deps/v8/src/scopes.h

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -115,7 +115,8 @@ class Scope: public ZoneObject {
   // between this scope and the outer scope. (ECMA-262, 3rd., requires that
   // the name of named function literal is kept in an intermediate scope
   // in between this scope and the next outer scope.)
-  Variable* LookupFunctionVar(Handle<String> name);
+  Variable* LookupFunctionVar(Handle<String> name,
+                              AstNodeFactory<AstNullVisitor>* factory);

   // Lookup a variable in this scope or outer scopes.
   // Returns the variable or NULL if not found.
@@ -124,7 +125,16 @@ class Scope: public ZoneObject {
   // Declare the function variable for a function literal. This variable
   // is in an intermediate scope between this function scope and the
   // outer scope. Only possible for function scopes; at most one variable.
-  Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode);
+  template<class Visitor>
+  Variable* DeclareFunctionVar(Handle<String> name,
+                               VariableMode mode,
+                               AstNodeFactory<Visitor>* factory) {
+    ASSERT(is_function_scope() && function_ == NULL);
+    Variable* function_var = new Variable(
+        this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
+    function_ = factory->NewVariableProxy(function_var);
+    return function_var;
+  }

   // Declare a parameter in this scope. When there are duplicated
   // parameters the rightmost one 'wins'. However, the implementation
@@ -144,8 +154,18 @@ class Scope: public ZoneObject {
   Variable* DeclareGlobal(Handle<String> name);

   // Create a new unresolved variable.
-  VariableProxy* NewUnresolved(Handle<String> name,
-                               int position = RelocInfo::kNoPosition);
+  template<class Visitor>
+  VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
+                               Handle<String> name,
+                               int position = RelocInfo::kNoPosition) {
+    // Note that we must not share the unresolved variables with
+    // the same name because they may be removed selectively via
+    // RemoveUnresolved().
+    ASSERT(!already_resolved());
+    VariableProxy* proxy = factory->NewVariableProxy(name, false, position);
+    unresolved_.Add(proxy);
+    return proxy;
+  }

   // Remove an unresolved variable. During parsing, an unresolved variable
   // may have been added optimistically, but then only the variable name
@@ -332,7 +352,8 @@ class Scope: public ZoneObject {
   // In the case of code compiled and run using 'eval', the context
   // parameter is the context in which eval was called. In all other
   // cases the context parameter is an empty handle.
-  void AllocateVariables(Scope* global_scope);
+  void AllocateVariables(Scope* global_scope,
+                         AstNodeFactory<AstNullVisitor>* factory);

   // Current number of var or const locals.
   int num_var_or_const() { return num_var_or_const_; }
@@ -519,10 +540,13 @@ class Scope: public ZoneObject {
   // scope. If the code is executed because of a call to 'eval', the context
   // parameter should be set to the calling context of 'eval'.
   Variable* LookupRecursive(Handle<String> name,
-                            BindingKind* binding_kind);
+                            BindingKind* binding_kind,
+                            AstNodeFactory<AstNullVisitor>* factory);
   void ResolveVariable(Scope* global_scope,
-                       VariableProxy* proxy);
-  void ResolveVariablesRecursively(Scope* global_scope);
+                       VariableProxy* proxy,
+                       AstNodeFactory<AstNullVisitor>* factory);
+  void ResolveVariablesRecursively(Scope* global_scope,
+                                   AstNodeFactory<AstNullVisitor>* factory);

   // Scope analysis.
   bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval);
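Editor's note: the methods moved into the header become templates over the factory's visitor parameter, so different compilation passes can share one implementation while supplying their own AstNodeFactory specialization. A stripped-down sketch of that pattern; every type below is a simplified stand-in, not the real V8 class:

#include <cstdio>

struct Node { const char* name; };

// A no-op visitor, analogous to AstNullVisitor: its calls inline away.
struct NullVisitor {
  void Visit(Node*) {}
};

// A tracing visitor that a debugging build might plug in instead.
struct PrintVisitor {
  void Visit(Node* n) { printf("created %s\n", n->name); }
};

// Factory parameterized by a visitor type: every node it creates is run
// through the visitor, and the NullVisitor case costs nothing at runtime.
template <class Visitor>
class NodeFactory {
 public:
  Node* NewNode(const char* name) {
    Node* n = new Node();  // Leaked in this toy; V8 zone-allocates instead.
    n->name = name;
    visitor_.Visit(n);
    return n;
  }
 private:
  Visitor visitor_;
};

// Template member in the style of Scope::NewUnresolved: it works with
// whichever factory specialization the caller happens to hold.
template <class Visitor>
Node* NewUnresolved(NodeFactory<Visitor>* factory, const char* name) {
  return factory->NewNode(name);
}

int main() {
  NodeFactory<NullVisitor> quiet;
  NodeFactory<PrintVisitor> loud;
  NewUnresolved(&quiet, "x");  // Silent.
  NewUnresolved(&loud, "y");   // Prints "created y".
  return 0;
}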

7
deps/v8/src/spaces.h

@@ -2364,12 +2364,9 @@ class FixedSpace : public PagedSpace {
 class MapSpace : public FixedSpace {
  public:
   // Creates a map space object with a maximum capacity.
-  MapSpace(Heap* heap,
-           intptr_t max_capacity,
-           int max_map_space_pages,
-           AllocationSpace id)
+  MapSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
       : FixedSpace(heap, max_capacity, id, Map::kSize, "map"),
-        max_map_space_pages_(max_map_space_pages) {
+        max_map_space_pages_(kMaxMapPageIndex - 1) {
   }

   // Given an index, returns the page address.

3
deps/v8/src/token.h

@ -170,7 +170,10 @@ namespace internal {
T(FUTURE_RESERVED_WORD, NULL, 0) \ T(FUTURE_RESERVED_WORD, NULL, 0) \
T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \ T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \
K(CONST, "const", 0) \ K(CONST, "const", 0) \
K(EXPORT, "export", 0) \
K(IMPORT, "import", 0) \
K(LET, "let", 0) \ K(LET, "let", 0) \
K(MODULE, "module", 0) \
\ \
/* Illegal token - not able to scan. */ \ /* Illegal token - not able to scan. */ \
T(ILLEGAL, "ILLEGAL", 0) \ T(ILLEGAL, "ILLEGAL", 0) \

5
deps/v8/src/v8.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -255,9 +255,6 @@ void V8::InitializeOncePerProcess() {
   RuntimeProfiler::GlobalSetup();

-  // Peephole optimization might interfere with deoptimization.
-  FLAG_peephole_optimization = !use_crankshaft_;
-
   ElementsAccessor::InitializeOncePerProcess();

   if (FLAG_stress_compaction) {

2
deps/v8/src/version.cc

@@ -34,7 +34,7 @@
 // cannot be changed without changing the SCons build script.
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     9
-#define BUILD_NUMBER      2
+#define BUILD_NUMBER      5
 #define PATCH_LEVEL       0
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)

4
deps/v8/src/x64/code-stubs-x64.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -4077,7 +4077,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Invoke: Link this frame into the handler chain.  There's only one
   // handler block in this code object, so its index is 0.
   __ bind(&invoke);
-  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
+  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

   // Clear any pending exceptions.
   __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);

35
deps/v8/src/x64/full-codegen-x64.cc

@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -129,6 +129,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
   }
 #endif

+  // We can optionally optimize based on counters rather than statistical
+  // sampling.
+  if (info->ShouldSelfOptimize()) {
+    if (FLAG_trace_opt) {
+      PrintF("[adding self-optimization header to %s]\n",
+             *info->function()->debug_name()->ToCString());
+    }
+    MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
+        Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
+    JSGlobalPropertyCell* cell;
+    if (maybe_cell->To(&cell)) {
+      __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
+              RelocInfo::EMBEDDED_OBJECT);
+      __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
+                        Smi::FromInt(-1));
+      Handle<Code> compile_stub(
+          isolate()->builtins()->builtin(Builtins::kLazyRecompile));
+      __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
+    }
+  }
+
   // Strict mode functions and builtins need to replace the receiver
   // with undefined when called as functions (without an explicit
   // receiver object). rcx is zero for method calls and non-zero for
@@ -256,11 +277,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
       // For named function expressions, declare the function name as a
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
-        int ignored = 0;
         VariableProxy* proxy = scope()->function();
         ASSERT(proxy->var()->mode() == CONST ||
                proxy->var()->mode() == CONST_HARMONY);
-        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
       }
       VisitDeclarations(scope()->declarations());
     }
@@ -678,8 +699,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,

 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                         VariableMode mode,
-                                        FunctionLiteral* function,
-                                        int* global_count) {
+                                        FunctionLiteral* function) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
@@ -688,7 +708,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
       (mode == CONST || mode == CONST_HARMONY || mode == LET);
   switch (variable->location()) {
     case Variable::UNALLOCATED:
-      ++(*global_count);
+      ++global_count_;
       break;

     case Variable::PARAMETER:
@@ -769,9 +789,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
 }

-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   // Call the runtime to declare the globals.
   __ push(rsi);  // The context is the first argument.
5
deps/v8/src/x64/lithium-codegen-x64.cc

@@ -555,7 +555,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
   int length = deoptimizations_.length();
   if (length == 0) return;
-  ASSERT(FLAG_deopt);
   Handle<DeoptimizationInputData> data =
       factory()->NewDeoptimizationInputData(length, TENURED);
@@ -993,11 +992,11 @@ void LCodeGen::DoMulI(LMulI* instr) {
         DeoptimizeIf(no_condition, instr->environment());
       }
     } else if (right->IsStackSlot()) {
-      __ or_(kScratchRegister, ToOperand(right));
+      __ orl(kScratchRegister, ToOperand(right));
       DeoptimizeIf(sign, instr->environment());
     } else {
       // Test the non-zero operand for negative sign.
-      __ or_(kScratchRegister, ToRegister(right));
+      __ orl(kScratchRegister, ToRegister(right));
       DeoptimizeIf(sign, instr->environment());
     }
     __ bind(&done);

24
deps/v8/src/x64/macro-assembler-x64.cc

@@ -2453,8 +2453,7 @@ Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
 }

-void MacroAssembler::PushTryHandler(CodeLocation try_location,
-                                    HandlerType type,
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                     int handler_index) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -2465,25 +2464,22 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
   STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

   // We will build up the handler from the bottom by pushing on the stack.
-  // First compute the state and push the frame pointer and context.
-  unsigned state = StackHandler::OffsetField::encode(handler_index);
-  if (try_location == IN_JAVASCRIPT) {
-    push(rbp);
-    push(rsi);
-    state |= (type == TRY_CATCH_HANDLER)
-        ? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
-        : StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
-  } else {
-    ASSERT(try_location == IN_JS_ENTRY);
+  // First push the frame pointer and context.
+  if (kind == StackHandler::JS_ENTRY) {
     // The frame pointer does not point to a JS frame so we save NULL for
     // rbp. We expect the code throwing an exception to check rbp before
     // dereferencing it to restore the context.
     push(Immediate(0));  // NULL frame pointer.
     Push(Smi::FromInt(0));  // No context.
-    state |= StackHandler::KindField::encode(StackHandler::ENTRY);
+  } else {
+    push(rbp);
+    push(rsi);
   }

   // Push the state and the code object.
+  unsigned state =
+      StackHandler::IndexField::encode(handler_index) |
+      StackHandler::KindField::encode(kind);
   push(Immediate(state));
   Push(CodeObject());
@@ -2594,7 +2590,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

   bind(&check_kind);
-  STATIC_ASSERT(StackHandler::ENTRY == 0);
+  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
   testl(Operand(rsp, StackHandlerConstants::kStateOffset),
         Immediate(StackHandler::KindField::kMask));
   j(not_zero, &fetch_next);
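Editor's note: the reworked PushTryHandler packs the handler's chain index and its StackHandler::Kind into one word with bit fields, so the unwinder can test the kind with a single mask, as the ThrowUncatchable hunk does. A small self-contained sketch of that encode/decode scheme, using home-grown layout constants rather than V8's BitField template:

#include <cassert>

// Toy layout: the low 3 bits hold the handler kind, the rest hold the
// index. (V8 expresses the same idea with its BitField<> template.)
const unsigned kKindBits = 3;
const unsigned kKindMask = (1u << kKindBits) - 1;

enum Kind { JS_ENTRY = 0, CATCH = 1, FINALLY = 2 };

unsigned EncodeState(unsigned index, Kind kind) {
  return (index << kKindBits) | static_cast<unsigned>(kind);
}

int main() {
  unsigned state = EncodeState(7, CATCH);
  assert((state & kKindMask) == CATCH);  // One masked test recovers the kind,
  assert((state >> kKindBits) == 7);     // a shift recovers the index.
  // Keeping JS_ENTRY == 0 lets the unwinder test "kind bits all zero"
  // cheaply, which is what the STATIC_ASSERT in ThrowUncatchable relies on.
  assert((EncodeState(0, JS_ENTRY) & kKindMask) == 0);
  return 0;
}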

4
deps/v8/src/x64/macro-assembler-x64.h

@@ -961,9 +961,7 @@ class MacroAssembler: public Assembler {
   // Exception handling

   // Push a new try handler and link it into try handler chain.
-  void PushTryHandler(CodeLocation try_location,
-                      HandlerType type,
-                      int handler_index);
+  void PushTryHandler(StackHandler::Kind kind, int handler_index);

   // Unlink the stack handler on top of the stack from the try handler chain.
   void PopTryHandler();

74
deps/v8/src/x64/stub-cache-x64.cc

@@ -1331,24 +1331,24 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
   } else {
     Label call_builtin;

-    // Get the elements array of the object.
-    __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
-
-    // Check that the elements are in fast mode and writable.
-    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           factory()->fixed_array_map());
-    __ j(not_equal, &call_builtin);
-
     if (argc == 1) {  // Otherwise fall through to call builtin.
       Label attempt_to_grow_elements, with_write_barrier;

+      // Get the elements array of the object.
+      __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
+
+      // Check that the elements are in fast mode and writable.
+      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
+             factory()->fixed_array_map());
+      __ j(not_equal, &call_builtin);
+
       // Get the array's length into rax and calculate new length.
       __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
       STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
       __ addl(rax, Immediate(argc));

-      // Get the element's length into rcx.
-      __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
+      // Get the elements' length into rcx.
+      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));

       // Check if we could survive without allocation.
       __ cmpl(rax, rcx);
@@ -1361,30 +1361,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       // Save new length.
       __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

-      // Push the element.
-      __ lea(rdx, FieldOperand(rbx,
-                               rax, times_pointer_size,
-                               FixedArray::kHeaderSize - argc * kPointerSize));
-      __ movq(Operand(rdx, 0), rcx);
+      // Store the value.
+      __ movq(FieldOperand(rdi,
+                           rax,
+                           times_pointer_size,
+                           FixedArray::kHeaderSize - argc * kPointerSize),
+              rcx);

       __ Integer32ToSmi(rax, rax);  // Return new length as smi.
       __ ret((argc + 1) * kPointerSize);

       __ bind(&with_write_barrier);

-      __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
-      __ CheckFastObjectElements(rdi, &call_builtin);
+      __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+
+      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+        Label fast_object, not_fast_object;
+        __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
+        __ jmp(&fast_object);
+        // In case of fast smi-only, convert to fast object, otherwise bail out.
+        __ bind(&not_fast_object);
+        __ CheckFastSmiOnlyElements(rbx, &call_builtin);
+        // rdx: receiver
+        // rbx: map
+        __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+                                               FAST_ELEMENTS,
+                                               rbx,
+                                               r10,
+                                               &call_builtin);
+        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+        __ bind(&fast_object);
+      } else {
+        __ CheckFastObjectElements(rbx, &call_builtin);
+      }

       // Save new length.
       __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

-      // Push the element.
-      __ lea(rdx, FieldOperand(rbx,
+      // Store the value.
+      __ lea(rdx, FieldOperand(rdi,
                                rax, times_pointer_size,
                                FixedArray::kHeaderSize - argc * kPointerSize));
       __ movq(Operand(rdx, 0), rcx);

-      __ RecordWrite(rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+      __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

       __ Integer32ToSmi(rax, rax);  // Return new length as smi.
@@ -1395,11 +1417,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
         __ jmp(&call_builtin);
       }

-      __ movq(rdi, Operand(rsp, argc * kPointerSize));
+      __ movq(rbx, Operand(rsp, argc * kPointerSize));
       // Growing elements that are SMI-only requires special handling in case
       // the new element is non-Smi. For now, delegate to the builtin.
       Label no_fast_elements_check;
-      __ JumpIfSmi(rdi, &no_fast_elements_check);
+      __ JumpIfSmi(rbx, &no_fast_elements_check);
       __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
       __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
       __ bind(&no_fast_elements_check);
@@ -1414,7 +1436,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       __ Load(rcx, new_space_allocation_top);

       // Check if it's the end of elements.
-      __ lea(rdx, FieldOperand(rbx,
+      __ lea(rdx, FieldOperand(rdi,
                                rax, times_pointer_size,
                                FixedArray::kHeaderSize - argc * kPointerSize));
       __ cmpq(rdx, rcx);
@@ -1429,7 +1451,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       __ Store(new_space_allocation_top, rcx);

       // Push the argument...
-      __ movq(Operand(rdx, 0), rdi);
+      __ movq(Operand(rdx, 0), rbx);
       // ... and fill the rest with holes.
       __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
       for (int i = 1; i < kAllocationDelta; i++) {
@@ -1441,13 +1463,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
       // tell the incremental marker to rescan the object that we just grew. We
       // don't need to worry about the holes because they are in old space and
       // already marked black.
-      __ RecordWrite(rbx, rdx, rdi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
+      __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);

       // Restore receiver to rdx as finish sequence assumes it's here.
       __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

       // Increment element's and array's sizes.
-      __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
+      __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
                         Smi::FromInt(kAllocationDelta));

       // Make new length a smi before returning it.

131
deps/v8/test/cctest/test-api.cc

@@ -13535,6 +13535,137 @@ TEST(CaptureStackTraceForUncaughtExceptionAndSetters) {
 }

+static void RethrowStackTraceHandler(v8::Handle<v8::Message> message,
+                                     v8::Handle<v8::Value> data) {
+  // Use the frame where JavaScript is called from.
+  v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+  CHECK(!stack_trace.IsEmpty());
+  int frame_count = stack_trace->GetFrameCount();
+  CHECK_EQ(3, frame_count);
+  int line_number[] = {1, 2, 5};
+  for (int i = 0; i < frame_count; i++) {
+    CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+  }
+}
+
+
+// Test that we only return the stack trace at the site where the exception
+// is first thrown (not where it is rethrown).
+TEST(RethrowStackTrace) {
+  v8::HandleScope scope;
+  LocalContext env;
+  // We make sure that
+  // - the stack trace of the ReferenceError in g() is reported.
+  // - the stack trace is not overwritten when e1 is rethrown by t().
+  // - the stack trace of e2 does not overwrite that of e1.
+  const char* source =
+      "function g() { error; }    \n"
+      "function f() { g(); }      \n"
+      "function t(e) { throw e; } \n"
+      "try {                      \n"
+      "  f();                     \n"
+      "} catch (e1) {             \n"
+      "  try {                    \n"
+      "    error;                 \n"
+      "  } catch (e2) {           \n"
+      "    t(e1);                 \n"
+      "  }                        \n"
+      "}                          \n";
+  v8::V8::AddMessageListener(RethrowStackTraceHandler);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+  CompileRun(source);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+  v8::V8::RemoveMessageListeners(RethrowStackTraceHandler);
+}
+
+
+static void RethrowPrimitiveStackTraceHandler(v8::Handle<v8::Message> message,
+                                              v8::Handle<v8::Value> data) {
+  v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+  CHECK(!stack_trace.IsEmpty());
+  int frame_count = stack_trace->GetFrameCount();
+  CHECK_EQ(2, frame_count);
+  int line_number[] = {3, 7};
+  for (int i = 0; i < frame_count; i++) {
+    CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+  }
+}
+
+
+// Test that we do not recognize identity for primitive exceptions.
+TEST(RethrowPrimitiveStackTrace) {
+  v8::HandleScope scope;
+  LocalContext env;
+  // We do not capture stack trace for non Error objects on creation time.
+  // Instead, we capture the stack trace on last throw.
+  const char* source =
+      "function g() { throw 404; } \n"
+      "function f() { g(); }       \n"
+      "function t(e) { throw e; }  \n"
+      "try {                       \n"
+      "  f();                      \n"
+      "} catch (e1) {              \n"
+      "  t(e1)                     \n"
+      "}                           \n";
+  v8::V8::AddMessageListener(RethrowPrimitiveStackTraceHandler);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+  CompileRun(source);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+  v8::V8::RemoveMessageListeners(RethrowPrimitiveStackTraceHandler);
+}
+
+
+static void RethrowExistingStackTraceHandler(v8::Handle<v8::Message> message,
+                                             v8::Handle<v8::Value> data) {
+  // Use the frame where JavaScript is called from.
+  v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+  CHECK(!stack_trace.IsEmpty());
+  CHECK_EQ(1, stack_trace->GetFrameCount());
+  CHECK_EQ(1, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured when the error object is created and
+// not where it is thrown.
+TEST(RethrowExistingStackTrace) {
+  v8::HandleScope scope;
+  LocalContext env;
+  const char* source =
+      "var e = new Error(); \n"
+      "throw e;             \n";
+  v8::V8::AddMessageListener(RethrowExistingStackTraceHandler);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+  CompileRun(source);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+  v8::V8::RemoveMessageListeners(RethrowExistingStackTraceHandler);
+}
+
+
+static void RethrowBogusErrorStackTraceHandler(v8::Handle<v8::Message> message,
+                                               v8::Handle<v8::Value> data) {
+  // Use the frame where JavaScript is called from.
+  v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+  CHECK(!stack_trace.IsEmpty());
+  CHECK_EQ(1, stack_trace->GetFrameCount());
+  CHECK_EQ(2, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured where the bogus Error object is thrown.
+TEST(RethrowBogusErrorStackTrace) {
+  v8::HandleScope scope;
+  LocalContext env;
+  const char* source =
+      "var e = {__proto__: new Error()} \n"
+      "throw e;                         \n";
+  v8::V8::AddMessageListener(RethrowBogusErrorStackTraceHandler);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+  CompileRun(source);
+  v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+  v8::V8::RemoveMessageListeners(RethrowBogusErrorStackTraceHandler);
+}
+
+
 v8::Handle<Value> AnalyzeStackOfEvalWithSourceURL(const v8::Arguments& args) {
   v8::HandleScope scope;
   v8::Handle<v8::StackTrace> stackTrace =

5
deps/v8/test/cctest/test-ast.cc

@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -40,7 +40,8 @@ TEST(List) {
   CHECK_EQ(0, list->length());

   ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
-  AstNode* node = new(ZONE) EmptyStatement();
+  AstNodeFactory<AstNullVisitor> factory(Isolate::Current());
+  AstNode* node = factory.NewEmptyStatement();
   list->Add(node);
   CHECK_EQ(1, list->length());
   CHECK_EQ(node, list->at(0));

4
deps/v8/test/cctest/test-heap-profiler.cc

@@ -774,7 +774,7 @@ TEST(HeapSnapshotRetainedObjectInfo) {
   }

   const v8::HeapGraphNode* native_group_aaa = GetNode(
-      snapshot->GetRoot(), v8::HeapGraphNode::kNative, "aaa-group");
+      snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "aaa-group");
   CHECK_NE(NULL, native_group_aaa);
   CHECK_EQ(1, native_group_aaa->GetChildrenCount());
   const v8::HeapGraphNode* aaa = GetNode(
@@ -783,7 +783,7 @@ TEST(HeapSnapshotRetainedObjectInfo) {
   CHECK_EQ(2, aaa->GetChildrenCount());

   const v8::HeapGraphNode* native_group_ccc = GetNode(
-      snapshot->GetRoot(), v8::HeapGraphNode::kNative, "ccc-group");
+      snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "ccc-group");
   const v8::HeapGraphNode* ccc = GetNode(
       native_group_ccc, v8::HeapGraphNode::kNative, "ccc");
   CHECK_NE(NULL, ccc);

10
deps/v8/test/cctest/test-mark-compact.cc

@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -534,15 +534,15 @@ TEST(BootUpMemoryUse) {
   intptr_t booted_memory = MemoryInUse();
   if (sizeof(initial_memory) == 8) {
     if (v8::internal::Snapshot::IsEnabled()) {
-      CHECK_LE(booted_memory - initial_memory, 6654 * 1024);  // 6444.
+      CHECK_LE(booted_memory - initial_memory, 6686 * 1024);  // 6476.
     } else {
-      CHECK_LE(booted_memory - initial_memory, 6777 * 1024);  // 6596.
+      CHECK_LE(booted_memory - initial_memory, 6809 * 1024);  // 6628.
     }
   } else {
     if (v8::internal::Snapshot::IsEnabled()) {
-      CHECK_LE(booted_memory - initial_memory, 6500 * 1024);  // 6356.
+      CHECK_LE(booted_memory - initial_memory, 6532 * 1024);  // 6388.
     } else {
-      CHECK_LE(booted_memory - initial_memory, 6654 * 1024);  // 6424
+      CHECK_LE(booted_memory - initial_memory, 6686 * 1024);  // 6456
     }
   }
 }

Some files were not shown because too many files changed in this diff
