
Upgrade V8 to 3.9.5

v0.7.4-release. isaacs, 13 years ago. Commit 68a0c56a7d.
100 changed files (lines changed per file in parentheses):

  1. deps/v8/ChangeLog (24)
  2. deps/v8/SConstruct (2)
  3. deps/v8/build/common.gypi (26)
  4. deps/v8/build/mipsu.gypi (1)
  5. deps/v8/include/v8-profiler.h (4)
  6. deps/v8/src/api.cc (9)
  7. deps/v8/src/arm/builtins-arm.cc (38)
  8. deps/v8/src/arm/code-stubs-arm.cc (4)
  9. deps/v8/src/arm/full-codegen-arm.cc (35)
  10. deps/v8/src/arm/lithium-codegen-arm.cc (1)
  11. deps/v8/src/arm/macro-assembler-arm.cc (25)
  12. deps/v8/src/arm/macro-assembler-arm.h (7)
  13. deps/v8/src/arm/stub-cache-arm.cc (52)
  14. deps/v8/src/ast.cc (399)
  15. deps/v8/src/ast.h (1353)
  16. deps/v8/src/builtins.cc (2)
  17. deps/v8/src/codegen.cc (13)
  18. deps/v8/src/compiler.cc (20)
  19. deps/v8/src/compiler.h (6)
  20. deps/v8/src/cpu-profiler.cc (4)
  21. deps/v8/src/d8.cc (2)
  22. deps/v8/src/flag-definitions.h (36)
  23. deps/v8/src/frames-inl.h (14)
  24. deps/v8/src/frames.cc (2)
  25. deps/v8/src/frames.h (20)
  26. deps/v8/src/full-codegen.cc (82)
  27. deps/v8/src/full-codegen.h (6)
  28. deps/v8/src/handles.cc (4)
  29. deps/v8/src/heap.cc (11)
  30. deps/v8/src/heap.h (3)
  31. deps/v8/src/hydrogen-instructions.cc (7)
  32. deps/v8/src/hydrogen-instructions.h (42)
  33. deps/v8/src/hydrogen.cc (274)
  34. deps/v8/src/hydrogen.h (29)
  35. deps/v8/src/ia32/code-stubs-ia32.cc (4)
  36. deps/v8/src/ia32/full-codegen-ia32.cc (34)
  37. deps/v8/src/ia32/lithium-codegen-ia32.cc (1)
  38. deps/v8/src/ia32/macro-assembler-ia32.cc (25)
  39. deps/v8/src/ia32/macro-assembler-ia32.h (4)
  40. deps/v8/src/ia32/stub-cache-ia32.cc (69)
  41. deps/v8/src/ic-inl.h (5)
  42. deps/v8/src/ic.cc (25)
  43. deps/v8/src/ic.h (1)
  44. deps/v8/src/incremental-marking.cc (3)
  45. deps/v8/src/isolate.cc (112)
  46. deps/v8/src/isolate.h (10)
  47. deps/v8/src/list-inl.h (4)
  48. deps/v8/src/macro-assembler.h (16)
  49. deps/v8/src/mark-compact.cc (31)
  50. deps/v8/src/mark-compact.h (7)
  51. deps/v8/src/messages.js (6)
  52. deps/v8/src/mips/assembler-mips.cc (4)
  53. deps/v8/src/mips/builtins-mips.cc (39)
  54. deps/v8/src/mips/code-stubs-mips.cc (2)
  55. deps/v8/src/mips/ic-mips.cc (62)
  56. deps/v8/src/mips/lithium-codegen-mips.cc (1)
  57. deps/v8/src/mips/macro-assembler-mips.cc (77)
  58. deps/v8/src/mips/macro-assembler-mips.h (19)
  59. deps/v8/src/mips/stub-cache-mips.cc (52)
  60. deps/v8/src/objects-inl.h (20)
  61. deps/v8/src/objects.cc (104)
  62. deps/v8/src/objects.h (53)
  63. deps/v8/src/parser.cc (400)
  64. deps/v8/src/parser.h (94)
  65. deps/v8/src/platform-freebsd.cc (2)
  66. deps/v8/src/platform-linux.cc (2)
  67. deps/v8/src/platform-macos.cc (2)
  68. deps/v8/src/platform-openbsd.cc (2)
  69. deps/v8/src/platform-solaris.cc (2)
  70. deps/v8/src/platform-win32.cc (2)
  71. deps/v8/src/preparser.h (10)
  72. deps/v8/src/prettyprinter.cc (454)
  73. deps/v8/src/prettyprinter.h (103)
  74. deps/v8/src/profile-generator.cc (88)
  75. deps/v8/src/profile-generator.h (11)
  76. deps/v8/src/property-details.h (4)
  77. deps/v8/src/property.h (2)
  78. deps/v8/src/rewriter.cc (40)
  79. deps/v8/src/runtime-profiler.cc (60)
  80. deps/v8/src/runtime-profiler.h (17)
  81. deps/v8/src/runtime.cc (24)
  82. deps/v8/src/runtime.h (2)
  83. deps/v8/src/scanner.cc (18)
  84. deps/v8/src/scanner.h (20)
  85. deps/v8/src/scopes.cc (60)
  86. deps/v8/src/scopes.h (42)
  87. deps/v8/src/spaces.h (7)
  88. deps/v8/src/token.h (3)
  89. deps/v8/src/v8.cc (5)
  90. deps/v8/src/version.cc (2)
  91. deps/v8/src/x64/code-stubs-x64.cc (4)
  92. deps/v8/src/x64/full-codegen-x64.cc (35)
  93. deps/v8/src/x64/lithium-codegen-x64.cc (5)
  94. deps/v8/src/x64/macro-assembler-x64.cc (24)
  95. deps/v8/src/x64/macro-assembler-x64.h (4)
  96. deps/v8/test/cctest/test-api.cc (131)
  97. deps/v8/test/cctest/test-ast.cc (5)
  98. deps/v8/test/cctest/test-heap-profiler.cc (4)
  99. deps/v8/test/cctest/test-mark-compact.cc (10)

deps/v8/ChangeLog (24 lines changed)

@@ -1,3 +1,27 @@
2012-02-09: Version 3.9.5

        Removed unused command line flags.

        Performance and stability improvements on all platforms.


2012-02-08: Version 3.9.4

        Properly initialize element-transitioning array literals on ARM.
        (issue 1930)

        Bug fixes on all platforms.


2012-02-07: Version 3.9.3

        When rethrowing an exception, print the stack trace of its original
        site instead of rethrow site (Chromium issue 60240).

        Increased size of small stacks from 32k to 64k to avoid hitting limits
        in Chromium (Chromium issue 112843).


2012-02-06: Version 3.9.2

        Add timestamp to --trace-gc output. (issue 1932)

deps/v8/SConstruct (2 lines changed)

@@ -128,7 +128,7 @@ LIBRARY_FLAGS = {
'CPPDEFINES': ['__C99FEATURES__'],
'CPPPATH' : [src_dir, '/usr/local/include'],
'LIBPATH' : ['/usr/local/lib'],
'CCFLAGS': ['-ansi', '-fno-omit-frame-pointer'],
'CCFLAGS': ['-ansi'],
},
'os:netbsd': {
'CPPPATH' : [src_dir, '/usr/pkg/include'],

deps/v8/build/common.gypi (26 lines changed)

@@ -169,6 +169,28 @@
'V8_TARGET_ARCH_MIPS',
],
'conditions': [
[ 'target_arch=="mips"', {
'target_conditions': [
['_toolset=="target"', {
'cflags': ['-EL'],
'ldflags': ['-EL'],
'conditions': [
[ 'v8_use_mips_abi_hardfloat=="true"', {
'cflags': ['-mhard-float'],
'ldflags': ['-mhard-float'],
}, {
'cflags': ['-msoft-float'],
'ldflags': ['-msoft-float'],
}],
['mips_arch_variant=="mips32r2"', {
'cflags': ['-mips32r2', '-Wa,-mips32r2'],
}, {
'cflags': ['-mips32', '-Wa,-mips32'],
}],
],
}],
],
}],
[ 'v8_can_use_fpu_instructions=="true"', {
'defines': [
'CAN_USE_FPU_INSTRUCTIONS',
@@ -184,6 +206,9 @@
'__mips_soft_float=1'
],
}],
['mips_arch_variant=="mips32r2"', {
'defines': ['_MIPS_ARCH_MIPS32R2',],
}],
# The MIPS assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
['host_arch=="x64"', {
@@ -327,6 +352,7 @@
}], # OS=="mac"
['OS=="win"', {
'msvs_configuration_attributes': {
'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
'CharacterSet': '1',
},

deps/v8/build/mipsu.gypi (1 line changed)

@@ -29,5 +29,6 @@
'variables': {
'target_arch': 'ia32',
'v8_target_arch': 'mips',
'mips_arch_variant': 'mips32r2',
},
}

deps/v8/include/v8-profiler.h (4 lines changed)

@@ -255,7 +255,9 @@ class V8EXPORT HeapGraphNode {
kClosure = 5, // Function closure.
kRegExp = 6, // RegExp.
kHeapNumber = 7, // Number stored in the heap.
kNative = 8 // Native object (not from V8 heap).
kNative = 8, // Native object (not from V8 heap).
kSynthetic = 9 // Synthetic object, usually used for grouping
// snapshot items together.
};
/** Returns node type (see HeapGraphNode::Type). */
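For context, the new kSynthetic kind is what a snapshot consumer sees when iterating nodes. A minimal sketch, assuming the 3.9-era HeapSnapshot iteration API (GetNodesCount/GetNode); the helper name is invented:

#include "v8-profiler.h"

// Count synthetic grouping nodes in a heap snapshot (illustrative only).
int CountSyntheticNodes(const v8::HeapSnapshot* snapshot) {
  int synthetic = 0;
  for (int i = 0; i < snapshot->GetNodesCount(); ++i) {
    const v8::HeapGraphNode* node = snapshot->GetNode(i);
    if (node->GetType() == v8::HeapGraphNode::kSynthetic) ++synthetic;
  }
  return synthetic;
}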

deps/v8/src/api.cc (9 lines changed)

@@ -6088,9 +6088,7 @@ static void SetFlagsFromString(const char* flags) {
void Testing::PrepareStressRun(int run) {
static const char* kLazyOptimizations =
"--prepare-always-opt --nolimit-inlining "
"--noalways-opt --noopt-eagerly";
static const char* kEagerOptimizations = "--opt-eagerly";
"--prepare-always-opt --nolimit-inlining --noalways-opt";
static const char* kForcedOptimizations = "--always-opt";
// If deoptimization is stressed, turn on frequent deoptimization. If no value
@@ -6107,15 +6105,12 @@ void Testing::PrepareStressRun(int run) {
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
} else {
SetFlagsFromString(kEagerOptimizations);
SetFlagsFromString(kLazyOptimizations);
}
#else
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
} else if (run == GetStressRuns() - 2) {
SetFlagsFromString(kEagerOptimizations);
} else {
} else if (run != GetStressRuns() - 2) {
SetFlagsFromString(kLazyOptimizations);
}
#endif
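The reworked #else branch is easy to misread: with --opt-eagerly gone, the second-to-last stress run now deliberately gets no extra flags at all, the last run gets forced optimization, and every other run gets the lazy set. A standalone sketch of that selection (helper name and return-a-string shape are invented for illustration):

// Which flags does stress run `run` out of `total` receive? (sketch)
const char* StressFlagsFor(int run, int total) {
  if (run == total - 1) return "--always-opt";  // forced optimization
  if (run == total - 2) return "";              // deliberately untouched
  return "--prepare-always-opt --nolimit-inlining --noalways-opt";
}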

deps/v8/src/arm/builtins-arm.cc (38 lines changed)

@@ -895,23 +895,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r4: JSObject
__ bind(&allocated);
__ push(r4);
__ push(r4);
// Push the function and the allocated receiver from the stack.
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ ldr(r1, MemOperand(sp, kPointerSize));
__ push(r1); // Constructor function.
__ push(r4); // Receiver.
// Reload the number of arguments from the stack.
// r1: constructor function
// Reload the number of arguments and the constructor from the stack.
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ ldr(r3, MemOperand(sp, 4 * kPointerSize));
// sp[1]: receiver
// sp[2]: constructor function
// sp[3]: number of arguments (smi-tagged)
__ ldr(r1, MemOperand(sp, 2 * kPointerSize));
__ ldr(r3, MemOperand(sp, 3 * kPointerSize));
// Set up pointer to last argument.
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@@ -921,14 +913,13 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
// r2: address of last argument (caller sp)
// r1: constructor function
// r2: address of last argument (caller sp)
// r3: number of arguments (smi-tagged)
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
// sp[1]: receiver
// sp[2]: constructor function
// sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ b(&entry);
__ bind(&loop);
@@ -954,13 +945,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
// Pop the function from the stack.
// sp[0]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ pop();
// Restore context from the frame.
// r0: result
// sp[0]: receiver

deps/v8/src/arm/code-stubs-arm.cc (4 lines changed)

@@ -3964,7 +3964,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// handler block in this code object, so its index is 0.
__ bind(&invoke);
// Must preserve r0-r4, r5-r7 are available.
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
__ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bl(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
@@ -7358,7 +7358,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
__ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r10,
__ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
&slow_elements);
__ Ret();
}

deps/v8/src/arm/full-codegen-arm.cc (35 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -139,6 +139,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
#endif
// We can optionally optimize based on counters rather than statistical
// sampling.
if (info->ShouldSelfOptimize()) {
if (FLAG_trace_opt) {
PrintF("[adding self-optimization header to %s]\n",
*info->function()->debug_name()->ToCString());
}
MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
JSGlobalPropertyCell* cell;
if (maybe_cell->To(&cell)) {
__ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
__ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
__ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
__ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
Handle<Code> compile_stub(
isolate()->builtins()->builtin(Builtins::kLazyRecompile));
__ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
}
}
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). r5 is zero for method calls and non-zero for
@@ -265,11 +286,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -706,8 +727,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function,
int* global_count) {
FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
@@ -716,7 +736,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
++(*global_count);
++global_count_;
break;
case Variable::PARAMETER:
@@ -801,9 +821,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.

deps/v8/src/arm/lithium-codegen-arm.cc (1 line changed)

@@ -673,7 +673,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);

deps/v8/src/arm/macro-assembler-arm.cc (25 lines changed)

@@ -1188,8 +1188,7 @@ void MacroAssembler::DebugBreak() {
#endif
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type,
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -1201,28 +1200,20 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
// For the JSEntry handler, we must preserve r0-r4, r5-r7 are available.
// We will build up the handler from the bottom by pushing on the stack.
// First compute the state.
unsigned state = StackHandler::OffsetField::encode(handler_index);
if (try_location == IN_JAVASCRIPT) {
state |= (type == TRY_CATCH_HANDLER)
? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
: StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
} else {
ASSERT(try_location == IN_JS_ENTRY);
state |= StackHandler::KindField::encode(StackHandler::ENTRY);
}
// Set up the code object (r5) and the state (r6) for pushing.
unsigned state =
StackHandler::IndexField::encode(handler_index) |
StackHandler::KindField::encode(kind);
mov(r5, Operand(CodeObject()));
mov(r6, Operand(state));
// Push the frame pointer, context, state, and code object.
if (try_location == IN_JAVASCRIPT) {
stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
} else {
if (kind == StackHandler::JS_ENTRY) {
mov(r7, Operand(Smi::FromInt(0))); // Indicates no context.
mov(ip, Operand(0, RelocInfo::NONE)); // NULL frame pointer.
stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
} else {
stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
}
// Link the current handler as the next handler.
@@ -1330,7 +1321,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
STATIC_ASSERT(StackHandler::ENTRY == 0);
STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
tst(r2, Operand(StackHandler::KindField::kMask));
b(ne, &fetch_next);

deps/v8/src/arm/macro-assembler-arm.h (7 lines changed)

@@ -582,9 +582,7 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link into try handler chain.
void PushTryHandler(CodeLocation try_location,
HandlerType type,
int handler_index);
void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.
@@ -803,7 +801,8 @@ class MacroAssembler: public Assembler {
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
// the FastDoubleElements array elements, otherwise jump to fail.
// the FastDoubleElements array elements. Otherwise jump to fail, in which
// case scratch2, scratch3 and scratch4 are unmodified.
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,

deps/v8/src/arm/stub-cache-arm.cc (52 lines changed)

@@ -1475,7 +1475,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Ret();
} else {
Label call_builtin;
Register elements = r3;
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
Register elements = r6;
Register end_elements = r5;
// Get the elements array of the object.
__ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
@@ -1487,8 +1491,6 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&call_builtin,
DONT_DO_SMI_CHECK);
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
// Get the array's length into r0 and calculate new length.
__ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -1496,7 +1498,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
STATIC_ASSERT(kSmiTag == 0);
__ add(r0, r0, Operand(Smi::FromInt(argc)));
// Get the element's length.
// Get the elements' length.
__ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
@@ -1511,7 +1513,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Push the element.
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
@@ -1526,13 +1528,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ bind(&with_write_barrier);
__ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ CheckFastObjectElements(r6, r6, &call_builtin);
__ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
Label fast_object, not_fast_object;
__ CheckFastObjectElements(r3, r7, &not_fast_object);
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
__ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
// r1: receiver
// r3: map
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
r3,
r7,
&call_builtin);
__ mov(r2, receiver);
ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(r3, r3, &call_builtin);
}
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Push the element.
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
@@ -1578,25 +1600,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ add(end_elements, end_elements, Operand(kEndElementsOffset));
__ mov(r7, Operand(new_space_allocation_top));
__ ldr(r6, MemOperand(r7));
__ cmp(end_elements, r6);
__ ldr(r3, MemOperand(r7));
__ cmp(end_elements, r3);
__ b(ne, &call_builtin);
__ mov(r9, Operand(new_space_allocation_limit));
__ ldr(r9, MemOperand(r9));
__ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
__ cmp(r6, r9);
__ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
__ cmp(r3, r9);
__ b(hi, &call_builtin);
// We fit and could grow elements.
// Update new_space_allocation_top.
__ str(r6, MemOperand(r7));
__ str(r3, MemOperand(r7));
// Push the argument.
__ str(r2, MemOperand(end_elements));
// Fill the rest with holes.
__ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
__ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
__ str(r6, MemOperand(end_elements, i * kPointerSize));
__ str(r3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.

deps/v8/src/ast.cc (399 lines changed)

@@ -126,18 +126,7 @@ Assignment::Assignment(Isolate* isolate,
assignment_id_(GetNextId(isolate)),
block_start_(false),
block_end_(false),
is_monomorphic_(false) {
ASSERT(Token::IsAssignmentOp(op));
if (is_compound()) {
binary_operation_ =
new(isolate->zone()) BinaryOperation(isolate,
binary_op(),
target,
value,
pos + 1);
compound_load_id_ = GetNextId(isolate);
}
}
is_monomorphic_(false) { }
Token::Value Assignment::binary_op() const {
@@ -197,9 +186,7 @@ ObjectLiteral::Property::Property(Literal* key, Expression* value) {
ObjectLiteral::Property::Property(bool is_getter, FunctionLiteral* value) {
Isolate* isolate = Isolate::Current();
emit_store_ = true;
key_ = new(isolate->zone()) Literal(isolate, value->name());
value_ = value;
kind_ = is_getter ? GETTER : SETTER;
}
@@ -427,224 +414,11 @@ bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
// Inlining support
bool Declaration::IsInlineable() const {
return proxy()->var()->IsStackAllocated() && fun() == NULL;
}
bool TargetCollector::IsInlineable() const {
UNREACHABLE();
return false;
}
bool ForInStatement::IsInlineable() const {
return false;
}
bool WithStatement::IsInlineable() const {
return false;
}
bool SwitchStatement::IsInlineable() const {
return false;
}
bool TryStatement::IsInlineable() const {
return false;
}
bool TryCatchStatement::IsInlineable() const {
return false;
}
bool TryFinallyStatement::IsInlineable() const {
return false;
}
bool DebuggerStatement::IsInlineable() const {
return false;
}
bool Throw::IsInlineable() const {
return exception()->IsInlineable();
}
bool MaterializedLiteral::IsInlineable() const {
// TODO(1322): Allow materialized literals.
return false;
}
bool FunctionLiteral::IsInlineable() const {
// TODO(1322): Allow materialized literals.
return false;
}
bool ThisFunction::IsInlineable() const {
return true;
}
bool SharedFunctionInfoLiteral::IsInlineable() const {
return false;
}
bool ForStatement::IsInlineable() const {
return (init() == NULL || init()->IsInlineable())
&& (cond() == NULL || cond()->IsInlineable())
&& (next() == NULL || next()->IsInlineable())
&& body()->IsInlineable();
}
bool WhileStatement::IsInlineable() const {
return cond()->IsInlineable()
&& body()->IsInlineable();
}
bool DoWhileStatement::IsInlineable() const {
return cond()->IsInlineable()
&& body()->IsInlineable();
}
bool ContinueStatement::IsInlineable() const {
return true;
}
bool BreakStatement::IsInlineable() const {
return true;
}
bool EmptyStatement::IsInlineable() const {
return true;
}
bool Literal::IsInlineable() const {
return true;
}
bool Block::IsInlineable() const {
const int count = statements_.length();
for (int i = 0; i < count; ++i) {
if (!statements_[i]->IsInlineable()) return false;
}
return true;
}
bool ExpressionStatement::IsInlineable() const {
return expression()->IsInlineable();
}
bool IfStatement::IsInlineable() const {
return condition()->IsInlineable()
&& then_statement()->IsInlineable()
&& else_statement()->IsInlineable();
}
bool ReturnStatement::IsInlineable() const {
return expression()->IsInlineable();
}
bool Conditional::IsInlineable() const {
return condition()->IsInlineable() && then_expression()->IsInlineable() &&
else_expression()->IsInlineable();
}
bool VariableProxy::IsInlineable() const {
return var()->IsUnallocated()
|| var()->IsStackAllocated()
|| var()->IsContextSlot();
}
bool Assignment::IsInlineable() const {
return target()->IsInlineable() && value()->IsInlineable();
}
bool Property::IsInlineable() const {
return obj()->IsInlineable() && key()->IsInlineable();
}
bool Call::IsInlineable() const {
if (!expression()->IsInlineable()) return false;
const int count = arguments()->length();
for (int i = 0; i < count; ++i) {
if (!arguments()->at(i)->IsInlineable()) return false;
}
return true;
}
bool CallNew::IsInlineable() const {
if (!expression()->IsInlineable()) return false;
const int count = arguments()->length();
for (int i = 0; i < count; ++i) {
if (!arguments()->at(i)->IsInlineable()) return false;
}
return true;
return proxy()->var()->IsStackAllocated();
}
bool CallRuntime::IsInlineable() const {
// Don't try to inline JS runtime calls because we don't (currently) even
// optimize them.
if (is_jsruntime()) return false;
// Don't inline the %_ArgumentsLength or %_Arguments because their
// implementation will not work. There is no stack frame to get them
// from.
if (function()->intrinsic_type == Runtime::INLINE &&
(name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
name()->IsEqualTo(CStrVector("_Arguments")))) {
return false;
}
const int count = arguments()->length();
for (int i = 0; i < count; ++i) {
if (!arguments()->at(i)->IsInlineable()) return false;
}
return true;
}
bool UnaryOperation::IsInlineable() const {
return expression()->IsInlineable();
}
bool BinaryOperation::IsInlineable() const {
return left()->IsInlineable() && right()->IsInlineable();
}
bool CompareOperation::IsInlineable() const {
return left()->IsInlineable() && right()->IsInlineable();
}
bool CountOperation::IsInlineable() const {
return expression()->IsInlineable();
bool VariableDeclaration::IsInlineable() const {
return Declaration::IsInlineable() && fun() == NULL;
}
@@ -1214,4 +988,169 @@ CaseClause::CaseClause(Isolate* isolate,
entry_id_(AstNode::GetNextId(isolate)) {
}
#define INCREASE_NODE_COUNT(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
}
INCREASE_NODE_COUNT(VariableDeclaration)
INCREASE_NODE_COUNT(ModuleDeclaration)
INCREASE_NODE_COUNT(ModuleLiteral)
INCREASE_NODE_COUNT(ModuleVariable)
INCREASE_NODE_COUNT(ModulePath)
INCREASE_NODE_COUNT(ModuleUrl)
INCREASE_NODE_COUNT(Block)
INCREASE_NODE_COUNT(ExpressionStatement)
INCREASE_NODE_COUNT(EmptyStatement)
INCREASE_NODE_COUNT(IfStatement)
INCREASE_NODE_COUNT(ContinueStatement)
INCREASE_NODE_COUNT(BreakStatement)
INCREASE_NODE_COUNT(ReturnStatement)
INCREASE_NODE_COUNT(Conditional)
INCREASE_NODE_COUNT(Literal)
INCREASE_NODE_COUNT(Assignment)
INCREASE_NODE_COUNT(Throw)
INCREASE_NODE_COUNT(Property)
INCREASE_NODE_COUNT(UnaryOperation)
INCREASE_NODE_COUNT(CountOperation)
INCREASE_NODE_COUNT(BinaryOperation)
INCREASE_NODE_COUNT(CompareOperation)
INCREASE_NODE_COUNT(ThisFunction)
#undef INCREASE_NODE_COUNT
void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
increase_node_count();
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitTryFinallyStatement(
TryFinallyStatement* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
increase_node_count();
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
SharedFunctionInfoLiteral* node) {
increase_node_count();
add_flag(kDontOptimize);
add_flag(kDontInline);
}
void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
increase_node_count();
// In theory, we'd have to add:
// if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
// However, node->var() is usually not bound yet at VariableProxy creation
// time, and LOOKUP variables only result from constructs that cannot
// be inlined anyway.
}
void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
increase_node_count();
add_flag(kDontInline); // TODO(1322): Allow materialized literals.
}
void AstConstructionVisitor::VisitObjectLiteral(ObjectLiteral* node) {
increase_node_count();
add_flag(kDontInline); // TODO(1322): Allow materialized literals.
}
void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
increase_node_count();
add_flag(kDontInline); // TODO(1322): Allow materialized literals.
}
void AstConstructionVisitor::VisitCall(Call* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitCallNew(CallNew* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
}
void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
increase_node_count();
add_flag(kDontSelfOptimize);
if (node->is_jsruntime()) {
// Don't try to inline JS runtime calls because we don't (currently) even
// optimize them.
add_flag(kDontInline);
} else if (node->function()->intrinsic_type == Runtime::INLINE &&
(node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
node->name()->IsEqualTo(CStrVector("_Arguments")))) {
// Don't inline the %_ArgumentsLength or %_Arguments because their
// implementation will not work. There is no stack frame to get them
// from.
add_flag(kDontInline);
}
}
} } // namespace v8::internal
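The thrust of this ast.cc rewrite: the many per-node IsInlineable() predicates are replaced by flags gathered once while the parser builds the AST. A toy model of that accumulator pattern (all names invented; the real visitor is the AstConstructionVisitor shown above):

#include <cstdint>

enum DontFlag { kNoOptimize = 1, kNoInline = 2, kNoSelfOptimize = 4 };

// Counts nodes and accumulates "don't" flags during AST construction.
class NodeStats {
 public:
  void VisitForIn() {  // an expensive construct: sets every flag
    ++count_;
    flags_ |= kNoOptimize | kNoInline | kNoSelfOptimize;
  }
  void VisitLiteral() { ++count_; }  // most nodes only bump the count
  bool Contains(DontFlag f) const { return (flags_ & f) != 0; }
  int count() const { return count_; }
 private:
  int count_ = 0;
  uint32_t flags_ = 0;
};

The payoff is visible in compiler.cc below: the compiler reads the precomputed count and flags off the FunctionLiteral instead of re-walking the tree.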

deps/v8/src/ast.h (1353 lines changed)

File diff suppressed because it is too large

deps/v8/src/builtins.cc (2 lines changed)

@@ -978,7 +978,7 @@ BUILTIN(ArrayConcat) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
if (!JSArray::cast(arg)->HasFastElements()) {
if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
elements_kind = FAST_ELEMENTS;
}
}

deps/v8/src/codegen.cc (13 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -62,18 +62,15 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
#ifdef DEBUG
bool print_source = false;
bool print_ast = false;
bool print_json_ast = false;
const char* ftype;
if (Isolate::Current()->bootstrapper()->IsActive()) {
print_source = FLAG_print_builtin_source;
print_ast = FLAG_print_builtin_ast;
print_json_ast = FLAG_print_builtin_json_ast;
ftype = "builtin";
} else {
print_source = FLAG_print_source;
print_ast = FLAG_print_ast;
print_json_ast = FLAG_print_json_ast;
Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
if (print_source && !filter.is_empty()) {
print_source = info->function()->name()->IsEqualTo(filter);
@@ -81,9 +78,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
if (print_ast && !filter.is_empty()) {
print_ast = info->function()->name()->IsEqualTo(filter);
}
if (print_json_ast && !filter.is_empty()) {
print_json_ast = info->function()->name()->IsEqualTo(filter);
}
ftype = "user-defined";
}
@@ -102,11 +96,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
PrintF("--- AST ---\n%s\n",
AstPrinter().PrintProgram(info->function()));
}
if (print_json_ast) {
JsonAstBuilder builder;
PrintF("%s", builder.BuildProgram(info->function()));
}
#endif // DEBUG
}

deps/v8/src/compiler.cc (20 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -110,6 +110,18 @@ void CompilationInfo::DisableOptimization() {
}
// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
return FLAG_self_optimization &&
FLAG_crankshaft &&
!Serializer::enabled() &&
!function()->flags()->Contains(kDontSelfOptimize) &&
(shared_info().is_null() || !shared_info()->optimization_disabled());
}
void CompilationInfo::AbortOptimization() {
Handle<Code> code(shared_info()->code());
SetCode(code);
@@ -652,6 +664,9 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// Check the function has compiled code.
ASSERT(shared->is_compiled());
shared->set_code_age(0);
shared->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
shared->set_dont_inline(lit->flags()->Contains(kDontInline));
shared->set_ast_node_count(lit->ast_node_count());
if (info->AllowOptimize() && !shared->optimization_disabled()) {
// If we're asked to always optimize, we compile the optimized
@@ -750,6 +765,9 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
function_info->set_language_mode(lit->language_mode());
function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
function_info->set_ast_node_count(lit->ast_node_count());
function_info->set_dont_crankshaft(lit->flags()->Contains(kDontOptimize));
function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
}
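ShouldSelfOptimize() gates the counter header that full-codegen-arm.cc emits above: each function entry decrements a smi held in a JSGlobalPropertyCell, and when it reaches zero the code jumps to the lazy-recompile builtin. A minimal sketch of that countdown, detached from V8's cell machinery (struct and method names invented):

// What the emitted self-optimization header amounts to, conceptually.
struct SelfOptCounter {
  int calls_left;  // starts at Compiler::kCallsUntilPrimitiveOpt (200)
  // Returns true on the call that should trigger recompilation.
  bool CountCall() { return --calls_left == 0; }
};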

deps/v8/src/compiler.h (6 lines changed)

@@ -168,6 +168,9 @@ class CompilationInfo BASE_EMBEDDED {
return V8::UseCrankshaft() && !closure_.is_null();
}
// Determines whether or not to insert a self-optimization header.
bool ShouldSelfOptimize();
// Disable all optimization attempts of this info for the rest of the
// current compilation pipeline.
void AbortOptimization();
@@ -280,6 +283,9 @@ class Compiler : public AllStatic {
static const int kMaxInliningLevels = 3;
// Call count before primitive functions trigger their own optimization.
static const int kCallsUntilPrimitiveOpt = 200;
// All routines return a SharedFunctionInfo.
// If an error occurs an exception is raised and the return handle
// contains NULL.

deps/v8/src/cpu-profiler.cc (4 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,7 +42,7 @@ namespace internal {
static const int kEventsBufferSize = 256 * KB;
static const int kTickSamplesBufferChunkSize = 64 * KB;
static const int kTickSamplesBufferChunksCount = 16;
static const int kProfilerStackSize = 32 * KB;
static const int kProfilerStackSize = 64 * KB;
ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)

deps/v8/src/d8.cc (2 lines changed)

@@ -1288,7 +1288,7 @@ bool Shell::SetOptions(int argc, char* argv[]) {
options.use_preemption = true;
argv[i] = NULL;
#endif // V8_SHARED
} else if (strcmp(argv[i], "--no-preemption") == 0) {
} else if (strcmp(argv[i], "--nopreemption") == 0) {
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;

deps/v8/src/flag-definitions.h (36 lines changed)

@@ -109,11 +109,13 @@ private:
// Flags for experimental language features.
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
DEFINE_bool(harmony_modules, false, "enable harmony modules")
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_collections, false,
"enable harmony collections (sets, maps, and weak maps)")
DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
DEFINE_implication(harmony, harmony_scoping)
DEFINE_implication(harmony, harmony_modules)
DEFINE_implication(harmony, harmony_proxies)
DEFINE_implication(harmony, harmony_collections)
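DEFINE_implication makes one flag force another after the command line is parsed, which is how --harmony fans out to the individual features. Roughly what the implications above amount to once flags are applied (a sketch assuming the FLAG_* globals these macros declare; the real macro registers entries that the flag list processes):

// Approximate effect of the DEFINE_implication lines above (sketch).
void EnforceHarmonyImplications() {
  if (FLAG_harmony) {
    FLAG_harmony_scoping = true;
    FLAG_harmony_modules = true;
    FLAG_harmony_proxies = true;
    FLAG_harmony_collections = true;
  }
}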
@@ -136,7 +138,6 @@ DEFINE_bool(use_gvn, true, "use hydrogen global value numbering")
DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
DEFINE_bool(use_inlining, true, "use function inlining")
DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
DEFINE_bool(eliminate_empty_blocks, true, "eliminate empty blocks")
DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion")
DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
true,
@@ -164,12 +165,19 @@ DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
DEFINE_int(loop_weight, 1, "loop weight for representation inference")
// Experimental profiler changes.
DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
DEFINE_bool(self_optimization, false,
"primitive functions trigger their own optimization")
DEFINE_implication(experimental_profiler, watch_ic_patching)
DEFINE_implication(experimental_profiler, self_optimization)
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
DEFINE_bool(code_comments, false, "emit comments in code disassembly")
DEFINE_bool(peephole_optimization, true,
"perform peephole optimizations in assembly code")
DEFINE_bool(enable_sse2, true,
"enable use of SSE2 instructions if available")
DEFINE_bool(enable_sse3, true,
@@ -219,10 +227,8 @@ DEFINE_bool(lazy, true, "use lazy compilation")
DEFINE_bool(trace_opt, false, "trace lazy optimization")
DEFINE_bool(trace_opt_stats, false, "trace lazy optimization statistics")
DEFINE_bool(opt, true, "use adaptive optimizations")
DEFINE_bool(opt_eagerly, false, "be more eager when adaptively optimizing")
DEFINE_bool(always_opt, false, "always try to optimize functions")
DEFINE_bool(prepare_always_opt, false, "prepare for turning on always opt")
DEFINE_bool(deopt, true, "support deoptimization")
DEFINE_bool(trace_deopt, false, "trace deoptimization")
// compiler.cc
@@ -303,11 +309,10 @@ DEFINE_bool(native_code_counters, false,
DEFINE_bool(always_compact, false, "Perform compaction on every full GC")
DEFINE_bool(lazy_sweeping, true,
"Use lazy sweeping for old pointer and data spaces")
DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
"Flush code caches in maps during mark compact cycle.")
DEFINE_bool(never_compact, false,
"Never perform compaction on full GC - testing only")
DEFINE_bool(compact_code_space, false, "Compact code space")
DEFINE_bool(compact_code_space, true,
"Compact code space on full non-incremental collections")
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
@@ -315,14 +320,6 @@ DEFINE_int(random_seed, 0,
"Default seed for initializing random generator "
"(0, the default, means to use system random).")
DEFINE_bool(canonicalize_object_literal_maps, true,
"Canonicalize maps for object literals.")
DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1,
"Maximum number of pages in map space which still allows to encode "
"forwarding pointers. That's actually a constant, but it's useful "
"to control it with a flag for better testing.")
// objects.cc
DEFINE_bool(use_verbose_printer, true, "allows verbose printing")
@@ -443,9 +440,6 @@ DEFINE_bool(print_builtin_source, false,
"pretty print source code for builtins")
DEFINE_bool(print_ast, false, "print source AST")
DEFINE_bool(print_builtin_ast, false, "print source AST for builtins")
DEFINE_bool(print_json_ast, false, "print source AST as JSON")
DEFINE_bool(print_builtin_json_ast, false,
"print source AST for builtins as JSON")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
// compiler.cc
@@ -475,10 +469,6 @@ DEFINE_bool(trace_normalization,
// runtime.cc
DEFINE_bool(trace_lazy, false, "trace lazy compilation")
// serialize.cc
DEFINE_bool(debug_serialization, false,
"write debug information into the snapshot.")
// spaces.cc
DEFINE_bool(collect_heap_spill_statistics, false,
"report heap spill statistics along with heap_stats "

deps/v8/src/frames-inl.h (14 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -77,18 +77,18 @@ inline StackHandler* StackHandler::FromAddress(Address address) {
}
inline bool StackHandler::is_entry() const {
return kind() == ENTRY;
inline bool StackHandler::is_js_entry() const {
return kind() == JS_ENTRY;
}
inline bool StackHandler::is_try_catch() const {
return kind() == TRY_CATCH;
inline bool StackHandler::is_catch() const {
return kind() == CATCH;
}
inline bool StackHandler::is_try_finally() const {
return kind() == TRY_FINALLY;
inline bool StackHandler::is_finally() const {
return kind() == FINALLY;
}

deps/v8/src/frames.cc (2 lines changed)

@@ -1174,7 +1174,7 @@ void EntryFrame::Iterate(ObjectVisitor* v) const {
StackHandlerIterator it(this, top_handler());
ASSERT(!it.done());
StackHandler* handler = it.handler();
ASSERT(handler->is_entry());
ASSERT(handler->is_js_entry());
handler->Iterate(v, LookupCode());
#ifdef DEBUG
// Make sure that the entry frame does not contain more than one

deps/v8/src/frames.h (20 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -85,15 +85,17 @@ class InnerPointerToCodeCache {
class StackHandler BASE_EMBEDDED {
public:
enum Kind {
ENTRY,
TRY_CATCH,
TRY_FINALLY
JS_ENTRY,
CATCH,
FINALLY,
LAST_KIND = FINALLY
};
static const int kKindWidth = 2;
static const int kOffsetWidth = 32 - kKindWidth;
STATIC_ASSERT(LAST_KIND < (1 << kKindWidth));
static const int kIndexWidth = 32 - kKindWidth;
class KindField: public BitField<StackHandler::Kind, 0, kKindWidth> {};
class OffsetField: public BitField<unsigned, kKindWidth, kOffsetWidth> {};
class IndexField: public BitField<unsigned, kKindWidth, kIndexWidth> {};
// Get the address of this stack handler.
inline Address address() const;
@@ -111,9 +113,9 @@ class StackHandler BASE_EMBEDDED {
static inline StackHandler* FromAddress(Address address);
// Testers
inline bool is_entry() const;
inline bool is_try_catch() const;
inline bool is_try_finally() const;
inline bool is_js_entry() const;
inline bool is_catch() const;
inline bool is_finally() const;
private:
// Accessors.
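The renamed fields pack a 2-bit handler kind (JS_ENTRY/CATCH/FINALLY) and a 30-bit handler index into a single word; that word is the state value the macro assemblers build with KindField::encode and IndexField::encode. A worked sketch with manual shifts standing in for the BitField templates:

// state = IndexField | KindField, per kKindWidth == 2 above (sketch).
unsigned EncodeHandlerState(unsigned kind, unsigned index) {
  return (index << 2) | (kind & 0x3);  // kind in bits 0..1, index above
}
unsigned DecodeKind(unsigned state) { return state & 0x3; }
unsigned DecodeIndex(unsigned state) { return state >> 2; }

This also explains the STATIC_ASSERT(StackHandler::JS_ENTRY == 0) in macro-assembler-arm.cc above: a zero kind field lets the unwinder test for an entry handler with a plain mask.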

deps/v8/src/full-codegen.cc (82 lines changed)

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -51,7 +51,25 @@ void BreakableStatementChecker::Check(Expression* expr) {
}
void BreakableStatementChecker::VisitDeclaration(Declaration* decl) {
void BreakableStatementChecker::VisitVariableDeclaration(
VariableDeclaration* decl) {
}
void BreakableStatementChecker::VisitModuleDeclaration(
ModuleDeclaration* decl) {
}
void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}
void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}
void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}
void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}
@@ -297,6 +315,9 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
code->set_stack_check_table_offset(table_offset);
CodeGenerator::PrintCode(code, info);
info->SetCode(code); // May be an empty handle.
if (!code.is_null()) {
isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size());
}
#ifdef ENABLE_GDB_JIT_INTERFACE
if (FLAG_gdbjit && !code.is_null()) {
GDBJITLineInfo* lineinfo =
@@ -380,7 +401,7 @@ void FullCodeGenerator::RecordJSReturnSite(Call* call) {
void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
// There's no need to prepare this code for bailouts from already optimized
// code or code that can't be optimized.
if (!FLAG_deopt || !info_->HasDeoptimizationSupport()) return;
if (!info_->HasDeoptimizationSupport()) return;
unsigned pc_and_state =
StateField::encode(state) | PcField::encode(masm_->pc_offset());
BailoutEntry entry = { id, pc_and_state };
@@ -525,19 +546,19 @@ void FullCodeGenerator::DoTest(const TestContext* context) {
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
int length = declarations->length();
int global_count = 0;
for (int i = 0; i < length; i++) {
Declaration* decl = declarations->at(i);
EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count);
}
int save_global_count = global_count_;
global_count_ = 0;
AstVisitor::VisitDeclarations(declarations);
// Batch declare global functions and variables.
if (global_count > 0) {
if (global_count_ > 0) {
Handle<FixedArray> array =
isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
int length = declarations->length();
for (int j = 0, i = 0; i < length; i++) {
Declaration* decl = declarations->at(i);
VariableDeclaration* decl = declarations->at(i)->AsVariableDeclaration();
if (decl != NULL) {
Variable* var = decl->proxy()->var();
if (var->IsUnallocated()) {
@@ -561,10 +582,43 @@ void FullCodeGenerator::VisitDeclarations(
}
}
}
}
// Invoke the platform-dependent code generator to do the actual
// declaration of the global functions and variables.
DeclareGlobals(array);
}
global_count_ = save_global_count;
}
void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
// TODO(rossberg)
}
void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
// TODO(rossberg)
}
void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
// TODO(rossberg)
}
void FullCodeGenerator::VisitModulePath(ModulePath* module) {
// TODO(rossberg)
}
void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
// TODO(rossberg)
}
@@ -1147,7 +1201,7 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
// Try block code. Sets up the exception handler chain.
__ bind(&try_entry);
__ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER, stmt->index());
__ PushTryHandler(StackHandler::CATCH, stmt->index());
{ TryCatch try_body(this);
Visit(stmt->try_block());
}
@@ -1204,7 +1258,7 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// Set up try handler.
__ bind(&try_entry);
__ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER, stmt->index());
__ PushTryHandler(StackHandler::FINALLY, stmt->index());
{ TryFinally try_body(this, &finally_entry);
Visit(stmt->try_block());
}

deps/v8/src/full-codegen.h (6 lines changed)

@@ -83,6 +83,7 @@ class FullCodeGenerator: public AstVisitor {
scope_(NULL),
nesting_stack_(NULL),
loop_depth_(0),
global_count_(0),
context_(NULL),
bailout_entries_(0),
stack_checks_(2), // There's always at least one.
@@ -416,10 +417,10 @@ class FullCodeGenerator: public AstVisitor {
// Platform-specific code for a variable, constant, or function
// declaration. Functions have an initial value.
// Increments global_count_ for unallocated variables.
void EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function,
int* global_count);
FunctionLiteral* function);
// Platform-specific code for checking the stack limit at the back edge of
// a loop.
@@ -767,6 +768,7 @@ class FullCodeGenerator: public AstVisitor {
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
int global_count_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;

deps/v8/src/handles.cc (4 lines changed)

@@ -711,7 +711,7 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
isolate);
}
isolate->counters()->enum_cache_misses()->Increment();
int num_enum = object->NumberOfEnumProperties();
int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
Handle<DescriptorArray> descs =
@@ -735,7 +735,7 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
ASSERT(storage->length() == index);
return storage;
} else {
int num_enum = object->NumberOfEnumProperties();
int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);

deps/v8/src/heap.cc (11 lines changed)

@@ -1201,7 +1201,9 @@ void Heap::Scavenge() {
promotion_queue_.Destroy();
LiveObjectList::UpdateReferencesForScavengeGC();
if (!FLAG_watch_ic_patching) {
isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
}
incremental_marking()->UpdateMarkingDequeAfterScavenge();
ASSERT(new_space_front == new_space_.top());
@@ -2865,7 +2867,9 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
share->set_deopt_counter(FLAG_deopt_every_n_times);
share->set_profiler_ticks(0);
share->set_ast_node_count(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
@@ -5839,10 +5843,7 @@ bool Heap::SetUp(bool create_heap_objects) {
if (!code_space_->SetUp()) return false;
// Initialize map space.
map_space_ = new MapSpace(this,
max_old_generation_size_,
FLAG_max_map_space_pages,
MAP_SPACE);
map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
if (map_space_ == NULL) return false;
if (!map_space_->SetUp()) return false;

deps/v8/src/heap.h (3 lines changed)

@@ -243,7 +243,8 @@ namespace internal {
V(anonymous_function_symbol, "(anonymous function)") \
V(compare_ic_symbol, ".compare_ic") \
V(infinity_symbol, "Infinity") \
V(minus_infinity_symbol, "-Infinity")
V(minus_infinity_symbol, "-Infinity") \
V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")
// Forward declarations.
class GCTracer;
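The V(...) lines extend an X-macro list: each consumer passes its own V to stamp out one declaration per symbol. A generic sketch of the pattern (list and consumer names invented):

#define DEMO_SYMBOL_LIST(V)                              \
  V(infinity_symbol, "Infinity")                         \
  V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")

// One possible consumer: a string constant per symbol.
#define DECLARE_SYMBOL(name, value) const char* const name = value;
DEMO_SYMBOL_LIST(DECLARE_SYMBOL)
#undef DECLARE_SYMBOL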

deps/v8/src/hydrogen-instructions.cc (7 lines changed)

@@ -893,6 +893,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
void HCheckMap::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
stream->Add(" %p", *map());
if (mode() == REQUIRE_EXACT_MAP) {
stream->Add(" [EXACT]");
} else if (!has_element_transitions_) {
stream->Add(" [EXACT*]");
} else {
stream->Add(" [MATCH ELEMENTS]");
}
}

deps/v8/src/hydrogen-instructions.h (42 lines changed)

@@ -186,6 +186,7 @@ class LChunkBuilder;
V(InobjectFields) \
V(BackingStoreFields) \
V(ElementsKind) \
V(ElementsPointer) \
V(ArrayElements) \
V(DoubleArrayElements) \
V(SpecializedArrayElements) \
@@ -646,6 +647,18 @@ class HValue: public ZoneObject {
return gvn_flags_.ContainsAnyOf(AllObservableSideEffectsFlagSet());
}
GVNFlagSet DependsOnFlags() const {
GVNFlagSet result = gvn_flags_;
result.Intersect(AllDependsOnFlagSet());
return result;
}
GVNFlagSet SideEffectFlags() const {
GVNFlagSet result = gvn_flags_;
result.Intersect(AllSideEffectsFlagSet());
return result;
}
GVNFlagSet ChangesFlags() const {
GVNFlagSet result = gvn_flags_;
result.Intersect(AllChangesFlagSet());
@@ -722,6 +735,15 @@ class HValue: public ZoneObject {
representation_ = r;
}
static GVNFlagSet AllDependsOnFlagSet() {
GVNFlagSet result;
// Create changes mask.
#define ADD_FLAG(type) result.Add(kDependsOn##type);
GVN_FLAG_LIST(ADD_FLAG)
#undef ADD_FLAG
return result;
}
static GVNFlagSet AllChangesFlagSet() {
GVNFlagSet result;
// Create changes mask.
@@ -743,6 +765,8 @@ class HValue: public ZoneObject {
static GVNFlagSet AllObservableSideEffectsFlagSet() {
GVNFlagSet result = AllChangesFlagSet();
result.Remove(kChangesElementsKind);
result.Remove(kChangesElementsPointer);
result.Remove(kChangesMaps);
return result;
}
@@ -1920,8 +1944,7 @@ class HLoadElements: public HUnaryOperation {
explicit HLoadElements(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
SetGVNFlag(kDependsOnElementsKind);
SetGVNFlag(kDependsOnElementsPointer);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1972,6 +1995,11 @@ class HCheckMap: public HTemplateInstruction<2> {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
// If the map to check doesn't have the untransitioned elements, it must not
// be hoisted above TransitionElements instructions.
if (mode == REQUIRE_EXACT_MAP || !map->has_fast_smi_only_elements()) {
SetGVNFlag(kDependsOnElementsKind);
}
has_element_transitions_ =
map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL) != NULL ||
map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL) != NULL;
@@ -4135,7 +4163,17 @@ class HTransitionElementsKind: public HTemplateInstruction<1> {
transitioned_map_(transitioned_map) {
SetOperandAt(0, object);
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
SetGVNFlag(kChangesElementsKind);
if (original_map->has_fast_double_elements()) {
SetGVNFlag(kChangesElementsPointer);
SetGVNFlag(kDependsOnElementsPointer);
SetGVNFlag(kDependsOnDoubleArrayElements);
} else if (transitioned_map->has_fast_double_elements()) {
SetGVNFlag(kChangesElementsPointer);
SetGVNFlag(kDependsOnElementsPointer);
SetGVNFlag(kDependsOnArrayElements);
}
set_representation(Representation::Tagged());
}
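The point of the new ElementsPointer flag pair is GVN soundness: an instruction may be hoisted past another only when its depends-on set is disjoint from the other's changes set, so splitting "elements kind" from "elements pointer" lets HLoadElements move past kind-only transitions but not past ones that reallocate the backing store. The rule itself, as a toy bitmask check (names invented; GVNFlagSet stands in as a plain mask):

#include <cstdint>

// May `instr` legally be hoisted past `other`? (sketch of the GVN rule)
bool CanHoistPast(uint64_t instr_depends_on, uint64_t other_changes) {
  return (instr_depends_on & other_changes) == 0;
}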

deps/v8/src/hydrogen.cc (274 lines changed)

@@ -70,7 +70,8 @@ HBasicBlock::HBasicBlock(HGraph* graph)
deleted_phis_(4),
parent_loop_header_(NULL),
is_inline_return_target_(false),
is_deoptimizing_(false) { }
is_deoptimizing_(false),
dominates_loop_successors_(false) { }
void HBasicBlock::AttachLoopInformation() {
@@ -315,6 +316,62 @@ void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
}
void HBasicBlock::AssignLoopSuccessorDominators() {
// Mark blocks that dominate all subsequent reachable blocks inside their
// loop. Exploit the fact that blocks are sorted in reverse post order. When
// the loop is visited in increasing block id order, if the number of
// non-loop-exiting successor edges at the dominator_candidate block doesn't
// exceed the number of previously encountered predecessor edges, there is no
// path from the loop header to any block with higher id that doesn't go
// through the dominator_candidate block. In this case, the
// dominator_candidate block is guaranteed to dominate all blocks reachable
// from it with higher ids.
HBasicBlock* last = loop_information()->GetLastBackEdge();
int outstanding_successors = 1; // one edge from the pre-header
// Header always dominates everything.
MarkAsLoopSuccessorDominator();
for (int j = block_id(); j <= last->block_id(); ++j) {
HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
for (HPredecessorIterator it(dominator_candidate); !it.Done();
it.Advance()) {
HBasicBlock* predecessor = it.Current();
// Don't count back edges.
if (predecessor->block_id() < dominator_candidate->block_id()) {
outstanding_successors--;
}
}
// If more successors than predecessors have been seen in the loop up to
// now, it's not possible to guarantee that the current block dominates
// all of the blocks with higher IDs. In this case, assume conservatively
// that those paths through loop that don't go through the current block
// contain all of the loop's dependencies. Also be careful to record
// dominator information about the current loop that's being processed,
// and not nested loops, which will be processed when
// AssignLoopSuccessorDominators gets called on their header.
ASSERT(outstanding_successors >= 0);
HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
if (outstanding_successors == 0 &&
(parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
dominator_candidate->MarkAsLoopSuccessorDominator();
}
HControlInstruction* end = dominator_candidate->end();
for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
HBasicBlock* successor = it.Current();
// Only count successors that remain inside the loop and don't loop back
// to a loop header.
if (successor->block_id() > dominator_candidate->block_id() &&
successor->block_id() <= last->block_id()) {
// Backwards edges must land on loop headers.
ASSERT(successor->block_id() > dominator_candidate->block_id() ||
successor->IsLoopHeader());
outstanding_successors++;
}
}
}
}
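To make the edge counting concrete, here is a minimal standalone sketch (not V8 code; the Block struct, the block ids, and the toy CFG are invented for illustration) that runs the same bookkeeping over a small loop: header B0, a diamond B1 -> {B2, B3} -> B4, and a back edge B4 -> B0. A block whose outstanding edge count reaches zero dominates every later block in the loop.

#include <cstdio>
#include <vector>

// Hypothetical block record; ids follow reverse post order as in V8.
struct Block {
  std::vector<int> predecessors;
  std::vector<int> successors;
};

int main() {
  // Toy loop: B0 (header) -> B1 -> {B2, B3} -> B4, back edge B4 -> B0.
  // Id -1 stands for the pre-header, which sits outside the loop.
  std::vector<Block> blocks = {
      /* B0 */ {{-1, 4}, {1}},
      /* B1 */ {{0}, {2, 3}},
      /* B2 */ {{1}, {4}},
      /* B3 */ {{1}, {4}},
      /* B4 */ {{2, 3}, {0}}};
  int last = 4;         // last back-edge block
  int outstanding = 1;  // one edge from the pre-header
  std::printf("B0 dominates loop successors (header)\n");
  for (int j = 0; j <= last; ++j) {
    for (int p : blocks[j].predecessors) {
      if (p < j) --outstanding;  // don't count back edges
    }
    if (outstanding == 0 && j != 0) {
      std::printf("B%d dominates loop successors\n", j);
    }
    for (int s : blocks[j].successors) {
      // Only count successor edges that stay inside the loop.
      if (s > j && s <= last) ++outstanding;
    }
  }
  return 0;
}

This prints B0, B1, and B4: while B2 is being visited, the edge B1 -> B3 is still outstanding, so neither diamond arm can claim to dominate B4.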
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
for (int i = 0; i < predecessors_.length(); ++i) {
if (predecessors_[i] == predecessor) return i;
@ -646,9 +703,7 @@ Handle<Code> HGraph::Compile(CompilationInfo* info) {
MacroAssembler assembler(info->isolate(), NULL, 0);
LCodeGen generator(chunk, &assembler, info);
if (FLAG_eliminate_empty_blocks) {
chunk->MarkEmptyBlocks();
}
if (generator.GenerateCode()) {
if (FLAG_trace_codegen) {
@ -752,10 +807,12 @@ void HGraph::Postorder(HBasicBlock* block,
void HGraph::AssignDominators() {
HPhase phase("Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
if (blocks_[i]->IsLoopHeader()) {
HBasicBlock* block = blocks_[i];
if (block->IsLoopHeader()) {
// Only the first predecessor of a loop header is from outside the loop.
// All others are back edges, and thus cannot dominate the loop header.
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
block->AssignCommonDominator(block->predecessors()->first());
block->AssignLoopSuccessorDominators();
} else {
for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
@ -1373,7 +1430,8 @@ class HGlobalValueNumberer BASE_EMBEDDED {
void LoopInvariantCodeMotion();
void ProcessLoopBlock(HBasicBlock* block,
HBasicBlock* before_loop,
GVNFlagSet loop_kills);
GVNFlagSet loop_kills,
GVNFlagSet* accumulated_first_time_depends);
bool AllowCodeMotion();
bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
@ -1398,6 +1456,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
bool HGlobalValueNumberer::Analyze() {
removed_side_effects_ = false;
ComputeBlockSideEffects();
if (FLAG_loop_invariant_code_motion) {
LoopInvariantCodeMotion();
@ -1409,6 +1468,12 @@ bool HGlobalValueNumberer::Analyze() {
void HGlobalValueNumberer::ComputeBlockSideEffects() {
// The Analyze phase of GVN can be called multiple times. Clear loop side
// effects before computing them to erase the contents from previous Analyze
// passes.
for (int i = 0; i < loop_side_effects_.length(); ++i) {
loop_side_effects_[i].RemoveAll();
}
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
// Compute side effects for the block.
HBasicBlock* block = graph_->blocks()->at(i);
@ -1446,18 +1511,22 @@ void HGlobalValueNumberer::LoopInvariantCodeMotion() {
block->block_id(),
side_effects.ToIntegral());
GVNFlagSet accumulated_first_time_depends;
HBasicBlock* last = block->loop_information()->GetLastBackEdge();
for (int j = block->block_id(); j <= last->block_id(); ++j) {
ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
&accumulated_first_time_depends);
}
}
}
}
void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
void HGlobalValueNumberer::ProcessLoopBlock(
HBasicBlock* block,
HBasicBlock* loop_header,
GVNFlagSet loop_kills) {
GVNFlagSet loop_kills,
GVNFlagSet* accumulated_first_time_depends) {
HBasicBlock* pre_header = loop_header->predecessors()->at(0);
GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
@ -1466,11 +1535,40 @@ void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
HInstruction* instr = block->first();
while (instr != NULL) {
HInstruction* next = instr->next();
if (instr->CheckFlag(HValue::kUseGVN) &&
!instr->gvn_flags().ContainsAnyOf(depends_flags)) {
TraceGVN("Checking instruction %d (%s)\n",
bool hoisted = false;
if (instr->CheckFlag(HValue::kUseGVN)) {
TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
"loop kills 0x%X\n",
instr->id(),
instr->Mnemonic(),
instr->gvn_flags().ToIntegral(),
depends_flags.ToIntegral());
bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
if (!can_hoist && instr->IsTransitionElementsKind()) {
// It's only possible to hoist one-time side effects if there are no
// dependencies on their changes from the loop header to the current
// instruction.
GVNFlagSet converted_changes =
HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
TraceGVN("Checking dependencies on one-time instruction %d (%s) "
"converted changes 0x%X, accumulated depends 0x%X\n",
instr->id(),
instr->Mnemonic());
instr->Mnemonic(),
converted_changes.ToIntegral(),
accumulated_first_time_depends->ToIntegral());
// It's possible to hoist one-time side effects from the current loop
// only if they dominate all of the successor blocks in the same loop
// and no instructions with conflicting Changes/DependsOn flags
// intervene between the loop header and the current instruction.
bool in_nested_loop = block != loop_header &&
((block->parent_loop_header() != loop_header) ||
block->IsLoopHeader());
can_hoist = !in_nested_loop &&
block->IsLoopSuccessorDominator() &&
!accumulated_first_time_depends->ContainsAnyOf(converted_changes);
}
if (can_hoist) {
bool inputs_loop_invariant = true;
for (int i = 0; i < instr->OperandCount(); ++i) {
if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
@ -1479,12 +1577,23 @@ void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
}
if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
TraceGVN("Found loop invariant instruction %d\n", instr->id());
TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
// Move the instruction out of the loop.
instr->Unlink();
instr->InsertBefore(pre_header->end());
if (instr->HasSideEffects()) removed_side_effects_ = true;
hoisted = true;
}
}
}
if (!hoisted) {
// If an instruction is not hoisted, we have to account for its side
// effects when hoisting later HTransitionElementsKind instructions.
accumulated_first_time_depends->Add(instr->DependsOnFlags());
GVNFlagSet converted_changes =
HValue::ConvertChangesToDependsFlags(instr->SideEffectFlags());
accumulated_first_time_depends->Add(converted_changes);
}
instr = next;
}
}
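The scattered conditions above condense into one predicate. A minimal sketch, assuming a plain bitset stands in for GVNFlagSet and an invented Changes/DependsOn bit pairing (not V8's real encoding):

#include <cstdint>

typedef uint32_t GVNFlagSet;  // one bit per GVN flag; layout is assumed

// Stand-in for HValue::ConvertChangesToDependsFlags. Assumed layout for
// the sketch: Changes flags occupy the low 16 bits and each DependsOn flag
// is the same bit shifted up by 16, preserving the 1:1 pairing that the
// conversion relies on.
GVNFlagSet ConvertChangesToDepends(GVNFlagSet changes) {
  return changes << 16;
}

// A one-time side effect (e.g. HTransitionElementsKind) may be hoisted
// only from the loop it belongs to directly, only if it dominates the rest
// of the loop, and only if nothing skipped so far depends on what it
// changes.
bool CanHoistOneTimeSideEffect(GVNFlagSet accumulated_first_time_depends,
                               GVNFlagSet instr_changes,
                               bool in_nested_loop,
                               bool dominates_loop_successors) {
  GVNFlagSet converted = ConvertChangesToDepends(instr_changes);
  return !in_nested_loop &&
         dominates_loop_successors &&
         (accumulated_first_time_depends & converted) == 0;
}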
@ -2334,7 +2443,7 @@ HGraph* HGraphBuilder::CreateGraph() {
// Handle implicit declaration of the function name in named function
// expressions before other declarations.
if (scope->is_function_scope() && scope->function() != NULL) {
HandleDeclaration(scope->function(), CONST, NULL);
HandleVariableDeclaration(scope->function(), CONST, NULL);
}
VisitDeclarations(scope->declarations());
AddSimulate(AstNode::kDeclarationsId);
@ -2392,7 +2501,8 @@ HGraph* HGraphBuilder::CreateGraph() {
// could only be discovered by removing side-effect-generating instructions
// during the first pass.
if (FLAG_smi_only_arrays && removed_side_effects) {
gvn.Analyze();
removed_side_effects = gvn.Analyze();
ASSERT(!removed_side_effects);
}
}
@ -4796,8 +4906,8 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
// Do a quick check on source code length to avoid parsing large
// inlining candidates.
if ((FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize)
|| target->shared()->SourceSize() > kUnlimitedMaxSourceSize) {
if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize)
|| target_shared->SourceSize() > kUnlimitedMaxSourceSize) {
TraceInline(target, caller, "target text too big");
return false;
}
@ -4807,6 +4917,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, "target not inlineable");
return false;
}
if (target_shared->dont_inline() || target_shared->dont_crankshaft()) {
TraceInline(target, caller, "target contains unsupported syntax [early]");
return false;
}
int nodes_added = target_shared->ast_node_count();
if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
nodes_added > kUnlimitedMaxInlinedSize) {
TraceInline(target, caller, "target AST is too large [early]");
return false;
}
#if !defined(V8_TARGET_ARCH_IA32)
// Target must be able to use caller's context.
@ -4851,8 +4972,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
return false;
}
int count_before = AstNode::Count();
// Parse and allocate variables.
CompilationInfo target_info(target);
if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
@ -4872,11 +4991,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
FunctionLiteral* function = target_info.function();
// Count the number of AST nodes added by inlining this call.
int nodes_added = AstNode::Count() - count_before;
// The following conditions must be checked again after re-parsing, because
// earlier the information might not have been complete due to lazy parsing.
nodes_added = function->ast_node_count();
if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
nodes_added > kUnlimitedMaxInlinedSize) {
TraceInline(target, caller, "target AST is too large");
TraceInline(target, caller, "target AST is too large [late]");
return false;
}
AstProperties::Flags* flags(function->flags());
if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
TraceInline(target, caller, "target contains unsupported syntax [late]");
return false;
}
@ -4895,13 +5020,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
return false;
}
}
// All statements in the body must be inlineable.
for (int i = 0, count = function->body()->length(); i < count; ++i) {
if (!function->body()->at(i)->IsInlineable()) {
TraceInline(target, caller, "target contains unsupported syntax");
return false;
}
}
// Generate the deoptimization data for the unoptimized version of
// the target function if we don't already have it.
@ -5050,7 +5168,38 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
switch (id) {
case kMathRound:
case kMathFloor:
case kMathAbs:
case kMathSqrt:
case kMathLog:
case kMathSin:
case kMathCos:
if (expr->arguments()->length() == 1) {
HValue* argument = Pop();
HValue* context = environment()->LookupContext();
Drop(1); // Receiver.
HUnaryMathOperation* op =
new(zone()) HUnaryMathOperation(context, argument, id);
op->set_position(expr->position());
if (drop_extra) Drop(1); // Optionally drop the function.
ast_context()->ReturnInstruction(op, expr->id());
return true;
}
break;
default:
// Not supported for inlining yet.
break;
}
return false;
}
bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
HValue* receiver,
Handle<Map> receiver_map,
CheckType check_type) {
@ -5147,7 +5296,7 @@ bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
case kMathRandom:
if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
Drop(1);
Drop(1); // Receiver.
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
AddInstruction(global_object);
@ -5315,10 +5464,15 @@ void HGraphBuilder::VisitCall(Call* expr) {
Handle<Map> receiver_map = (types == NULL || types->is_empty())
? Handle<Map>::null()
: types->first();
if (TryInlineBuiltinFunction(expr,
if (TryInlineBuiltinMethodCall(expr,
receiver,
receiver_map,
expr->check_type())) {
if (FLAG_trace_inlining) {
PrintF("Inlining builtin ");
expr->target()->ShortPrint();
PrintF("\n");
}
return;
}
@ -5389,6 +5543,14 @@ void HGraphBuilder::VisitCall(Call* expr) {
IsGlobalObject());
environment()->SetExpressionStackAt(receiver_index, global_receiver);
if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
if (FLAG_trace_inlining) {
PrintF("Inlining builtin ");
expr->target()->ShortPrint();
PrintF("\n");
}
return;
}
if (TryInline(expr)) return;
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
@ -5415,6 +5577,16 @@ void HGraphBuilder::VisitCall(Call* expr) {
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
if (FLAG_trace_inlining) {
PrintF("Inlining builtin ");
expr->target()->ShortPrint();
PrintF("\n");
}
return;
}
if (TryInline(expr, true)) { // Drop function from environment.
return;
} else {
@ -6368,12 +6540,12 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
}
void HGraphBuilder::VisitDeclaration(Declaration* decl) {
HandleDeclaration(decl->proxy(), decl->mode(), decl->fun());
void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
HandleVariableDeclaration(decl->proxy(), decl->mode(), decl->fun());
}
void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
void HGraphBuilder::HandleVariableDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function) {
Variable* var = proxy->var();
@ -6410,6 +6582,31 @@ void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
}
void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
// TODO(rossberg)
}
void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
// TODO(rossberg)
}
void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
// TODO(rossberg)
}
void HGraphBuilder::VisitModulePath(ModulePath* module) {
// TODO(rossberg)
}
void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
// TODO(rossberg)
}
// Generators for inline runtime functions.
// Support for types.
void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
@ -7200,7 +7397,10 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
}
PrintEmptyProperty("xhandlers");
PrintEmptyProperty("flags");
const char* flags = current->IsLoopSuccessorDominator()
? "dom-loop-succ"
: "";
PrintStringProperty("flags", flags);
if (current->dominator() != NULL) {
PrintBlockProperty("dominator", current->dominator()->block_id());

29
deps/v8/src/hydrogen.h

@ -126,6 +126,7 @@ class HBasicBlock: public ZoneObject {
int PredecessorIndexOf(HBasicBlock* predecessor) const;
void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
void AssignCommonDominator(HBasicBlock* other);
void AssignLoopSuccessorDominators();
void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses) {
FinishExit(CreateDeoptimize(has_uses));
@ -149,6 +150,13 @@ class HBasicBlock: public ZoneObject {
bool IsDeoptimizing() const { return is_deoptimizing_; }
void MarkAsDeoptimizing() { is_deoptimizing_ = true; }
bool IsLoopSuccessorDominator() const {
return dominates_loop_successors_;
}
void MarkAsLoopSuccessorDominator() {
dominates_loop_successors_ = true;
}
inline Zone* zone();
#ifdef DEBUG
@ -182,6 +190,22 @@ class HBasicBlock: public ZoneObject {
HBasicBlock* parent_loop_header_;
bool is_inline_return_target_;
bool is_deoptimizing_;
bool dominates_loop_successors_;
};
class HPredecessorIterator BASE_EMBEDDED {
public:
explicit HPredecessorIterator(HBasicBlock* block)
: predecessor_list_(block->predecessors()), current_(0) { }
bool Done() { return current_ >= predecessor_list_->length(); }
HBasicBlock* Current() { return predecessor_list_->at(current_); }
void Advance() { current_++; }
private:
const ZoneList<HBasicBlock*>* predecessor_list_;
int current_;
};
@ -815,7 +839,7 @@ class HGraphBuilder: public AstVisitor {
INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
#undef INLINE_FUNCTION_GENERATOR_DECLARATION
void HandleDeclaration(VariableProxy* proxy,
void HandleVariableDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function);
@ -918,10 +942,11 @@ class HGraphBuilder: public AstVisitor {
bool TryCallApply(Call* expr);
bool TryInline(Call* expr, bool drop_extra = false);
bool TryInlineBuiltinFunction(Call* expr,
bool TryInlineBuiltinMethodCall(Call* expr,
HValue* receiver,
Handle<Map> receiver_map,
CheckType check_type);
bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);
// If --trace-inlining, print a line of the inlining trace. Inlining
// succeeded if the reason string is NULL and failed if there is a

4
deps/v8/src/ia32/code-stubs-ia32.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -5022,7 +5022,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
__ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));

34
deps/v8/src/ia32/full-codegen-ia32.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -129,6 +129,26 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
#endif
// We can optionally optimize based on counters rather than statistical
// sampling.
if (info->ShouldSelfOptimize()) {
if (FLAG_trace_opt) {
PrintF("[adding self-optimization header to %s]\n",
*info->function()->debug_name()->ToCString());
}
MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
JSGlobalPropertyCell* cell;
if (maybe_cell->To(&cell)) {
__ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
Immediate(Smi::FromInt(1)));
Handle<Code> compile_stub(
isolate()->builtins()->builtin(Builtins::kLazyRecompile));
STATIC_ASSERT(kSmiTag == 0);
__ j(zero, compile_stub);
}
}
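In C-like pseudocode the emitted header behaves roughly as follows (a sketch of the behavior, not the real V8 code; the starting value mirrors Compiler::kCallsUntilPrimitiveOpt, whose definition is not shown in this diff, so 100 here is an assumed number):

#include <cstdint>
#include <cstdio>

static int32_t counter_cell = 100;  // JSGlobalPropertyCell payload (assumed)

static void LazyRecompile() {
  std::printf("triggering optimized recompilation\n");
}

void FunctionEntry() {
  counter_cell -= 1;        // __ sub(Operand::Cell(...), Immediate(Smi(1)))
  if (counter_cell == 0) {  // __ j(zero, compile_stub)
    LazyRecompile();        // jump to Builtins::kLazyRecompile
  }
  // ... the unoptimized body runs here until the counter reaches zero.
}

int main() {
  for (int i = 0; i < 100; ++i) FunctionEntry();
  return 0;
}

Each call pays one subtract and one conditional jump, which is what makes the counter header cheap enough to place in unoptimized code.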
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). ecx is zero for method calls and non-zero for
@ -261,11 +281,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@ -681,8 +701,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function,
int* global_count) {
FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
@ -691,7 +710,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
++(*global_count);
++global_count_;
break;
case Variable::PARAMETER:
@ -771,9 +790,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(esi); // The context is the first argument.

1
deps/v8/src/ia32/lithium-codegen-ia32.cc

@ -622,7 +622,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);

25
deps/v8/src/ia32/macro-assembler-ia32.cc

@ -764,8 +764,7 @@ void MacroAssembler::LeaveApiExitFrame() {
}
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type,
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@ -776,25 +775,21 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
// We will build up the handler from the bottom by pushing on the stack.
// First compute the state and push the frame pointer and context.
unsigned state = StackHandler::OffsetField::encode(handler_index);
if (try_location == IN_JAVASCRIPT) {
push(ebp);
push(esi);
state |= (type == TRY_CATCH_HANDLER)
? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
: StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
} else {
ASSERT(try_location == IN_JS_ENTRY);
// First push the frame pointer and context.
if (kind == StackHandler::JS_ENTRY) {
// The frame pointer does not point to a JS frame so we save NULL for
// ebp. We expect the code throwing an exception to check ebp before
// dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
push(Immediate(Smi::FromInt(0))); // No context.
state |= StackHandler::KindField::encode(StackHandler::ENTRY);
} else {
push(ebp);
push(esi);
}
// Push the state and the code object.
unsigned state =
StackHandler::IndexField::encode(handler_index) |
StackHandler::KindField::encode(kind);
push(Immediate(state));
Push(CodeObject());
@ -904,7 +899,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
STATIC_ASSERT(StackHandler::ENTRY == 0);
STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
test(Operand(esp, StackHandlerConstants::kStateOffset),
Immediate(StackHandler::KindField::kMask));
j(not_zero, &fetch_next);
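The state word now packs the handler index and the handler kind into a single immediate. A sketch of that encoding with assumed bit widths (the real layout comes from the StackHandler BitFields, which are not shown in this diff):

#include <cassert>
#include <cstdint>

// Assumed kinds; the diff only guarantees that JS_ENTRY == 0.
enum Kind { JS_ENTRY = 0, CATCH = 1, FINALLY = 2 };

const uint32_t kKindBits = 2;  // assumed width of StackHandler::KindField
const uint32_t kKindMask = (1u << kKindBits) - 1;

// Mirrors IndexField::encode(handler_index) | KindField::encode(kind).
uint32_t EncodeState(Kind kind, int handler_index) {
  return (static_cast<uint32_t>(handler_index) << kKindBits) |
         static_cast<uint32_t>(kind);
}

int main() {
  uint32_t state = EncodeState(JS_ENTRY, 0);
  // ThrowUncatchable can walk the chain testing only KindField against
  // zero, which is why STATIC_ASSERT(StackHandler::JS_ENTRY == 0) matters.
  assert((state & kKindMask) == JS_ENTRY);
  return 0;
}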

4
deps/v8/src/ia32/macro-assembler-ia32.h

@ -491,9 +491,7 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link it into try handler chain.
void PushTryHandler(CodeLocation try_location,
HandlerType type,
int handler_index);
void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();

69
deps/v8/src/ia32/stub-cache-ia32.cc

@ -1345,25 +1345,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
} else {
Label call_builtin;
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
// Get the elements array of the object.
__ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
__ cmp(FieldOperand(edi, HeapObject::kMapOffset),
Immediate(factory()->fixed_array_map()));
__ j(not_equal, &call_builtin);
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
// Get the array's length into eax and calculate new length.
__ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ add(eax, Immediate(Smi::FromInt(argc)));
// Get the element's length into ecx.
__ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
// Get the elements' length into ecx.
__ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmp(eax, ecx);
@ -1376,29 +1376,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
// Push the element.
__ lea(edx, FieldOperand(ebx,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ mov(Operand(edx, 0), ecx);
// Store the value.
__ mov(FieldOperand(edi,
eax,
times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize),
ecx);
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
__ CheckFastObjectElements(edi, &call_builtin);
__ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
Label fast_object, not_fast_object;
__ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
__ CheckFastSmiOnlyElements(ebx, &call_builtin);
// edi: elements array
// edx: receiver
// ebx: map
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
ebx,
edi,
&call_builtin);
ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
// Restore edi.
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(ebx, &call_builtin);
}
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
// Push the element.
__ lea(edx, FieldOperand(ebx,
// Store the value.
__ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ mov(Operand(edx, 0), ecx);
__ RecordWrite(ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
__ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ ret((argc + 1) * kPointerSize);
@ -1408,11 +1431,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&call_builtin);
}
__ mov(edi, Operand(esp, argc * kPointerSize));
__ mov(ebx, Operand(esp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
__ JumpIfSmi(edi, &no_fast_elements_check);
__ JumpIfSmi(ebx, &no_fast_elements_check);
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
__ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
@ -1431,7 +1454,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
// Check if it's the end of elements.
__ lea(edx, FieldOperand(ebx,
__ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmp(edx, ecx);
@ -1444,7 +1467,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Push the argument...
__ mov(Operand(edx, 0), edi);
__ mov(Operand(edx, 0), ebx);
// ... and fill the rest with holes.
for (int i = 1; i < kAllocationDelta; i++) {
__ mov(Operand(edx, i * kPointerSize),
@ -1456,13 +1479,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
__ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
__ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to edx as finish sequence assumes it's here.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes.
__ add(FieldOperand(ebx, FixedArray::kLengthOffset),
__ add(FieldOperand(edi, FixedArray::kLengthOffset),
Immediate(Smi::FromInt(kAllocationDelta)));
// NOTE: This only happens in new-space, where we don't

5
deps/v8/src/ic-inl.h

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -29,6 +29,8 @@
#define V8_IC_INL_H_
#include "ic.h"
#include "compiler.h"
#include "debug.h"
#include "macro-assembler.h"
@ -89,6 +91,7 @@ void IC::SetTargetAtAddress(Address address, Code* target) {
Assembler::set_target_address_at(address, target->instruction_start());
target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
target);
PostPatching();
}

25
deps/v8/src/ic.cc

@ -292,6 +292,31 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
}
void IC::PostPatching() {
if (FLAG_watch_ic_patching) {
Isolate::Current()->runtime_profiler()->NotifyICChanged();
// We do not want to optimize until the ICs have settled down,
// so when they are patched, we postpone optimization for the
// current function and the functions above it on the stack that
// might want to inline this one.
StackFrameIterator it;
if (it.done()) return;
it.Advance();
static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1;
for (int i = 0; i < kStackFramesToMark; ++i) {
if (it.done()) return;
StackFrame* raw_frame = it.frame();
if (raw_frame->is_java_script()) {
JSFunction* function =
JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
function->shared()->set_profiler_ticks(0);
}
it.Advance();
}
}
}
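The interplay with the counter-based profiler can be sketched like this (a simplification, not V8 code; the FunctionProfile type and the tick threshold are invented):

// Each function accumulates profiler ticks; crossing a threshold makes it
// an optimization candidate. Patching an IC anywhere in the inlining
// window resets the ticks, postponing optimization until the ICs stop
// changing.
struct FunctionProfile {
  int profiler_ticks;
};

const int kTicksBeforeOptimizing = 32;  // assumed threshold for the sketch

bool BecomesCandidateOnTick(FunctionProfile* profile) {
  return ++profile->profiler_ticks >= kTicksBeforeOptimizing;
}

// Mirrors the loop above: the patched function and up to
// kMaxInliningLevels - 1 callers on the stack start counting over.
void OnICPatched(FunctionProfile** frames_in_window, int count) {
  for (int i = 0; i < count; ++i) {
    frames_in_window[i]->profiler_ticks = 0;
  }
}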
void IC::Clear(Address address) {
Code* target = GetTargetAtAddress(address);

1
deps/v8/src/ic.h

@ -165,6 +165,7 @@ class IC {
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address);
static inline void SetTargetAtAddress(Address address, Code* target);
static void PostPatching();
private:
// Frame pointer for the frame that uses (calls) the IC.

3
deps/v8/src/incremental-marking.cc

@ -505,7 +505,8 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
}
is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
heap_->mark_compact_collector()->StartCompaction();
heap_->mark_compact_collector()->StartCompaction(
MarkCompactCollector::INCREMENTAL_COMPACTION);
state_ = MARKING;

112
deps/v8/src/isolate.cc

@ -542,6 +542,18 @@ Handle<String> Isolate::StackTraceString() {
}
void Isolate::CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object) {
if (capture_stack_trace_for_uncaught_exceptions_) {
// Capture stack trace for a detailed exception message.
Handle<String> key = factory()->hidden_stack_trace_symbol();
Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
JSObject::SetHiddenProperty(error_object, key, stack_trace);
}
}
Handle<JSArray> Isolate::CaptureCurrentStackTrace(
int frame_limit, StackTrace::StackTraceOptions options) {
// Ensure no negative values.
@ -1011,7 +1023,7 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
// Find the top-most try-catch handler.
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
while (handler != NULL && !handler->is_try_catch()) {
while (handler != NULL && !handler->is_catch()) {
handler = handler->next();
}
@ -1037,22 +1049,39 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
}
void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
bool Isolate::IsErrorObject(Handle<Object> obj) {
if (!obj->IsJSObject()) return false;
String* error_key = *(factory()->LookupAsciiSymbol("$Error"));
Object* error_constructor =
js_builtins_object()->GetPropertyNoExceptionThrown(error_key);
for (Object* prototype = *obj; !prototype->IsNull();
prototype = prototype->GetPrototype()) {
if (!prototype->IsJSObject()) return false;
if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
return true;
}
}
return false;
}
void Isolate::DoThrow(Object* exception, MessageLocation* location) {
ASSERT(!has_pending_exception());
HandleScope scope;
Object* exception_object = Smi::FromInt(0);
bool is_object = exception->ToObject(&exception_object);
Handle<Object> exception_handle(exception_object);
Handle<Object> exception_handle(exception);
// Determine reporting and whether the exception is caught externally.
bool catchable_by_javascript = is_catchable_by_javascript(exception);
// Only real objects can be caught by JS.
ASSERT(!catchable_by_javascript || is_object);
bool can_be_caught_externally = false;
bool should_report_exception =
ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
bool report_exception = catchable_by_javascript && should_report_exception;
bool try_catch_needs_message =
can_be_caught_externally && try_catch_handler()->capture_message_;
bool bootstrapping = bootstrapper()->IsActive();
#ifdef ENABLE_DEBUGGER_SUPPORT
// Notify debugger of exception.
@ -1061,34 +1090,52 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
}
#endif
// Generate the message.
Handle<Object> message_obj;
MessageLocation potential_computed_location;
bool try_catch_needs_message =
can_be_caught_externally &&
try_catch_handler()->capture_message_;
// Generate the message if required.
if (report_exception || try_catch_needs_message) {
MessageLocation potential_computed_location;
if (location == NULL) {
// If no location was specified we use a computed one instead
// If no location was specified we use a computed one instead.
ComputeLocation(&potential_computed_location);
location = &potential_computed_location;
}
if (!bootstrapper()->IsActive()) {
// It's not safe to try to make message objects or collect stack
// traces while the bootstrapper is active since the infrastructure
// may not have been properly initialized.
// It's not safe to try to make message objects or collect stack traces
// while the bootstrapper is active since the infrastructure may not have
// been properly initialized.
if (!bootstrapping) {
Handle<String> stack_trace;
if (FLAG_trace_exception) stack_trace = StackTraceString();
Handle<JSArray> stack_trace_object;
if (report_exception && capture_stack_trace_for_uncaught_exceptions_) {
if (capture_stack_trace_for_uncaught_exceptions_) {
if (IsErrorObject(exception_handle)) {
// We fetch the stack trace that corresponds to this error object.
String* key = heap()->hidden_stack_trace_symbol();
Object* stack_property =
JSObject::cast(*exception_handle)->GetHiddenProperty(key);
// Property lookup may have failed. In this case it's probably not
// a valid Error object.
if (stack_property->IsJSArray()) {
stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
}
}
if (stack_trace_object.is_null()) {
// Not an error object; we capture the stack trace at the throw site.
stack_trace_object = CaptureCurrentStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
}
ASSERT(is_object); // Can't use the handle unless there's a real object.
message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
location, HandleVector<Object>(&exception_handle, 1), stack_trace,
}
Handle<Object> message_obj = MessageHandler::MakeMessageObject(
"uncaught_exception",
location,
HandleVector<Object>(&exception_handle, 1),
stack_trace,
stack_trace_object);
thread_local_top()->pending_message_obj_ = *message_obj;
if (location != NULL) {
thread_local_top()->pending_message_script_ = *location->script();
thread_local_top()->pending_message_start_pos_ = location->start_pos();
thread_local_top()->pending_message_end_pos_ = location->end_pos();
}
} else if (location != NULL && !location->script().is_null()) {
// We are bootstrapping and caught an error where the location is set
// and we have a script for the location.
@ -1104,30 +1151,13 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
// Save the message for reporting if the exception remains uncaught.
thread_local_top()->has_pending_message_ = report_exception;
if (!message_obj.is_null()) {
thread_local_top()->pending_message_obj_ = *message_obj;
if (location != NULL) {
thread_local_top()->pending_message_script_ = *location->script();
thread_local_top()->pending_message_start_pos_ = location->start_pos();
thread_local_top()->pending_message_end_pos_ = location->end_pos();
}
}
// Do not forget to clean catcher_ if the currently thrown exception
// cannot be caught. If necessary, ReThrow will update the catcher.
thread_local_top()->catcher_ = can_be_caught_externally ?
try_catch_handler() : NULL;
// NOTE: Notifying the debugger or generating the message
// may have caused new exceptions. For now, we just ignore
// that and set the pending exception to the original one.
if (is_object) {
set_pending_exception(*exception_handle);
} else {
// Failures are not on the heap so they neither need nor work with handles.
ASSERT(exception_handle->IsFailure());
set_pending_exception(exception);
}
}
@ -1163,8 +1193,8 @@ bool Isolate::IsExternallyCaught() {
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
while (handler != NULL && handler->address() < external_handler_address) {
ASSERT(!handler->is_try_catch());
if (handler->is_try_finally()) return false;
ASSERT(!handler->is_catch());
if (handler->is_finally()) return false;
handler = handler->next();
}

10
deps/v8/src/isolate.h

@ -362,7 +362,7 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
/* Serializer state. */ \
V(ExternalReferenceTable*, external_reference_table, NULL) \
/* AstNode state. */ \
V(unsigned, ast_node_id, 0) \
V(int, ast_node_id, 0) \
V(unsigned, ast_node_count, 0) \
/* SafeStackFrameIterator activations count. */ \
V(int, safe_stack_iterator_counter, 0) \
@ -703,6 +703,8 @@ class Isolate {
int frame_limit,
StackTrace::StackTraceOptions options);
void CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object);
// Returns whether the top context may access the given global object. If
// the result is false, the pending exception is guaranteed to be
// set.
@ -729,7 +731,7 @@ class Isolate {
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
Failure* PromoteScheduledException();
void DoThrow(MaybeObject* exception, MessageLocation* location);
void DoThrow(Object* exception, MessageLocation* location);
// Checks if exception should be reported and finds out if it's
// caught externally.
bool ShouldReportException(bool* can_be_caught_externally,
@ -1141,6 +1143,10 @@ class Isolate {
void InitializeDebugger();
// Traverse prototype chain to find out whether the object is derived from
// the Error object.
bool IsErrorObject(Handle<Object> obj);
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.

4
deps/v8/src/list-inl.h

@ -72,9 +72,9 @@ void List<T, P>::ResizeAdd(const T& element) {
template<typename T, class P>
void List<T, P>::ResizeAddInternal(const T& element) {
ASSERT(length_ >= capacity_);
// Grow the list capacity by 50%, but make sure to let it grow
// Grow the list capacity by 100%, but make sure to let it grow
// even when the capacity is zero (possible initial case).
int new_capacity = 1 + capacity_ + (capacity_ >> 1);
int new_capacity = 1 + 2 * capacity_;
// Since the element reference could be an element of the list, copy
// it out of the old backing storage before resizing.
T temp = element;
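The difference between the two growth policies is easy to see numerically. A throwaway snippet (illustration only) printing the capacity after each resize, starting from an empty list:

#include <cstdio>

int main() {
  int old_cap = 0;  // previous policy: grow by 50%
  int new_cap = 0;  // policy after this change: grow by 100%
  for (int resize = 1; resize <= 8; ++resize) {
    old_cap = 1 + old_cap + (old_cap >> 1);
    new_cap = 1 + 2 * new_cap;
    std::printf("resize %d: old=%d new=%d\n", resize, old_cap, new_cap);
  }
  return 0;
}

After eight resizes the capacities are 40 versus 255: doubling reaches a given capacity in fewer reallocate-and-copy passes, at the cost of more slack per list.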

16
deps/v8/src/macro-assembler.h

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -36,20 +36,6 @@ enum InvokeFlag {
};
enum CodeLocation {
IN_JAVASCRIPT,
IN_JS_ENTRY,
IN_C_ENTRY
};
enum HandlerType {
TRY_CATCH_HANDLER,
TRY_FINALLY_HANDLER,
JS_ENTRY_HANDLER
};
// Types of uncatchable exceptions.
enum UncatchableExceptionType {
OUT_OF_MEMORY,

31
deps/v8/src/mark-compact.cc

@ -242,14 +242,14 @@ static void TraceFragmentation(PagedSpace* space) {
}
bool MarkCompactCollector::StartCompaction() {
bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
if (!compacting_) {
ASSERT(evacuation_candidates_.length() == 0);
CollectEvacuationCandidates(heap()->old_pointer_space());
CollectEvacuationCandidates(heap()->old_data_space());
if (FLAG_compact_code_space) {
if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
CollectEvacuationCandidates(heap()->code_space());
} else if (FLAG_trace_fragmentation) {
TraceFragmentation(heap()->code_space());
@ -697,7 +697,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
// Don't start compaction if we are in the middle of an incremental
// marking cycle. We did not collect any slots.
if (!FLAG_never_compact && !was_marked_incrementally_) {
StartCompaction();
StartCompaction(NON_INCREMENTAL_COMPACTION);
}
PagedSpaces spaces;
@ -809,6 +809,8 @@ class CodeFlusher {
isolate_->heap()->mark_compact_collector()->
RecordCodeEntrySlot(slot, target);
RecordSharedFunctionInfoCodeSlot(shared);
candidate = next_candidate;
}
@ -831,12 +833,21 @@ class CodeFlusher {
candidate->set_code(lazy_compile);
}
RecordSharedFunctionInfoCodeSlot(candidate);
candidate = next_candidate;
}
shared_function_info_candidates_head_ = NULL;
}
void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
Object** slot = HeapObject::RawField(shared,
SharedFunctionInfo::kCodeOffset);
isolate_->heap()->mark_compact_collector()->
RecordSlot(slot, slot, HeapObject::cast(*slot));
}
static JSFunction** GetNextCandidateField(JSFunction* candidate) {
return reinterpret_cast<JSFunction**>(
candidate->address() + JSFunction::kCodeEntryOffset);
@ -1314,6 +1325,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
code,
heap);
// Saving a copy might create a pointer into a compaction candidate
// that was not observed by the marker. This might happen if the
// JSRegExp data was marked through the compilation cache before the
// marker reached the JSRegExp object.
FixedArray* data = FixedArray::cast(re->data());
Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii);
heap->mark_compact_collector()->
RecordSlot(slot, slot, code);
// Set a number in the 0-255 range to guarantee no smi overflow.
re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
Smi::FromInt(heap->sweep_generation() & 0xff),
@ -2352,9 +2373,11 @@ void MarkCompactCollector::AfterMarking() {
code_flusher_->ProcessCandidates();
}
if (!FLAG_watch_ic_patching) {
// Clean up dead objects from the runtime profiler.
heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
}
}
void MarkCompactCollector::ProcessMapCaches() {
@ -3360,9 +3383,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
heap_->UpdateReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
if (!FLAG_watch_ic_patching) {
// Update JSFunction pointers from the runtime profiler.
heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
&updating_visitor);
}
EvacuationWeakObjectRetainer evacuation_object_retainer;
heap()->ProcessWeakReferences(&evacuation_object_retainer);

7
deps/v8/src/mark-compact.h

@ -441,7 +441,12 @@ class MarkCompactCollector {
// Performs a global garbage collection.
void CollectGarbage();
bool StartCompaction();
enum CompactionMode {
INCREMENTAL_COMPACTION,
NON_INCREMENTAL_COMPACTION
};
bool StartCompaction(CompactionMode mode);
void AbortCompaction();

6
deps/v8/src/messages.js

@ -1078,9 +1078,9 @@ function captureStackTrace(obj, cons_opt) {
if (stackTraceLimit < 0 || stackTraceLimit > 10000) {
stackTraceLimit = 10000;
}
var raw_stack = %CollectStackTrace(cons_opt
? cons_opt
: captureStackTrace, stackTraceLimit);
var raw_stack = %CollectStackTrace(obj,
cons_opt ? cons_opt : captureStackTrace,
stackTraceLimit);
DefineOneShotAccessor(obj, 'stack', function (obj) {
return FormatRawStackTrace(obj, raw_stack);
});

4
deps/v8/src/mips/assembler-mips.cc

@ -1245,6 +1245,7 @@ void Assembler::and_(Register rd, Register rs, Register rt) {
void Assembler::andi(Register rt, Register rs, int32_t j) {
ASSERT(is_uint16(j));
GenInstrImmediate(ANDI, rs, rt, j);
}
@ -1255,6 +1256,7 @@ void Assembler::or_(Register rd, Register rs, Register rt) {
void Assembler::ori(Register rt, Register rs, int32_t j) {
ASSERT(is_uint16(j));
GenInstrImmediate(ORI, rs, rt, j);
}
@ -1265,6 +1267,7 @@ void Assembler::xor_(Register rd, Register rs, Register rt) {
void Assembler::xori(Register rt, Register rs, int32_t j) {
ASSERT(is_uint16(j));
GenInstrImmediate(XORI, rs, rt, j);
}
@ -1445,6 +1448,7 @@ void Assembler::swr(Register rd, const MemOperand& rs) {
void Assembler::lui(Register rd, int32_t j) {
ASSERT(is_uint16(j));
GenInstrImmediate(LUI, zero_reg, rd, j);
}

39
deps/v8/src/mips/builtins-mips.cc

@ -116,7 +116,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
__ LoadGlobalInitialConstructedArrayMap(array_function, scratch2, scratch1);
__ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@ -212,8 +212,7 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
__ LoadGlobalInitialConstructedArrayMap(array_function, scratch2,
elements_array_storage);
__ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ Assert(
@ -924,22 +923,15 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// t4: JSObject
__ bind(&allocated);
__ push(t4);
// Push the function and the allocated receiver from the stack.
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ lw(a1, MemOperand(sp, kPointerSize));
__ MultiPushReversed(a1.bit() | t4.bit());
__ push(t4);
// Reload the number of arguments from the stack.
// a1: constructor function
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ lw(a3, MemOperand(sp, 4 * kPointerSize));
// sp[1]: receiver
// sp[2]: constructor function
// sp[3]: number of arguments (smi-tagged)
__ lw(a1, MemOperand(sp, 2 * kPointerSize));
__ lw(a3, MemOperand(sp, 3 * kPointerSize));
// Set up pointer to last argument.
__ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
@ -953,10 +945,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a2: address of last argument (caller sp)
// a3: number of arguments (smi-tagged)
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
// sp[1]: receiver
// sp[2]: constructor function
// sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ jmp(&entry);
__ bind(&loop);
@ -984,14 +975,6 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
// Pop the function from the stack.
// v0: result
// sp[0]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ Pop();
// Restore context from the frame.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

2
deps/v8/src/mips/code-stubs-mips.cc

@ -4140,7 +4140,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
__ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bal(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their

62
deps/v8/src/mips/ic-mips.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -1198,14 +1198,16 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
Label slow, array, extra, check_if_double_array;
Label fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
Label transition_smi_elements, finish_object_store, non_double_value;
Label transition_double_elements;
// Register usage.
Register value = a0;
Register key = a1;
Register receiver = a2;
Register elements = a3; // Elements array of the receiver.
Register receiver_map = a3;
Register elements_map = t2;
Register receiver_map = t3;
Register elements = t3; // Elements array of the receiver.
// t0 and t1 are used as general scratch registers.
// Check that the key is a smi.
@ -1298,9 +1300,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ mov(v0, value);
__ bind(&non_smi_value);
// Escape to slow case when writing non-smi into smi-only array.
__ CheckFastObjectElements(receiver_map, scratch_value, &slow);
// Escape to elements kind transition case.
__ CheckFastObjectElements(receiver_map, scratch_value,
&transition_smi_elements);
// Fast elements array, store the value to the elements backing store.
__ bind(&finish_object_store);
__ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
__ Addu(address, address, scratch_value);
@ -1326,13 +1330,57 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
key,
receiver,
elements,
a3,
t0,
t1,
t2,
t3,
&slow);
&transition_double_elements);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, value);
__ bind(&transition_smi_elements);
// Transition the array appropriately depending on the value type.
__ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
__ Branch(&non_double_value, ne, t0, Operand(at));
// Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
// FAST_DOUBLE_ELEMENTS and complete the store.
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_DOUBLE_ELEMENTS,
receiver_map,
t0,
&slow);
ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
__ bind(&non_double_value);
// Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
receiver_map,
t0,
&slow);
ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
__ bind(&transition_double_elements);
// Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
// HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
// and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
__ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
FAST_ELEMENTS,
receiver_map,
t0,
&slow);
ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
}
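The three transition paths added above reduce to one decision. A sketch of the store-time logic (the helper and its signature are invented for illustration; the elements kinds are V8's):

enum ElementsKind {
  FAST_SMI_ONLY_ELEMENTS,
  FAST_DOUBLE_ELEMENTS,
  FAST_ELEMENTS
};

// Pick the elements-kind transition a generic keyed store performs when
// the incoming value doesn't fit the receiver's current backing store.
ElementsKind TransitionFor(ElementsKind current,
                           bool value_is_smi,
                           bool value_is_heap_number) {
  if (current == FAST_SMI_ONLY_ELEMENTS && !value_is_smi) {
    // Heap numbers get an unboxed double backing store; any other heap
    // object forces a generic object backing store.
    return value_is_heap_number ? FAST_DOUBLE_ELEMENTS : FAST_ELEMENTS;
  }
  if (current == FAST_DOUBLE_ELEMENTS && !value_is_smi &&
      !value_is_heap_number) {
    // A non-number written into a double array boxes every element.
    return FAST_ELEMENTS;
  }
  return current;  // smis store into any fast kind without a transition
}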

1
deps/v8/src/mips/lithium-codegen-mips.cc

@ -640,7 +640,6 @@ void LCodeGen::DeoptimizeIf(Condition cc,
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);

77
deps/v8/src/mips/macro-assembler-mips.cc

@ -771,18 +771,18 @@ void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
} else if (!(j.imm32_ & kHiMask)) {
ori(rd, zero_reg, j.imm32_);
} else if (!(j.imm32_ & kImm16Mask)) {
lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
} else {
lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
} else if (MustUseReg(j.rmode_) || gen2instr) {
if (MustUseReg(j.rmode_)) {
RecordRelocInfo(j.rmode_, j.imm32_);
}
// We need always the same number of instructions as we may need to patch
// We always need the same number of instructions as we may need to patch
// this code to load another value which may need 2 instructions to load.
lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
}
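The new operand expression is not just cosmetic: with the ASSERT(is_uint16(j)) now enforced in lui(), masking before an arithmetic right shift can sign-extend a negative immediate past 16 bits. A standalone demonstration (illustration only; assumes the usual arithmetic shift for negative int32_t values):

#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  const int32_t kHiMask = static_cast<int32_t>(0xFFFF0000);
  const int kLuiShift = 16;
  int32_t imm = static_cast<int32_t>(0xEDCC0000);  // a negative immediate

  // Old expression: mask first, then shift; the arithmetic shift
  // sign-extends, so the result no longer fits in 16 bits.
  int32_t old_operand = (imm & kHiMask) >> kLuiShift;  // 0xFFFFEDCC
  // New expression: shift first, then mask; always a valid uint16.
  int32_t new_operand = (imm >> kLuiShift) & 0xFFFF;   // 0x0000EDCC

  std::printf("old=%#x new=%#x\n",
              static_cast<unsigned>(old_operand),
              static_cast<unsigned>(new_operand));
  assert(new_operand >= 0 && new_operand <= 0xFFFF);
  assert(!(old_operand >= 0 && old_operand <= 0xFFFF));
  return 0;
}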
@ -2576,8 +2576,7 @@ void MacroAssembler::DebugBreak() {
// ---------------------------------------------------------------------------
// Exception handling.
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type,
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@ -2589,30 +2588,23 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
// For the JSEntry handler, we must preserve a0-a3 and s0.
// t1-t3 are available. We will build up the handler from the bottom by
// pushing on the stack. First compute the state.
unsigned state = StackHandler::OffsetField::encode(handler_index);
if (try_location == IN_JAVASCRIPT) {
state |= (type == TRY_CATCH_HANDLER)
? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
: StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
} else {
ASSERT(try_location == IN_JS_ENTRY);
state |= StackHandler::KindField::encode(StackHandler::ENTRY);
}
// pushing on the stack.
// Set up the code object (t1) and the state (t2) for pushing.
unsigned state =
StackHandler::IndexField::encode(handler_index) |
StackHandler::KindField::encode(kind);
li(t1, Operand(CodeObject()));
li(t2, Operand(state));
// Push the frame pointer, context, state, and code object.
if (try_location == IN_JAVASCRIPT) {
MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
} else {
if (kind == StackHandler::JS_ENTRY) {
ASSERT_EQ(Smi::FromInt(0), 0);
// The second zero_reg indicates no context.
// The first zero_reg is the NULL frame pointer.
// The operands are reversed to match the order of MultiPush/Pop.
Push(zero_reg, zero_reg, t2, t1);
} else {
MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
}
// Link the current handler as the next handler.
@ -2727,7 +2719,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
STATIC_ASSERT(StackHandler::ENTRY == 0);
STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
And(a2, a2, Operand(StackHandler::KindField::kMask));
Branch(&fetch_next, ne, a2, Operand(zero_reg));
@ -4279,26 +4271,41 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
}
void MacroAssembler::LoadGlobalInitialConstructedArrayMap(
void MacroAssembler::LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
ElementsKind transitioned_kind,
Register map_in_out,
Register scratch,
Label* no_map_match) {
// Load the global or builtins object from the current context.
lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is the same as the expected cached map.
int expected_index =
Context::GetContextMapIndexFromElementsKind(expected_kind);
lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
Branch(no_map_match, ne, map_in_out, Operand(at));
// Use the transitioned cached map.
int trans_index =
Context::GetContextMapIndexFromElementsKind(transitioned_kind);
lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
}
void MacroAssembler::LoadInitialArrayMap(
Register function_in, Register scratch, Register map_out) {
ASSERT(!function_in.is(map_out));
Label done;
lw(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
// Load the global or builtins object from the current context.
lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is same as the cached map.
lw(at, MemOperand(
scratch, Context::SlotOffset(Context::SMI_JS_ARRAY_MAP_INDEX)));
Branch(&done, ne, map_out, Operand(at));
// Use the cached transitioned map.
lw(map_out,
MemOperand(scratch,
Context::SlotOffset(Context::OBJECT_JS_ARRAY_MAP_INDEX)));
LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
map_out,
scratch,
&done);
}
bind(&done);
}
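LoadTransitionedArrayMapConditional is the generic replacement for the old smi-array-specific helper: it compares map_in_out against the global context's cached map for expected_kind and, only on a match, loads the cached map for transitioned_kind. The same decision in stand-alone C++; the types and context layout are stand-ins, not V8's:

    enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, kElementsKindCount };
    struct Map {};
    struct GlobalContext { Map* cached_array_map[kElementsKindCount]; };

    // Returns the transitioned map, or 0 to signal no_map_match (caller bails out).
    Map* TransitionArrayMapIfCached(GlobalContext* ctx, Map* map_in,
                                    ElementsKind expected, ElementsKind transitioned) {
      if (ctx->cached_array_map[expected] != map_in) return 0;  // no_map_match
      return ctx->cached_array_map[transitioned];               // use cached map
    }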

19 deps/v8/src/mips/macro-assembler-mips.h

@@ -772,8 +772,19 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
// Load the initial map for new Arrays of a given type.
void LoadGlobalInitialConstructedArrayMap(Register function_in,
// Conditionally load the cached Array transitioned map of type
// transitioned_kind from the global context if the map in register
// map_in_out is the cached Array map in the global context of
// expected_kind.
void LoadTransitionedArrayMapConditional(
ElementsKind expected_kind,
ElementsKind transitioned_kind,
Register map_in_out,
Register scratch,
Label* no_map_match);
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
Register map_out);
@@ -854,9 +865,7 @@
// Exception handling.
// Push a new try handler and link into try handler chain.
void PushTryHandler(CodeLocation try_location,
HandlerType type,
int handler_index);
void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.

52 deps/v8/src/mips/stub-cache-mips.cc

@@ -1468,7 +1468,10 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Ret();
} else {
Label call_builtin;
Register elements = a3;
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
Register elements = t2;
Register end_elements = t1;
// Get the elements array of the object.
__ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
@@ -1480,16 +1483,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&call_builtin,
DONT_DO_SMI_CHECK);
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
// Get the array's length into v0 and calculate new length.
__ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ Addu(v0, v0, Operand(Smi::FromInt(argc)));
// Get the element's length.
// Get the elements' length.
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
@@ -1503,7 +1503,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Push the element.
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1519,13 +1519,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ bind(&with_write_barrier);
__ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ CheckFastObjectElements(t2, t2, &call_builtin);
__ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
Label fast_object, not_fast_object;
__ CheckFastObjectElements(a3, t3, &not_fast_object);
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
__ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
// edx: receiver
// r3: map
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
a3,
t3,
&call_builtin);
__ mov(a2, receiver);
ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(a3, a3, &call_builtin);
}
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Push the element.
// Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1573,23 +1593,23 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Addu(end_elements, elements, end_elements);
__ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
__ li(t3, Operand(new_space_allocation_top));
__ lw(t2, MemOperand(t3));
__ Branch(&call_builtin, ne, end_elements, Operand(t2));
__ lw(a3, MemOperand(t3));
__ Branch(&call_builtin, ne, end_elements, Operand(a3));
__ li(t5, Operand(new_space_allocation_limit));
__ lw(t5, MemOperand(t5));
__ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
__ Branch(&call_builtin, hi, t2, Operand(t5));
__ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
__ Branch(&call_builtin, hi, a3, Operand(t5));
// We fit and could grow elements.
// Update new_space_allocation_top.
__ sw(t2, MemOperand(t3));
__ sw(a3, MemOperand(t3));
// Push the argument.
__ sw(a2, MemOperand(end_elements));
// Fill the rest with holes.
__ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
__ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
__ sw(t2, MemOperand(end_elements, i * kPointerSize));
__ sw(a3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.
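The grow path above only works when the elements backing store was the most recent new-space allocation: its end must sit exactly at the allocation top, and kAllocationDelta more words must fit under the limit, in which case bumping the top pointer extends the array in place. A hedged model of that check, using plain addresses instead of tagged pointers:

    #include <cstdint>

    const int kAllocationDelta = 4;   // same constant the stub uses
    const int kPointerSize = 4;       // 32-bit MIPS

    bool TryGrowInPlace(uintptr_t elements_end, uintptr_t* alloc_top,
                        uintptr_t alloc_limit) {
      if (elements_end != *alloc_top) return false;      // something else allocated
      uintptr_t new_top = *alloc_top + kAllocationDelta * kPointerSize;
      if (new_top > alloc_limit) return false;           // new space is full
      *alloc_top = new_top;  // claim the words; caller stores the value + holes
      return true;
    }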

20 deps/v8/src/objects-inl.h

@@ -3530,6 +3530,8 @@ ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
@@ -3576,6 +3578,8 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
@@ -3626,6 +3630,9 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#endif
@@ -3708,6 +3715,9 @@ BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
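Each of these one-line entries expands to a getter/setter pair over a raw field; the BOOL_ACCESSORS variants read and write a single bit of the compiler_hints value. Roughly the shape of what gets generated for dont_inline; this is simplified (the real macros go through READ_FIELD/WRITE_FIELD on tagged memory) and the bit position is invented:

    class SharedFunctionInfoModel {
     public:
      bool dont_inline() const { return (compiler_hints_ >> kDontInline) & 1; }
      void set_dont_inline(bool v) {
        if (v) compiler_hints_ |=  (1 << kDontInline);
        else   compiler_hints_ &= ~(1 << kDontInline);
      }
     private:
      static const int kDontInline = 12;  // illustrative bit index only
      int compiler_hints_;
    };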
@@ -3777,16 +3787,6 @@ void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
}
Smi* SharedFunctionInfo::deopt_counter() {
return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
}
void SharedFunctionInfo::set_deopt_counter(Smi* value) {
WRITE_FIELD(this, kDeoptCounterOffset, value);
}
bool SharedFunctionInfo::is_compiled() {
return code() !=
Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);

104 deps/v8/src/objects.cc

@@ -3773,12 +3773,15 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
(descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
descriptors->IsProperty(0)) {
ASSERT(descriptors->GetType(0) == FIELD);
(descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
if (descriptors->GetType(0) == FIELD) {
Object* hidden_store =
this->FastPropertyAt(descriptors->GetFieldIndex(0));
return StringDictionary::cast(hidden_store);
} else {
ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
descriptors->GetType(0) == MAP_TRANSITION);
}
}
} else {
PropertyAttributes attributes;
@@ -3819,11 +3822,14 @@ MaybeObject* JSObject::SetHiddenPropertiesDictionary(
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
(descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
descriptors->IsProperty(0)) {
ASSERT(descriptors->GetType(0) == FIELD);
(descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
if (descriptors->GetType(0) == FIELD) {
this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
return this;
} else {
ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
descriptors->GetType(0) == MAP_TRANSITION);
}
}
}
MaybeObject* store_result =
@@ -4247,11 +4253,14 @@ bool JSReceiver::IsSimpleEnum() {
}
int Map::NumberOfDescribedProperties() {
int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
if (descs->IsProperty(i)) result++;
PropertyDetails details(descs->GetDetails(i));
if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
result++;
}
}
return result;
}
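With the PropertyAttributes filter (defaulting to NONE), one routine now covers both the unfiltered count and the enumerable-only count; JSObject::NumberOfLocalProperties further down simply forwards to it, and the removed NumberOfEnumProperties turns into a call site like this sketch:

    // Counting only enumerable properties: filter out DONT_ENUM entries.
    int enumerable = map->NumberOfDescribedProperties(
        static_cast<PropertyAttributes>(DONT_ENUM));
    // The default argument preserves the old unfiltered behaviour.
    int all = map->NumberOfDescribedProperties();  // filter == NONE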
@@ -5502,7 +5511,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey {
for (int i = 0; i < maps_->length(); ++i) {
bool match_found = false;
for (int j = 0; j < other_maps.length(); ++j) {
if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) {
if (*(maps_->at(i)) == *(other_maps.at(j))) {
match_found = true;
break;
}
@@ -5721,6 +5730,11 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
}
static bool InsertionPointFound(String* key1, String* key2) {
return key1->Hash() > key2->Hash() || key1 == key2;
}
MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag) {
// Transitions are only kept when inserting another transition.
@@ -5793,28 +5807,24 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
// Copy the descriptors, filtering out transitions and null descriptors,
// and inserting or replacing a descriptor.
uint32_t descriptor_hash = descriptor->GetKey()->Hash();
int from_index = 0;
int to_index = 0;
for (; from_index < number_of_descriptors(); from_index++) {
String* key = GetKey(from_index);
if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) {
break;
}
if (IsNullDescriptor(from_index)) continue;
if (remove_transitions && IsTransitionOnly(from_index)) continue;
new_descriptors->CopyFrom(to_index++, this, from_index, witness);
}
new_descriptors->Set(to_index++, descriptor, witness);
int insertion_index = -1;
int from_index = 0;
while (from_index < number_of_descriptors()) {
if (insertion_index < 0 &&
InsertionPointFound(GetKey(from_index), descriptor->GetKey())) {
insertion_index = to_index++;
if (replacing) from_index++;
for (; from_index < number_of_descriptors(); from_index++) {
if (IsNullDescriptor(from_index)) continue;
if (remove_transitions && IsTransitionOnly(from_index)) continue;
} else {
if (!(IsNullDescriptor(from_index) ||
(remove_transitions && IsTransitionOnly(from_index)))) {
new_descriptors->CopyFrom(to_index++, this, from_index, witness);
}
from_index++;
}
}
if (insertion_index < 0) insertion_index = to_index++;
new_descriptors->Set(insertion_index, descriptor, witness);
ASSERT(to_index == new_descriptors->number_of_descriptors());
SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
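The rewritten copy no longer splits into a before/after pair of loops; a single walk uses InsertionPointFound (first existing key whose hash exceeds the new key's, or the identical key, which means replacement) to reserve the insertion slot, and still filters null and transition-only descriptors. The control flow, modelled over a plain hash-sorted array:

    #include <vector>
    #include <cstdint>
    #include <cstddef>

    // Model: entries sorted by hash; an equal entry is replaced in place.
    // (V8 also compares keys by identity, since distinct keys can share a hash.)
    std::vector<uint32_t> CopyInsert(const std::vector<uint32_t>& keys, uint32_t key) {
      std::vector<uint32_t> out;
      size_t from = 0;
      int insertion_index = -1;
      while (from < keys.size()) {
        if (insertion_index < 0 && (keys[from] > key || keys[from] == key)) {
          bool replacing = (keys[from] == key);
          insertion_index = static_cast<int>(out.size());
          out.push_back(key);
          if (replacing) from++;        // skip the descriptor being replaced
        } else {
          out.push_back(keys[from++]);  // CopyFrom (null/transition filters elided)
        }
      }
      if (insertion_index < 0) out.push_back(key);  // new key sorts last
      return out;
    }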
@@ -5829,14 +5839,14 @@ MaybeObject* DescriptorArray::RemoveTransitions() {
// not be allocated.
// Compute the size of the map transition entries to be removed.
int num_removed = 0;
int new_number_of_descriptors = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
if (!IsProperty(i)) num_removed++;
if (IsProperty(i)) new_number_of_descriptors++;
}
// Allocate the new descriptor array.
DescriptorArray* new_descriptors;
{ MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed);
{ MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
return maybe_result;
}
@@ -7606,13 +7616,10 @@ bool SharedFunctionInfo::HasSourceCode() {
}
Object* SharedFunctionInfo::GetSourceCode() {
Isolate* isolate = GetIsolate();
if (!HasSourceCode()) return isolate->heap()->undefined_value();
HandleScope scope(isolate);
Object* source = Script::cast(script())->source();
return *SubString(Handle<String>(String::cast(source), isolate),
start_position(), end_position());
Handle<Object> SharedFunctionInfo::GetSourceCode() {
if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
Handle<String> source(String::cast(Script::cast(script())->source()));
return SubString(source, start_position(), end_position());
}
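Switching the return type to Handle<Object> makes the result GC-safe: SubString allocates, a raw Object* could be invalidated by a collection, and that is why the old version had to juggle its own HandleScope. A typical caller under the new signature might look like this sketch:

    Handle<Object> source = shared->GetSourceCode();
    if (!source->IsUndefined()) {
      Handle<String> text = Handle<String>::cast(source);
      // text stays valid across any GC triggered by later allocations.
    }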
@@ -10355,24 +10362,9 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) {
int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
int result = 0;
for (int i = 0; i < descs->number_of_descriptors(); i++) {
PropertyDetails details(descs->GetDetails(i));
if (details.IsProperty() && (details.attributes() & filter) == 0) {
result++;
}
}
return result;
} else {
return property_dictionary()->NumberOfElementsFilterAttributes(filter);
}
}
int JSObject::NumberOfEnumProperties() {
return NumberOfLocalProperties(static_cast<PropertyAttributes>(DONT_ENUM));
return HasFastProperties() ?
map()->NumberOfDescribedProperties(filter) :
property_dictionary()->NumberOfElementsFilterAttributes(filter);
}
@@ -10493,7 +10485,7 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
// purpose of this function is to provide reflection information for the object
// mirrors.
void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index));
ASSERT(storage->length() >= (NumberOfLocalProperties() - index));
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {

53 deps/v8/src/objects.h

@@ -1638,7 +1638,7 @@ class JSObject: public JSReceiver {
Handle<String> key,
Handle<Object> value);
// Returns a failure if a GC is required.
MaybeObject* SetHiddenProperty(String* key, Object* value);
MUST_USE_RESULT MaybeObject* SetHiddenProperty(String* key, Object* value);
// Gets the value of a hidden property with the given key. Returns undefined
// if the property doesn't exist (or if called on a detached proxy),
// otherwise returns the value set for the key.
@@ -1807,9 +1807,7 @@ class JSObject: public JSReceiver {
// Returns the number of properties on this object filtering out properties
// with the specified attributes (ignoring interceptors).
int NumberOfLocalProperties(PropertyAttributes filter);
// Returns the number of enumerable properties (ignoring interceptors).
int NumberOfEnumProperties();
int NumberOfLocalProperties(PropertyAttributes filter = NONE);
// Fill in details for properties into storage starting at the specified
// index.
void GetLocalPropertyNames(FixedArray* storage, int index);
@@ -4638,8 +4636,9 @@ class Map: public HeapObject {
// Returns the next free property index (only valid for FAST MODE).
int NextFreePropertyIndex();
// Returns the number of properties described in instance_descriptors.
int NumberOfDescribedProperties();
// Returns the number of properties described in instance_descriptors
// filtering out properties with the specified attributes.
int NumberOfDescribedProperties(PropertyAttributes filter = NONE);
// Casting.
static inline Map* cast(Object* obj);
@@ -4697,12 +4696,6 @@ class Map: public HeapObject {
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
// Returns true if this map and |other| describe equivalent objects.
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentTo(Map* other) {
return EquivalentToForNormalization(other, KEEP_INOBJECT_PROPERTIES);
}
// Returns the contents of this map's descriptor array for the given string.
// May return NULL. |safe_to_add_transition| is set to false and NULL
// is returned if adding transitions is not allowed.
@@ -5204,8 +5197,14 @@ class SharedFunctionInfo: public HeapObject {
// A counter used to determine when to stress the deoptimizer with a
// deopt.
inline Smi* deopt_counter();
inline void set_deopt_counter(Smi* counter);
inline int deopt_counter();
inline void set_deopt_counter(int counter);
inline int profiler_ticks();
inline void set_profiler_ticks(int ticks);
inline int ast_node_count();
inline void set_ast_node_count(int count);
// Add information on assignments of the form this.x = ...;
void SetThisPropertyAssignmentsInfo(
@@ -5279,6 +5278,12 @@ class SharedFunctionInfo: public HeapObject {
// through the API, which does not change this flag).
DECL_BOOLEAN_ACCESSORS(is_anonymous)
// Indicates that the function cannot be crankshafted.
DECL_BOOLEAN_ACCESSORS(dont_crankshaft)
// Indicates that the function cannot be inlined.
DECL_BOOLEAN_ACCESSORS(dont_inline)
// Indicates whether or not the code in the shared function support
// deoptimization.
inline bool has_deoptimization_support();
@@ -5316,7 +5321,7 @@ class SharedFunctionInfo: public HeapObject {
// [source code]: Source code for the function.
bool HasSourceCode();
Object* GetSourceCode();
Handle<Object> GetSourceCode();
inline int opt_count();
inline void set_opt_count(int opt_count);
@@ -5373,12 +5378,12 @@ class SharedFunctionInfo: public HeapObject {
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
static const int kDeoptCounterOffset =
static const int kProfilerTicksOffset =
kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
kDeoptCounterOffset + kPointerSize;
kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -5396,8 +5401,11 @@ class SharedFunctionInfo: public HeapObject {
kCompilerHintsOffset + kPointerSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kPointerSize;
static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
static const int kDeoptCounterOffset =
kAstNodeCountOffset + kPointerSize;
// Total size.
static const int kSize = kOptCountOffset + kPointerSize;
static const int kSize = kDeoptCounterOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
@@ -5409,7 +5417,7 @@ class SharedFunctionInfo: public HeapObject {
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
kDeoptCounterOffset + kPointerSize;
kProfilerTicksOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -5433,8 +5441,11 @@ class SharedFunctionInfo: public HeapObject {
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kIntSize;
static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
// Total size.
static const int kSize = kOptCountOffset + kIntSize;
static const int kSize = kDeoptCounterOffset + kIntSize;
#endif
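Both layout branches follow the same idiom: each offset is the previous offset plus the previous field's width, and kSize is the last offset plus one slot, so inserting kAstNodeCountOffset and kDeoptCounterOffset after kOptCountOffset grows the object by exactly two fields. A toy version of the arithmetic, with offsets invented for illustration:

    struct Layout {
      static const int kPointerSize = 4;
      static const int kOptCountOffset = 0;  // previously the last field
      static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
      static const int kDeoptCounterOffset = kAstNodeCountOffset + kPointerSize;
      static const int kSize = kDeoptCounterOffset + kPointerSize;  // total size
    };
    // Three pointer-size slots: kSize == 12 on a 32-bit host.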
@@ -5481,6 +5492,8 @@ class SharedFunctionInfo: public HeapObject {
kBoundFunction,
kIsAnonymous,
kNameShouldPrintAsAnonymous,
kDontCrankshaft,
kDontInline,
kCompilerHintsCount // Pseudo entry
};

400 deps/v8/src/parser.cc

@@ -481,62 +481,6 @@ class Parser::BlockState BASE_EMBEDDED {
};
class Parser::FunctionState BASE_EMBEDDED {
public:
FunctionState(Parser* parser, Scope* scope, Isolate* isolate);
~FunctionState();
int NextMaterializedLiteralIndex() {
return next_materialized_literal_index_++;
}
int materialized_literal_count() {
return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
}
int NextHandlerIndex() { return next_handler_index_++; }
int handler_count() { return next_handler_index_; }
void SetThisPropertyAssignmentInfo(
bool only_simple_this_property_assignments,
Handle<FixedArray> this_property_assignments) {
only_simple_this_property_assignments_ =
only_simple_this_property_assignments;
this_property_assignments_ = this_property_assignments;
}
bool only_simple_this_property_assignments() {
return only_simple_this_property_assignments_;
}
Handle<FixedArray> this_property_assignments() {
return this_property_assignments_;
}
void AddProperty() { expected_property_count_++; }
int expected_property_count() { return expected_property_count_; }
private:
// Used to assign an index to each literal that needs materialization in
// the function. Includes regexp literals, and boilerplate for object and
// array literals.
int next_materialized_literal_index_;
// Used to assign a per-function index to try and catch handlers.
int next_handler_index_;
// Properties count estimation.
int expected_property_count_;
// Keeps track of assignments to properties of this. Used for
// optimizing constructors.
bool only_simple_this_property_assignments_;
Handle<FixedArray> this_property_assignments_;
Parser* parser_;
FunctionState* outer_function_state_;
Scope* outer_scope_;
unsigned saved_ast_node_id_;
};
Parser::FunctionState::FunctionState(Parser* parser,
Scope* scope,
Isolate* isolate)
@@ -548,7 +492,8 @@ Parser::FunctionState::FunctionState(Parser* parser,
parser_(parser),
outer_function_state_(parser->current_function_state_),
outer_scope_(parser->top_scope_),
saved_ast_node_id_(isolate->ast_node_id()) {
saved_ast_node_id_(isolate->ast_node_id()),
factory_(isolate) {
parser->top_scope_ = scope;
parser->current_function_state_ = this;
isolate->set_ast_node_id(AstNode::kDeclarationsId + 1);
@@ -602,12 +547,16 @@ Parser::Parser(Handle<Script> script,
fni_(NULL),
allow_natives_syntax_((parser_flags & kAllowNativesSyntax) != 0),
allow_lazy_((parser_flags & kAllowLazy) != 0),
allow_modules_((parser_flags & kAllowModules) != 0),
stack_overflow_(false),
parenthesized_function_(false) {
AstNode::ResetIds();
if ((parser_flags & kLanguageModeMask) == EXTENDED_MODE) {
scanner().SetHarmonyScoping(true);
}
if ((parser_flags & kAllowModules) != 0) {
scanner().SetHarmonyModules(true);
}
}
@@ -674,8 +623,7 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
}
if (ok) {
result = new(zone()) FunctionLiteral(
isolate(),
result = factory()->NewFunctionLiteral(
no_name,
top_scope_,
body,
@@ -685,8 +633,10 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
function_state.only_simple_this_property_assignments(),
function_state.this_property_assignments(),
0,
false, // Does not have duplicate parameters.
FunctionLiteral::ANONYMOUS_EXPRESSION,
false); // Does not have duplicate parameters.
false); // Top-level literal doesn't count for the AST's properties.
result->set_ast_properties(factory()->visitor()->ast_properties());
} else if (stack_overflow_) {
isolate()->StackOverflow();
}
@@ -1274,7 +1224,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
case Token::SEMICOLON:
Next();
return EmptyStatement();
return factory()->NewEmptyStatement();
case Token::IF:
stmt = ParseIfStatement(labels, ok);
@@ -1322,7 +1272,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
// one must take great care not to treat it as a
// fall-through. It is much easier just to wrap the entire
// try-statement in a statement block and put the labels there
Block* result = new(zone()) Block(isolate(), labels, 1, false);
Block* result = factory()->NewBlock(labels, 1, false);
Target target(&this->target_stack_, result);
TryStatement* statement = ParseTryStatement(CHECK_OK);
if (statement) {
@@ -1454,9 +1404,9 @@ VariableProxy* Parser::Declare(Handle<String> name,
// a performance issue since it may lead to repeated
// Runtime::DeclareContextSlot() calls.
VariableProxy* proxy = declaration_scope->NewUnresolved(
name, scanner().location().beg_pos);
factory(), name, scanner().location().beg_pos);
declaration_scope->AddDeclaration(
new(zone()) Declaration(proxy, mode, fun, top_scope_));
factory()->NewVariableDeclaration(proxy, mode, fun, top_scope_));
if ((mode == CONST || mode == CONST_HARMONY) &&
declaration_scope->is_global_scope()) {
@@ -1564,10 +1514,11 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// introduced dynamically when we meet their declarations, whereas
// other functions are set up when entering the surrounding scope.
SharedFunctionInfoLiteral* lit =
new(zone()) SharedFunctionInfoLiteral(isolate(), shared);
factory()->NewSharedFunctionInfoLiteral(shared);
VariableProxy* var = Declare(name, VAR, NULL, true, CHECK_OK);
return new(zone()) ExpressionStatement(new(zone()) Assignment(
isolate(), Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
return factory()->NewExpressionStatement(
factory()->NewAssignment(
Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
}
@@ -1589,7 +1540,7 @@ Statement* Parser::ParseFunctionDeclaration(bool* ok) {
// initial value upon entering the corresponding scope.
VariableMode mode = is_extended_mode() ? LET : VAR;
Declare(name, mode, fun, true, CHECK_OK);
return EmptyStatement();
return factory()->NewEmptyStatement();
}
@@ -1603,7 +1554,7 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
// (ECMA-262, 3rd, 12.2)
//
// Construct block expecting 16 statements.
Block* result = new(zone()) Block(isolate(), labels, 16, false);
Block* result = factory()->NewBlock(labels, 16, false);
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
@@ -1626,7 +1577,7 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
// '{' SourceElement* '}'
// Construct block expecting 16 statements.
Block* body = new(zone()) Block(isolate(), labels, 16, false);
Block* body = factory()->NewBlock(labels, 16, false);
Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE);
// Parse the statements and collect escaping labels.
@@ -1676,8 +1627,8 @@ bool Parser::IsEvalOrArguments(Handle<String> string) {
// If the variable declaration declares exactly one non-const
// variable, then *var is set to that variable. In all other cases,
// *var is untouched; in particular, it is the caller's responsibility
// variable, then *out is set to that variable. In all other cases,
// *out is untouched; in particular, it is the caller's responsibility
// to initialize it properly. This mechanism is used for the parsing
// of 'for-in' loops.
Block* Parser::ParseVariableDeclarations(
@@ -1786,7 +1737,7 @@ Block* Parser::ParseVariableDeclarations(
// is inside an initializer block, it is ignored.
//
// Create new block with one expected declaration.
Block* block = new(zone()) Block(isolate(), NULL, 1, true);
Block* block = factory()->NewBlock(NULL, 1, true);
int nvars = 0; // the number of variables declared
Handle<String> name;
do {
@@ -1907,7 +1858,7 @@ Block* Parser::ParseVariableDeclarations(
// Compute the arguments for the runtime call.
ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
// We have at least 1 parameter.
arguments->Add(NewLiteral(name));
arguments->Add(factory()->NewLiteral(name));
CallRuntime* initialize;
if (is_const) {
@@ -1918,9 +1869,7 @@ Block* Parser::ParseVariableDeclarations(
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (1 or 2).
initialize =
new(zone()) CallRuntime(
isolate(),
initialize = factory()->NewCallRuntime(
isolate()->factory()->InitializeConstGlobal_symbol(),
Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
arguments);
@@ -1928,7 +1877,7 @@ Block* Parser::ParseVariableDeclarations(
// Add strict mode.
// We may want to pass singleton to avoid Literal allocations.
LanguageMode language_mode = initialization_scope->language_mode();
arguments->Add(NewNumberLiteral(language_mode));
arguments->Add(factory()->NewNumberLiteral(language_mode));
// Be careful not to assign a value to the global variable if
// we're in a with. The initialization value should not
@@ -1943,15 +1892,13 @@ Block* Parser::ParseVariableDeclarations(
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (2 or 3).
initialize =
new(zone()) CallRuntime(
isolate(),
initialize = factory()->NewCallRuntime(
isolate()->factory()->InitializeVarGlobal_symbol(),
Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
arguments);
}
block->AddStatement(new(zone()) ExpressionStatement(initialize));
block->AddStatement(factory()->NewExpressionStatement(initialize));
} else if (needs_init) {
// Constant initializations always assign to the declared constant which
// is always at the function scope level. This is only relevant for
@@ -1964,8 +1911,8 @@ Block* Parser::ParseVariableDeclarations(
ASSERT(proxy->var() != NULL);
ASSERT(value != NULL);
Assignment* assignment =
new(zone()) Assignment(isolate(), init_op, proxy, value, position);
block->AddStatement(new(zone()) ExpressionStatement(assignment));
factory()->NewAssignment(init_op, proxy, value, position);
block->AddStatement(factory()->NewExpressionStatement(assignment));
value = NULL;
}
@@ -1976,10 +1923,11 @@ Block* Parser::ParseVariableDeclarations(
// 'var' initializations are simply assignments (with all the consequences
// if they are inside a 'with' statement - they may change a 'with' object
// property).
VariableProxy* proxy = initialization_scope->NewUnresolved(name);
VariableProxy* proxy =
initialization_scope->NewUnresolved(factory(), name);
Assignment* assignment =
new(zone()) Assignment(isolate(), init_op, proxy, value, position);
block->AddStatement(new(zone()) ExpressionStatement(assignment));
factory()->NewAssignment(init_op, proxy, value, position);
block->AddStatement(factory()->NewExpressionStatement(assignment));
}
if (fni_ != NULL) fni_->Leave();
@@ -2059,7 +2007,7 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels,
// Parsed expression statement.
ExpectSemicolon(CHECK_OK);
return new(zone()) ExpressionStatement(expr);
return factory()->NewExpressionStatement(expr);
}
@@ -2077,10 +2025,9 @@ IfStatement* Parser::ParseIfStatement(ZoneStringList* labels, bool* ok) {
Next();
else_statement = ParseStatement(labels, CHECK_OK);
} else {
else_statement = EmptyStatement();
else_statement = factory()->NewEmptyStatement();
}
return new(zone()) IfStatement(
isolate(), condition, then_statement, else_statement);
return factory()->NewIfStatement(condition, then_statement, else_statement);
}
@@ -2110,7 +2057,7 @@ Statement* Parser::ParseContinueStatement(bool* ok) {
return NULL;
}
ExpectSemicolon(CHECK_OK);
return new(zone()) ContinueStatement(target);
return factory()->NewContinueStatement(target);
}
@@ -2128,7 +2075,8 @@ Statement* Parser::ParseBreakStatement(ZoneStringList* labels, bool* ok) {
// Parse labeled break statements that target themselves into
// empty statements, e.g. 'l1: l2: l3: break l2;'
if (!label.is_null() && ContainsLabel(labels, label)) {
return EmptyStatement();
ExpectSemicolon(CHECK_OK);
return factory()->NewEmptyStatement();
}
BreakableStatement* target = NULL;
target = LookupBreakTarget(label, CHECK_OK);
@@ -2145,7 +2093,7 @@ Statement* Parser::ParseBreakStatement(ZoneStringList* labels, bool* ok) {
return NULL;
}
ExpectSemicolon(CHECK_OK);
return new(zone()) BreakStatement(target);
return factory()->NewBreakStatement(target);
}
@@ -2165,11 +2113,11 @@ Statement* Parser::ParseReturnStatement(bool* ok) {
tok == Token::RBRACE ||
tok == Token::EOS) {
ExpectSemicolon(CHECK_OK);
result = new(zone()) ReturnStatement(GetLiteralUndefined());
result = factory()->NewReturnStatement(GetLiteralUndefined());
} else {
Expression* expr = ParseExpression(true, CHECK_OK);
ExpectSemicolon(CHECK_OK);
result = new(zone()) ReturnStatement(expr);
result = factory()->NewReturnStatement(expr);
}
// An ECMAScript program is considered syntactically incorrect if it
@@ -2182,7 +2130,7 @@ Statement* Parser::ParseReturnStatement(bool* ok) {
declaration_scope->is_eval_scope()) {
Handle<String> type = isolate()->factory()->illegal_return_symbol();
Expression* throw_error = NewThrowSyntaxError(type, Handle<Object>::null());
return new(zone()) ExpressionStatement(throw_error);
return factory()->NewExpressionStatement(throw_error);
}
return result;
}
@@ -2212,7 +2160,7 @@ Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) {
stmt = ParseStatement(labels, CHECK_OK);
with_scope->set_end_position(scanner().location().end_pos);
}
return new(zone()) WithStatement(expr, stmt);
return factory()->NewWithStatement(expr, stmt);
}
@@ -2254,7 +2202,7 @@ SwitchStatement* Parser::ParseSwitchStatement(ZoneStringList* labels,
// SwitchStatement ::
// 'switch' '(' Expression ')' '{' CaseClause* '}'
SwitchStatement* statement = new(zone()) SwitchStatement(isolate(), labels);
SwitchStatement* statement = factory()->NewSwitchStatement(labels);
Target target(&this->target_stack_, statement);
Expect(Token::SWITCH, CHECK_OK);
@@ -2290,8 +2238,7 @@ Statement* Parser::ParseThrowStatement(bool* ok) {
Expression* exception = ParseExpression(true, CHECK_OK);
ExpectSemicolon(CHECK_OK);
return new(zone()) ExpressionStatement(
new(zone()) Throw(isolate(), exception, pos));
return factory()->NewExpressionStatement(factory()->NewThrow(exception, pos));
}
@@ -2378,13 +2325,10 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
// If we have both, create an inner try/catch.
ASSERT(catch_scope != NULL && catch_variable != NULL);
int index = current_function_state_->NextHandlerIndex();
TryCatchStatement* statement = new(zone()) TryCatchStatement(index,
try_block,
catch_scope,
catch_variable,
catch_block);
TryCatchStatement* statement = factory()->NewTryCatchStatement(
index, try_block, catch_scope, catch_variable, catch_block);
statement->set_escaping_targets(try_collector.targets());
try_block = new(zone()) Block(isolate(), NULL, 1, false);
try_block = factory()->NewBlock(NULL, 1, false);
try_block->AddStatement(statement);
catch_block = NULL; // Clear to indicate it's been handled.
}
@@ -2394,17 +2338,12 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
ASSERT(finally_block == NULL);
ASSERT(catch_scope != NULL && catch_variable != NULL);
int index = current_function_state_->NextHandlerIndex();
result = new(zone()) TryCatchStatement(index,
try_block,
catch_scope,
catch_variable,
catch_block);
result = factory()->NewTryCatchStatement(
index, try_block, catch_scope, catch_variable, catch_block);
} else {
ASSERT(finally_block != NULL);
int index = current_function_state_->NextHandlerIndex();
result = new(zone()) TryFinallyStatement(index,
try_block,
finally_block);
result = factory()->NewTryFinallyStatement(index, try_block, finally_block);
// Combine the jump targets of the try block and the possible catch block.
try_collector.targets()->AddAll(*catch_collector.targets());
}
@@ -2419,7 +2358,7 @@ DoWhileStatement* Parser::ParseDoWhileStatement(ZoneStringList* labels,
// DoStatement ::
// 'do' Statement 'while' '(' Expression ')' ';'
DoWhileStatement* loop = new(zone()) DoWhileStatement(isolate(), labels);
DoWhileStatement* loop = factory()->NewDoWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::DO, CHECK_OK);
@@ -2450,7 +2389,7 @@ WhileStatement* Parser::ParseWhileStatement(ZoneStringList* labels, bool* ok) {
// WhileStatement ::
// 'while' '(' Expression ')' Statement
WhileStatement* loop = new(zone()) WhileStatement(isolate(), labels);
WhileStatement* loop = factory()->NewWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::WHILE, CHECK_OK);
@@ -2485,8 +2424,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
VariableProxy* each = top_scope_->NewUnresolved(name);
ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2495,7 +2434,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Statement* body = ParseStatement(NULL, CHECK_OK);
loop->Initialize(each, enumerable, body);
Block* result = new(zone()) Block(isolate(), NULL, 2, false);
Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(variable_statement);
result->AddStatement(loop);
top_scope_ = saved_scope;
@@ -2533,9 +2472,9 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// TODO(keuchel): Move the temporary variable to the block scope, after
// implementing stack allocated block scoped variables.
Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name);
VariableProxy* temp_proxy = new(zone()) VariableProxy(isolate(), temp);
VariableProxy* each = top_scope_->NewUnresolved(name);
ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
VariableProxy* temp_proxy = factory()->NewVariableProxy(temp);
VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2543,14 +2482,11 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Expect(Token::RPAREN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK);
Block* body_block = new(zone()) Block(isolate(), NULL, 3, false);
Assignment* assignment = new(zone()) Assignment(isolate(),
Token::ASSIGN,
each,
temp_proxy,
RelocInfo::kNoPosition);
Block* body_block = factory()->NewBlock(NULL, 3, false);
Assignment* assignment = factory()->NewAssignment(
Token::ASSIGN, each, temp_proxy, RelocInfo::kNoPosition);
Statement* assignment_statement =
new(zone()) ExpressionStatement(assignment);
factory()->NewExpressionStatement(assignment);
body_block->AddStatement(variable_statement);
body_block->AddStatement(assignment_statement);
body_block->AddStatement(body);
@@ -2577,7 +2513,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
isolate()->factory()->invalid_lhs_in_for_in_symbol();
expression = NewThrowReferenceError(type);
}
ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2594,13 +2530,13 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
return loop;
} else {
init = new(zone()) ExpressionStatement(expression);
init = factory()->NewExpressionStatement(expression);
}
}
}
// Standard 'for' loop
ForStatement* loop = new(zone()) ForStatement(isolate(), labels);
ForStatement* loop = factory()->NewForStatement(labels);
Target target(&this->target_stack_, loop);
// Parsed initializer at this point.
@@ -2615,7 +2551,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Statement* next = NULL;
if (peek() != Token::RPAREN) {
Expression* exp = ParseExpression(true, CHECK_OK);
next = new(zone()) ExpressionStatement(exp);
next = factory()->NewExpressionStatement(exp);
}
Expect(Token::RPAREN, CHECK_OK);
@@ -2635,7 +2571,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// for (; c; n) b
// }
ASSERT(init != NULL);
Block* result = new(zone()) Block(isolate(), NULL, 2, false);
Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(init);
result->AddStatement(loop);
result->set_block_scope(for_scope);
@@ -2659,8 +2595,8 @@ Expression* Parser::ParseExpression(bool accept_IN, bool* ok) {
Expect(Token::COMMA, CHECK_OK);
int position = scanner().location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
result = new(zone()) BinaryOperation(
isolate(), Token::COMMA, result, right, position);
result =
factory()->NewBinaryOperation(Token::COMMA, result, right, position);
}
return result;
}
@@ -2735,7 +2671,7 @@ Expression* Parser::ParseAssignmentExpression(bool accept_IN, bool* ok) {
fni_->Leave();
}
return new(zone()) Assignment(isolate(), op, expression, right, pos);
return factory()->NewAssignment(op, expression, right, pos);
}
@@ -2757,8 +2693,8 @@ Expression* Parser::ParseConditionalExpression(bool accept_IN, bool* ok) {
Expect(Token::COLON, CHECK_OK);
int right_position = scanner().peek_location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
return new(zone()) Conditional(
isolate(), expression, left, right, left_position, right_position);
return factory()->NewConditional(
expression, left, right, left_position, right_position);
}
@@ -2789,41 +2725,47 @@ Expression* Parser::ParseBinaryExpression(int prec, bool accept_IN, bool* ok) {
switch (op) {
case Token::ADD:
x = NewNumberLiteral(x_val + y_val);
x = factory()->NewNumberLiteral(x_val + y_val);
continue;
case Token::SUB:
x = NewNumberLiteral(x_val - y_val);
x = factory()->NewNumberLiteral(x_val - y_val);
continue;
case Token::MUL:
x = NewNumberLiteral(x_val * y_val);
x = factory()->NewNumberLiteral(x_val * y_val);
continue;
case Token::DIV:
x = NewNumberLiteral(x_val / y_val);
x = factory()->NewNumberLiteral(x_val / y_val);
continue;
case Token::BIT_OR:
x = NewNumberLiteral(DoubleToInt32(x_val) | DoubleToInt32(y_val));
case Token::BIT_OR: {
int value = DoubleToInt32(x_val) | DoubleToInt32(y_val);
x = factory()->NewNumberLiteral(value);
continue;
case Token::BIT_AND:
x = NewNumberLiteral(DoubleToInt32(x_val) & DoubleToInt32(y_val));
}
case Token::BIT_AND: {
int value = DoubleToInt32(x_val) & DoubleToInt32(y_val);
x = factory()->NewNumberLiteral(value);
continue;
case Token::BIT_XOR:
x = NewNumberLiteral(DoubleToInt32(x_val) ^ DoubleToInt32(y_val));
}
case Token::BIT_XOR: {
int value = DoubleToInt32(x_val) ^ DoubleToInt32(y_val);
x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SHL: {
int value = DoubleToInt32(x_val) << (DoubleToInt32(y_val) & 0x1f);
x = NewNumberLiteral(value);
x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SHR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
uint32_t value = DoubleToUint32(x_val) >> shift;
x = NewNumberLiteral(value);
x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SAR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
int value = ArithmeticShiftRight(DoubleToInt32(x_val), shift);
x = NewNumberLiteral(value);
x = factory()->NewNumberLiteral(value);
continue;
}
default:
@@ -2842,15 +2784,15 @@ Expression* Parser::ParseBinaryExpression(int prec, bool accept_IN, bool* ok) {
case Token::NE_STRICT: cmp = Token::EQ_STRICT; break;
default: break;
}
x = new(zone()) CompareOperation(isolate(), cmp, x, y, position);
x = factory()->NewCompareOperation(cmp, x, y, position);
if (cmp != op) {
// The comparison was negated - add a NOT.
x = new(zone()) UnaryOperation(isolate(), Token::NOT, x, position);
x = factory()->NewUnaryOperation(Token::NOT, x, position);
}
} else {
// We have a "normal" binary operation.
x = new(zone()) BinaryOperation(isolate(), op, x, y, position);
x = factory()->NewBinaryOperation(op, x, y, position);
}
}
}
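All of this folding happens only when both operands are number literals, and the bitwise cases reproduce JavaScript's ToInt32/ToUint32 coercions, including the masking of shift counts to five bits. A worked check of the shift semantics the parser bakes in:

    #include <cstdint>
    #include <cassert>

    int main() {
      // SHL: 1 << 33 folds as 1 << (33 & 0x1f) == 1 << 1 == 2.
      int32_t shl = static_cast<int32_t>(1) << (33 & 0x1f);
      assert(shl == 2);
      // SHR uses the unsigned coercion: 0x80000000 >>> 1 == 0x40000000.
      uint32_t shr = 0x80000000u >> (1 & 0x1f);
      assert(shr == 0x40000000u);
      // SAR keeps the sign bit: -8 >> 1 == -4 (arithmetic shift).
      int32_t sar = -8 >> (1 & 0x1f);
      assert(sar == -4);
      return 0;
    }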
@@ -2883,7 +2825,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
// Convert the literal to a boolean condition and negate it.
bool condition = literal->ToBoolean()->IsTrue();
Handle<Object> result(isolate()->heap()->ToBoolean(!condition));
return NewLiteral(result);
return factory()->NewLiteral(result);
} else if (literal->IsNumber()) {
// Compute some expressions involving only number literals.
double value = literal->Number();
@@ -2891,9 +2833,9 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
case Token::ADD:
return expression;
case Token::SUB:
return NewNumberLiteral(-value);
return factory()->NewNumberLiteral(-value);
case Token::BIT_NOT:
return NewNumberLiteral(~DoubleToInt32(value));
return factory()->NewNumberLiteral(~DoubleToInt32(value));
default:
break;
}
@@ -2910,7 +2852,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
}
}
return new(zone()) UnaryOperation(isolate(), op, expression, position);
return factory()->NewUnaryOperation(op, expression, position);
} else if (Token::IsCountOp(op)) {
op = Next();
@@ -2932,8 +2874,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
MarkAsLValue(expression);
int position = scanner().location().beg_pos;
return new(zone()) CountOperation(isolate(),
op,
return factory()->NewCountOperation(op,
true /* prefix */,
expression,
position);
@@ -2970,8 +2911,7 @@ Expression* Parser::ParsePostfixExpression(bool* ok) {
Token::Value next = Next();
int position = scanner().location().beg_pos;
expression =
new(zone()) CountOperation(isolate(),
next,
factory()->NewCountOperation(next,
false /* postfix */,
expression,
position);
@@ -2997,7 +2937,7 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
result = new(zone()) Property(isolate(), result, index, pos);
result = factory()->NewProperty(result, index, pos);
Expect(Token::RBRACK, CHECK_OK);
break;
}
@@ -3030,7 +2970,7 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
callee->IsVariable(isolate()->factory()->eval_symbol())) {
top_scope_->DeclarationScope()->RecordEvalCall();
}
result = NewCall(result, args, pos);
result = factory()->NewCall(result, args, pos);
break;
}
@@ -3038,10 +2978,8 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
result = new(zone()) Property(isolate(),
result,
NewLiteral(name),
pos);
result =
factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
@@ -3077,10 +3015,8 @@ Expression* Parser::ParseNewPrefix(PositionStack* stack, bool* ok) {
if (!stack->is_empty()) {
int last = stack->pop();
result = new(zone()) CallNew(isolate(),
result,
new(zone()) ZoneList<Expression*>(0),
last);
result = factory()->NewCallNew(
result, new(zone()) ZoneList<Expression*>(0), last);
}
return result;
}
@@ -3132,7 +3068,7 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
result = new(zone()) Property(isolate(), result, index, pos);
result = factory()->NewProperty(result, index, pos);
if (fni_ != NULL) {
if (index->IsPropertyName()) {
fni_->PushLiteralName(index->AsLiteral()->AsPropertyName());
@@ -3148,10 +3084,8 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
result = new(zone()) Property(isolate(),
result,
NewLiteral(name),
pos);
result =
factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
@@ -3160,7 +3094,7 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
// Consume one of the new prefixes (already parsed).
ZoneList<Expression*>* args = ParseArguments(CHECK_OK);
int last = stack->pop();
result = new(zone()) CallNew(isolate(), result, args, last);
result = factory()->NewCallNew(result, args, last);
break;
}
default:
@@ -3179,7 +3113,7 @@ DebuggerStatement* Parser::ParseDebuggerStatement(bool* ok) {
Expect(Token::DEBUGGER, CHECK_OK);
ExpectSemicolon(CHECK_OK);
return new(zone()) DebuggerStatement();
return factory()->NewDebuggerStatement();
}
@@ -3244,33 +3178,31 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
switch (peek()) {
case Token::THIS: {
Consume(Token::THIS);
result = new(zone()) VariableProxy(isolate(), top_scope_->receiver());
result = factory()->NewVariableProxy(top_scope_->receiver());
break;
}
case Token::NULL_LITERAL:
Consume(Token::NULL_LITERAL);
result = new(zone()) Literal(
isolate(), isolate()->factory()->null_value());
result = factory()->NewLiteral(isolate()->factory()->null_value());
break;
case Token::TRUE_LITERAL:
Consume(Token::TRUE_LITERAL);
result = new(zone()) Literal(
isolate(), isolate()->factory()->true_value());
result = factory()->NewLiteral(isolate()->factory()->true_value());
break;
case Token::FALSE_LITERAL:
Consume(Token::FALSE_LITERAL);
result = new(zone()) Literal(
isolate(), isolate()->factory()->false_value());
result = factory()->NewLiteral(isolate()->factory()->false_value());
break;
case Token::IDENTIFIER:
case Token::FUTURE_STRICT_RESERVED_WORD: {
Handle<String> name = ParseIdentifier(CHECK_OK);
if (fni_ != NULL) fni_->PushVariableName(name);
result = top_scope_->NewUnresolved(name, scanner().location().beg_pos);
result = top_scope_->NewUnresolved(
factory(), name, scanner().location().beg_pos);
break;
}
@@ -3280,14 +3212,14 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
result = NewNumberLiteral(value);
result = factory()->NewNumberLiteral(value);
break;
}
case Token::STRING: {
Consume(Token::STRING);
Handle<String> symbol = GetSymbol(CHECK_OK);
result = NewLiteral(symbol);
result = factory()->NewLiteral(symbol);
if (fni_ != NULL) fni_->PushLiteralName(symbol);
break;
}
@@ -3481,8 +3413,8 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
literals->set(0, Smi::FromInt(elements_kind));
literals->set(1, *element_values);
return new(zone()) ArrayLiteral(
isolate(), literals, values, literal_index, is_simple, depth);
return factory()->NewArrayLiteral(
literals, values, literal_index, is_simple, depth);
}
@@ -3759,9 +3691,7 @@ ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter,
CHECK_OK);
// Allow any number of parameters for compatibilty with JSC.
// Specification only allows zero parameters for get and one for set.
ObjectLiteral::Property* property =
new(zone()) ObjectLiteral::Property(is_getter, value);
return property;
return factory()->NewObjectLiteralProperty(is_getter, value);
} else {
ReportUnexpectedToken(next);
*ok = false;
@@ -3826,7 +3756,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
}
// Failed to parse as get/set property, so it's just a property
// called "get" or "set".
key = NewLiteral(id);
key = factory()->NewLiteral(id);
break;
}
case Token::STRING: {
@ -3835,10 +3765,10 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
if (fni_ != NULL) fni_->PushLiteralName(string);
uint32_t index;
if (!string.is_null() && string->AsArrayIndex(&index)) {
key = NewNumberLiteral(index);
key = factory()->NewNumberLiteral(index);
break;
}
key = NewLiteral(string);
key = factory()->NewLiteral(string);
break;
}
case Token::NUMBER: {
@@ -3847,14 +3777,14 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
key = NewNumberLiteral(value);
key = factory()->NewNumberLiteral(value);
break;
}
default:
if (Token::IsKeyword(next)) {
Consume(next);
Handle<String> string = GetSymbol(CHECK_OK);
key = NewLiteral(string);
key = factory()->NewLiteral(string);
} else {
// Unexpected token.
Token::Value next = Next();
@@ -3909,8 +3839,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
&is_simple,
&fast_elements,
&depth);
return new(zone()) ObjectLiteral(isolate(),
constant_properties,
return factory()->NewObjectLiteral(constant_properties,
properties,
literal_index,
is_simple,
@@ -3935,8 +3864,7 @@ Expression* Parser::ParseRegExpLiteral(bool seen_equal, bool* ok) {
Handle<String> js_flags = NextLiteralString(TENURED);
Next();
return new(zone()) RegExpLiteral(
isolate(), js_pattern, js_flags, literal_index);
return factory()->NewRegExpLiteral(js_pattern, js_flags, literal_index);
}
@@ -3967,7 +3895,7 @@ ZoneList<Expression*>* Parser::ParseArguments(bool* ok) {
class SingletonLogger : public ParserRecorder {
public:
SingletonLogger() : has_error_(false), start_(-1), end_(-1) { }
~SingletonLogger() { }
virtual ~SingletonLogger() { }
void Reset() { has_error_ = false; }
@@ -4088,6 +4016,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
bool only_simple_this_property_assignments;
Handle<FixedArray> this_property_assignments;
bool has_duplicate_parameters = false;
AstProperties ast_properties;
// Parse function body.
{ FunctionState function_state(this, scope, isolate());
top_scope_->SetScopeName(function_name);
@@ -4150,7 +4079,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
} else {
fvar_mode = CONST;
}
fvar = top_scope_->DeclareFunctionVar(function_name, fvar_mode);
fvar =
top_scope_->DeclareFunctionVar(function_name, fvar_mode, factory());
}
// Determine whether the function will be lazily compiled.
@@ -4237,13 +4167,13 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
if (!is_lazily_compiled) {
body = new(zone()) ZoneList<Statement*>(8);
if (fvar != NULL) {
VariableProxy* fproxy = top_scope_->NewUnresolved(function_name);
VariableProxy* fproxy =
top_scope_->NewUnresolved(factory(), function_name);
fproxy->BindTo(fvar);
body->Add(new(zone()) ExpressionStatement(
new(zone()) Assignment(isolate(),
fvar_init_op,
body->Add(factory()->NewExpressionStatement(
factory()->NewAssignment(fvar_init_op,
fproxy,
new(zone()) ThisFunction(isolate()),
factory()->NewThisFunction(),
RelocInfo::kNoPosition)));
}
ParseSourceElements(body, Token::RBRACE, CHECK_OK);
@@ -4305,6 +4235,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
scope->end_position(),
CHECK_OK);
}
ast_properties = *factory()->visitor()->ast_properties();
}
if (is_extended_mode()) {
@@ -4312,8 +4243,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
}
FunctionLiteral* function_literal =
new(zone()) FunctionLiteral(isolate(),
function_name,
factory()->NewFunctionLiteral(function_name,
scope,
body,
materialized_literal_count,
@@ -4322,9 +4252,11 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
only_simple_this_property_assignments,
this_property_assignments,
num_parameters,
has_duplicate_parameters,
type,
has_duplicate_parameters);
true);
function_literal->set_function_token_position(function_token_position);
function_literal->set_ast_properties(&ast_properties);
if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);
return function_literal;
@@ -4343,7 +4275,8 @@ preparser::PreParser::PreParseResult Parser::LazyParseFunctionLiteral(
NULL,
stack_limit,
do_allow_lazy,
allow_natives_syntax_);
allow_natives_syntax_,
allow_modules_);
}
preparser::PreParser::PreParseResult result =
reusable_preparser_->PreParseLazyFunction(top_scope_->language_mode(),
@@ -4394,7 +4327,7 @@ Expression* Parser::ParseV8Intrinsic(bool* ok) {
}
// We have a valid intrinsics call or a call to a builtin.
return new(zone()) CallRuntime(isolate(), name, function, args);
return factory()->NewCallRuntime(name, function, args);
}
@@ -4450,17 +4383,12 @@ void Parser::ExpectSemicolon(bool* ok) {
Literal* Parser::GetLiteralUndefined() {
return NewLiteral(isolate()->factory()->undefined_value());
return factory()->NewLiteral(isolate()->factory()->undefined_value());
}
Literal* Parser::GetLiteralTheHole() {
return NewLiteral(isolate()->factory()->the_hole_value());
}
Literal* Parser::GetLiteralNumber(double value) {
return NewNumberLiteral(value);
return factory()->NewLiteral(isolate()->factory()->the_hole_value());
}
@ -4638,11 +4566,6 @@ void Parser::RegisterTargetUse(Label* target, Target* stop) {
}
Literal* Parser::NewNumberLiteral(double number) {
return NewLiteral(isolate()->factory()->NewNumber(number, TENURED));
}
Expression* Parser::NewThrowReferenceError(Handle<String> type) {
return NewThrowError(isolate()->factory()->MakeReferenceError_symbol(),
type, HandleVector<Object>(NULL, 0));
@ -4686,15 +4609,11 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
elements, FAST_ELEMENTS, TENURED);
ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2);
args->Add(NewLiteral(type));
args->Add(NewLiteral(array));
CallRuntime* call_constructor = new(zone()) CallRuntime(isolate(),
constructor,
NULL,
args);
return new(zone()) Throw(isolate(),
call_constructor,
scanner().location().beg_pos);
args->Add(factory()->NewLiteral(type));
args->Add(factory()->NewLiteral(array));
CallRuntime* call_constructor =
factory()->NewCallRuntime(constructor, NULL, args);
return factory()->NewThrow(call_constructor, scanner().location().beg_pos);
}
// ----------------------------------------------------------------------------
@ -5665,8 +5584,11 @@ bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
// Harmony scoping is requested.
parsing_flags |= EXTENDED_MODE;
}
if (!info->is_native() && FLAG_harmony_modules) {
parsing_flags |= kAllowModules;
}
if (FLAG_allow_natives_syntax || info->is_native()) {
// We requre %identifier(..) syntax.
// We require %identifier(..) syntax.
parsing_flags |= kAllowNativesSyntax;
}
if (info->is_lazy()) {
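Every parser.cc hunk above follows the same migration: direct new(zone()) AstNode(isolate(), ...) allocations are replaced by calls through the per-function AstNodeFactory, whose construction visitor can record AST properties as nodes are built (see the ast_properties capture in ParseFunctionLiteral above). A minimal standalone sketch of that factory pattern, with simplified names that are not V8's real declarations:

#include <cstdio>

// Illustrative stand-ins for AST nodes; not V8's classes.
struct Node { virtual ~Node() {} };
struct ThisFunction : public Node {};

// Visitor that observes every node as it is constructed.
struct CountingVisitor {
  int node_count;
  CountingVisitor() : node_count(0) {}
  void VisitNode(Node*) { ++node_count; }
};

template <class Visitor>
class NodeFactory {
 public:
  ThisFunction* NewThisFunction() {
    ThisFunction* node = new ThisFunction();
    visitor_.VisitNode(node);  // bookkeeping happens at creation time
    return node;
  }
  Visitor* visitor() { return &visitor_; }
 private:
  Visitor visitor_;
};

int main() {
  NodeFactory<CountingVisitor> factory;
  Node* node = factory.NewThisFunction();
  std::printf("nodes built: %d\n", factory.visitor()->node_count);
  delete node;
  return 0;
}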

94
deps/v8/src/parser.h

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -435,9 +435,8 @@ class Parser {
v8::Extension* extension,
ScriptDataImpl* pre_data);
virtual ~Parser() {
if (reusable_preparser_ != NULL) {
delete reusable_preparser_;
}
reusable_preparser_ = NULL;
}
// Returns NULL if parsing failed.
@ -477,7 +476,69 @@ class Parser {
};
class BlockState;
class FunctionState;
class FunctionState BASE_EMBEDDED {
public:
FunctionState(Parser* parser,
Scope* scope,
Isolate* isolate);
~FunctionState();
int NextMaterializedLiteralIndex() {
return next_materialized_literal_index_++;
}
int materialized_literal_count() {
return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
}
int NextHandlerIndex() { return next_handler_index_++; }
int handler_count() { return next_handler_index_; }
void SetThisPropertyAssignmentInfo(
bool only_simple_this_property_assignments,
Handle<FixedArray> this_property_assignments) {
only_simple_this_property_assignments_ =
only_simple_this_property_assignments;
this_property_assignments_ = this_property_assignments;
}
bool only_simple_this_property_assignments() {
return only_simple_this_property_assignments_;
}
Handle<FixedArray> this_property_assignments() {
return this_property_assignments_;
}
void AddProperty() { expected_property_count_++; }
int expected_property_count() { return expected_property_count_; }
AstNodeFactory<AstConstructionVisitor>* factory() { return &factory_; }
private:
// Used to assign an index to each literal that needs materialization in
// the function. Includes regexp literals, and boilerplate for object and
// array literals.
int next_materialized_literal_index_;
// Used to assign a per-function index to try and catch handlers.
int next_handler_index_;
// Properties count estimation.
int expected_property_count_;
// Keeps track of assignments to properties of this. Used for
// optimizing constructors.
bool only_simple_this_property_assignments_;
Handle<FixedArray> this_property_assignments_;
Parser* parser_;
FunctionState* outer_function_state_;
Scope* outer_scope_;
int saved_ast_node_id_;
AstNodeFactory<AstConstructionVisitor> factory_;
};
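Judging by the outer_function_state_, outer_scope_ and saved_ast_node_id_ members, FunctionState stacks RAII-style: constructing one on entry to a function literal makes it the current state (bringing a fresh AstNodeFactory with it), and its destructor restores the enclosing state. A simplified, self-contained model of that discipline (names illustrative, not V8's):

struct ParserLike {
  struct FunctionState;          // defined below
  FunctionState* current;
  ParserLike() : current(0) {}
};

struct ParserLike::FunctionState {
  explicit FunctionState(ParserLike* parser)
      : parser_(parser),
        outer_(parser->current),
        next_materialized_literal_index_(0) {
    parser->current = this;      // push on entry to a function literal
  }
  ~FunctionState() { parser_->current = outer_; }  // pop on exit
  int NextMaterializedLiteralIndex() {
    return next_materialized_literal_index_++;
  }
 private:
  ParserLike* parser_;
  FunctionState* outer_;
  int next_materialized_literal_index_;
};

int main() {
  ParserLike parser;
  {
    ParserLike::FunctionState outer(&parser);
    { ParserLike::FunctionState inner(&parser); }  // nested function literal
  }
  return parser.current == 0 ? 0 : 1;
}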
FunctionLiteral* ParseLazy(CompilationInfo* info,
UC16CharacterStream* source,
@ -651,7 +712,6 @@ class Parser {
// Get odd-ball literals.
Literal* GetLiteralUndefined();
Literal* GetLiteralTheHole();
Literal* GetLiteralNumber(double value);
Handle<String> ParseIdentifier(bool* ok);
Handle<String> ParseIdentifierOrStrictReservedWord(
@ -699,31 +759,12 @@ class Parser {
// Factory methods.
Statement* EmptyStatement() {
static v8::internal::EmptyStatement* empty =
::new v8::internal::EmptyStatement();
return empty;
}
Scope* NewScope(Scope* parent, ScopeType type);
Handle<String> LookupSymbol(int symbol_id);
Handle<String> LookupCachedSymbol(int symbol_id);
Expression* NewCall(Expression* expression,
ZoneList<Expression*>* arguments,
int pos) {
return new(zone()) Call(isolate(), expression, arguments, pos);
}
inline Literal* NewLiteral(Handle<Object> handle) {
return new(zone()) Literal(isolate(), handle);
}
// Create a number literal.
Literal* NewNumberLiteral(double value);
// Generate AST node that throw a ReferenceError with the given type.
Expression* NewThrowReferenceError(Handle<String> type);
@ -746,6 +787,10 @@ class Parser {
preparser::PreParser::PreParseResult LazyParseFunctionLiteral(
SingletonLogger* logger);
AstNodeFactory<AstConstructionVisitor>* factory() {
return current_function_state_->factory();
}
Isolate* isolate_;
ZoneList<Handle<String> > symbol_cache_;
@ -762,6 +807,7 @@ class Parser {
Mode mode_;
bool allow_natives_syntax_;
bool allow_lazy_;
bool allow_modules_;
bool stack_overflow_;
// If true, the next (and immediately following) function literal is
// preceded by a parenthesis.

2
deps/v8/src/platform-freebsd.cc

@ -710,7 +710,7 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
static const int kSignalSenderStackSize = 32 * KB;
static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-linux.cc

@ -1060,7 +1060,7 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
static const int kSignalSenderStackSize = 32 * KB;
static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-macos.cc

@ -733,7 +733,7 @@ class Sampler::PlatformData : public Malloced {
class SamplerThread : public Thread {
public:
static const int kSamplerThreadStackSize = 32 * KB;
static const int kSamplerThreadStackSize = 64 * KB;
explicit SamplerThread(int interval)
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),

2
deps/v8/src/platform-openbsd.cc

@ -782,7 +782,7 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
static const int kSignalSenderStackSize = 32 * KB;
static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-solaris.cc

@ -704,7 +704,7 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
static const int kSignalSenderStackSize = 32 * KB;
static const int kSignalSenderStackSize = 64 * KB;
explicit SignalSender(int interval)
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),

2
deps/v8/src/platform-win32.cc

@ -1894,7 +1894,7 @@ class Sampler::PlatformData : public Malloced {
class SamplerThread : public Thread {
public:
static const int kSamplerThreadStackSize = 32 * KB;
static const int kSamplerThreadStackSize = 64 * KB;
explicit SamplerThread(int interval)
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),

10
deps/v8/src/preparser.h

@ -115,7 +115,8 @@ class PreParser {
i::ParserRecorder* log,
uintptr_t stack_limit,
bool allow_lazy,
bool allow_natives_syntax)
bool allow_natives_syntax,
bool allow_modules)
: scanner_(scanner),
log_(log),
scope_(NULL),
@ -124,6 +125,7 @@ class PreParser {
strict_mode_violation_type_(NULL),
stack_overflow_(false),
allow_lazy_(allow_lazy),
allow_modules_(allow_modules),
allow_natives_syntax_(allow_natives_syntax),
parenthesized_function_(false),
harmony_scoping_(scanner->HarmonyScoping()) { }
@ -140,8 +142,9 @@ class PreParser {
uintptr_t stack_limit) {
bool allow_lazy = (flags & i::kAllowLazy) != 0;
bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0;
return PreParser(scanner, log, stack_limit,
allow_lazy, allow_natives_syntax).PreParse();
bool allow_modules = (flags & i::kAllowModules) != 0;
return PreParser(scanner, log, stack_limit, allow_lazy,
allow_natives_syntax, allow_modules).PreParse();
}
// Parses a single function literal, from the opening parentheses before
@ -647,6 +650,7 @@ class PreParser {
const char* strict_mode_violation_type_;
bool stack_overflow_;
bool allow_lazy_;
bool allow_modules_;
bool allow_natives_syntax_;
bool parenthesized_function_;
bool harmony_scoping_;

454
deps/v8/src/prettyprinter.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -58,7 +58,7 @@ void PrettyPrinter::VisitBlock(Block* node) {
}
void PrettyPrinter::VisitDeclaration(Declaration* node) {
void PrettyPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
Print("var ");
PrintLiteral(node->proxy()->name(), false);
if (node->fun() != NULL) {
@ -69,6 +69,38 @@ void PrettyPrinter::VisitDeclaration(Declaration* node) {
}
void PrettyPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
Print("module ");
PrintLiteral(node->proxy()->name(), false);
Print(" = ");
Visit(node->module());
Print(";");
}
void PrettyPrinter::VisitModuleLiteral(ModuleLiteral* node) {
VisitBlock(node->body());
}
void PrettyPrinter::VisitModuleVariable(ModuleVariable* node) {
PrintLiteral(node->var()->name(), false);
}
void PrettyPrinter::VisitModulePath(ModulePath* node) {
Visit(node->module());
Print(".");
PrintLiteral(node->name(), false);
}
void PrettyPrinter::VisitModuleUrl(ModuleUrl* node) {
Print("at ");
PrintLiteral(node->url(), true);
}
void PrettyPrinter::VisitExpressionStatement(ExpressionStatement* node) {
Visit(node->expression());
Print(";");
@ -711,7 +743,7 @@ void AstPrinter::VisitBlock(Block* node) {
}
void AstPrinter::VisitDeclaration(Declaration* node) {
void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
if (node->fun() == NULL) {
// var or const declarations
PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
@ -728,6 +760,35 @@ void AstPrinter::VisitDeclaration(Declaration* node) {
}
void AstPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
IndentedScope indent(this, "MODULE");
PrintLiteralIndented("NAME", node->proxy()->name(), true);
Visit(node->module());
}
void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
VisitBlock(node->body());
}
void AstPrinter::VisitModuleVariable(ModuleVariable* node) {
PrintLiteralIndented("VARIABLE", node->var()->name(), false);
}
void AstPrinter::VisitModulePath(ModulePath* node) {
IndentedScope indent(this, "PATH");
PrintIndentedVisit("MODULE", node->module());
PrintLiteralIndented("NAME", node->name(), false);
}
void AstPrinter::VisitModuleUrl(ModuleUrl* node) {
PrintLiteralIndented("URL", node->url(), true);
}
void AstPrinter::VisitExpressionStatement(ExpressionStatement* node) {
Visit(node->expression());
}
@ -1018,393 +1079,6 @@ void AstPrinter::VisitThisFunction(ThisFunction* node) {
IndentedScope indent(this, "THIS-FUNCTION");
}
TagScope::TagScope(JsonAstBuilder* builder, const char* name)
: builder_(builder), next_(builder->tag()), has_body_(false) {
if (next_ != NULL) {
next_->use();
builder->Print(",\n");
}
builder->set_tag(this);
builder->PrintIndented("[");
builder->Print("\"%s\"", name);
builder->increase_indent(JsonAstBuilder::kTagIndentSize);
}
TagScope::~TagScope() {
builder_->decrease_indent(JsonAstBuilder::kTagIndentSize);
if (has_body_) {
builder_->Print("\n");
builder_->PrintIndented("]");
} else {
builder_->Print("]");
}
builder_->set_tag(next_);
}
AttributesScope::AttributesScope(JsonAstBuilder* builder)
: builder_(builder), attribute_count_(0) {
builder->set_attributes(this);
builder->tag()->use();
builder->Print(",\n");
builder->PrintIndented("{");
builder->increase_indent(JsonAstBuilder::kAttributesIndentSize);
}
AttributesScope::~AttributesScope() {
builder_->decrease_indent(JsonAstBuilder::kAttributesIndentSize);
if (attribute_count_ > 1) {
builder_->Print("\n");
builder_->PrintIndented("}");
} else {
builder_->Print("}");
}
builder_->set_attributes(NULL);
}
const char* JsonAstBuilder::BuildProgram(FunctionLiteral* program) {
Init();
Visit(program);
Print("\n");
return Output();
}
void JsonAstBuilder::AddAttributePrefix(const char* name) {
if (attributes()->is_used()) {
Print(",\n");
PrintIndented("\"");
} else {
Print("\"");
}
Print("%s\":", name);
attributes()->use();
}
void JsonAstBuilder::AddAttribute(const char* name, Handle<String> value) {
SmartArrayPointer<char> value_string = value->ToCString();
AddAttributePrefix(name);
Print("\"%s\"", *value_string);
}
void JsonAstBuilder::AddAttribute(const char* name, const char* value) {
AddAttributePrefix(name);
Print("\"%s\"", value);
}
void JsonAstBuilder::AddAttribute(const char* name, int value) {
AddAttributePrefix(name);
Print("%d", value);
}
void JsonAstBuilder::AddAttribute(const char* name, bool value) {
AddAttributePrefix(name);
Print(value ? "true" : "false");
}
void JsonAstBuilder::VisitBlock(Block* stmt) {
TagScope tag(this, "Block");
VisitStatements(stmt->statements());
}
void JsonAstBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
TagScope tag(this, "ExpressionStatement");
Visit(stmt->expression());
}
void JsonAstBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
TagScope tag(this, "EmptyStatement");
}
void JsonAstBuilder::VisitIfStatement(IfStatement* stmt) {
TagScope tag(this, "IfStatement");
Visit(stmt->condition());
Visit(stmt->then_statement());
Visit(stmt->else_statement());
}
void JsonAstBuilder::VisitContinueStatement(ContinueStatement* stmt) {
TagScope tag(this, "ContinueStatement");
}
void JsonAstBuilder::VisitBreakStatement(BreakStatement* stmt) {
TagScope tag(this, "BreakStatement");
}
void JsonAstBuilder::VisitReturnStatement(ReturnStatement* stmt) {
TagScope tag(this, "ReturnStatement");
Visit(stmt->expression());
}
void JsonAstBuilder::VisitWithStatement(WithStatement* stmt) {
TagScope tag(this, "WithStatement");
Visit(stmt->expression());
Visit(stmt->statement());
}
void JsonAstBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
TagScope tag(this, "SwitchStatement");
}
void JsonAstBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
TagScope tag(this, "DoWhileStatement");
Visit(stmt->body());
Visit(stmt->cond());
}
void JsonAstBuilder::VisitWhileStatement(WhileStatement* stmt) {
TagScope tag(this, "WhileStatement");
Visit(stmt->cond());
Visit(stmt->body());
}
void JsonAstBuilder::VisitForStatement(ForStatement* stmt) {
TagScope tag(this, "ForStatement");
if (stmt->init() != NULL) Visit(stmt->init());
if (stmt->cond() != NULL) Visit(stmt->cond());
Visit(stmt->body());
if (stmt->next() != NULL) Visit(stmt->next());
}
void JsonAstBuilder::VisitForInStatement(ForInStatement* stmt) {
TagScope tag(this, "ForInStatement");
Visit(stmt->each());
Visit(stmt->enumerable());
Visit(stmt->body());
}
void JsonAstBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
TagScope tag(this, "TryCatchStatement");
{ AttributesScope attributes(this);
AddAttribute("variable", stmt->variable()->name());
}
Visit(stmt->try_block());
Visit(stmt->catch_block());
}
void JsonAstBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
TagScope tag(this, "TryFinallyStatement");
Visit(stmt->try_block());
Visit(stmt->finally_block());
}
void JsonAstBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
TagScope tag(this, "DebuggerStatement");
}
void JsonAstBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
TagScope tag(this, "FunctionLiteral");
{
AttributesScope attributes(this);
AddAttribute("name", expr->name());
}
VisitDeclarations(expr->scope()->declarations());
VisitStatements(expr->body());
}
void JsonAstBuilder::VisitSharedFunctionInfoLiteral(
SharedFunctionInfoLiteral* expr) {
TagScope tag(this, "SharedFunctionInfoLiteral");
}
void JsonAstBuilder::VisitConditional(Conditional* expr) {
TagScope tag(this, "Conditional");
}
void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
TagScope tag(this, "Variable");
{
AttributesScope attributes(this);
Variable* var = expr->var();
AddAttribute("name", var->name());
switch (var->location()) {
case Variable::UNALLOCATED:
AddAttribute("location", "UNALLOCATED");
break;
case Variable::PARAMETER:
AddAttribute("location", "PARAMETER");
AddAttribute("index", var->index());
break;
case Variable::LOCAL:
AddAttribute("location", "LOCAL");
AddAttribute("index", var->index());
break;
case Variable::CONTEXT:
AddAttribute("location", "CONTEXT");
AddAttribute("index", var->index());
break;
case Variable::LOOKUP:
AddAttribute("location", "LOOKUP");
break;
}
}
}
void JsonAstBuilder::VisitLiteral(Literal* expr) {
TagScope tag(this, "Literal");
{
AttributesScope attributes(this);
Handle<Object> handle = expr->handle();
if (handle->IsString()) {
AddAttribute("handle", Handle<String>(String::cast(*handle)));
} else if (handle->IsSmi()) {
AddAttribute("handle", Smi::cast(*handle)->value());
}
}
}
void JsonAstBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
TagScope tag(this, "RegExpLiteral");
}
void JsonAstBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
TagScope tag(this, "ObjectLiteral");
}
void JsonAstBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
TagScope tag(this, "ArrayLiteral");
}
void JsonAstBuilder::VisitAssignment(Assignment* expr) {
TagScope tag(this, "Assignment");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->target());
Visit(expr->value());
}
void JsonAstBuilder::VisitThrow(Throw* expr) {
TagScope tag(this, "Throw");
Visit(expr->exception());
}
void JsonAstBuilder::VisitProperty(Property* expr) {
TagScope tag(this, "Property");
Visit(expr->obj());
Visit(expr->key());
}
void JsonAstBuilder::VisitCall(Call* expr) {
TagScope tag(this, "Call");
Visit(expr->expression());
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitCallNew(CallNew* expr) {
TagScope tag(this, "CallNew");
Visit(expr->expression());
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitCallRuntime(CallRuntime* expr) {
TagScope tag(this, "CallRuntime");
{
AttributesScope attributes(this);
AddAttribute("name", expr->name());
}
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitUnaryOperation(UnaryOperation* expr) {
TagScope tag(this, "UnaryOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->expression());
}
void JsonAstBuilder::VisitCountOperation(CountOperation* expr) {
TagScope tag(this, "CountOperation");
{
AttributesScope attributes(this);
AddAttribute("is_prefix", expr->is_prefix());
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->expression());
}
void JsonAstBuilder::VisitBinaryOperation(BinaryOperation* expr) {
TagScope tag(this, "BinaryOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->left());
Visit(expr->right());
}
void JsonAstBuilder::VisitCompareOperation(CompareOperation* expr) {
TagScope tag(this, "CompareOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->left());
Visit(expr->right());
}
void JsonAstBuilder::VisitThisFunction(ThisFunction* expr) {
TagScope tag(this, "ThisFunction");
}
void JsonAstBuilder::VisitDeclaration(Declaration* decl) {
TagScope tag(this, "Declaration");
{
AttributesScope attributes(this);
AddAttribute("mode", Variable::Mode2String(decl->mode()));
}
Visit(decl->proxy());
if (decl->fun() != NULL) Visit(decl->fun());
}
#endif // DEBUG
} } // namespace v8::internal

103
deps/v8/src/prettyprinter.h

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -112,107 +112,6 @@ class AstPrinter: public PrettyPrinter {
int indent_;
};
// Forward declaration of helper classes.
class TagScope;
class AttributesScope;
// Build a C string containing a JSON representation of a function's
// AST. The representation is based on JsonML (www.jsonml.org).
class JsonAstBuilder: public PrettyPrinter {
public:
JsonAstBuilder()
: indent_(0), top_tag_scope_(NULL), attributes_scope_(NULL) {
}
virtual ~JsonAstBuilder() {}
// Controls the indentation of subsequent lines of a tag body after
// the first line.
static const int kTagIndentSize = 2;
// Controls the indentation of subsequent lines of an attributes
// block's body after the first line.
static const int kAttributesIndentSize = 1;
// Construct a JSON representation of a function literal.
const char* BuildProgram(FunctionLiteral* program);
// Print text indented by the current indentation level.
void PrintIndented(const char* text) { Print("%*s%s", indent_, "", text); }
// Change the indentation level.
void increase_indent(int amount) { indent_ += amount; }
void decrease_indent(int amount) { indent_ -= amount; }
// The builder maintains a stack of opened AST node constructors.
// Each node constructor corresponds to a JsonML tag.
TagScope* tag() { return top_tag_scope_; }
void set_tag(TagScope* scope) { top_tag_scope_ = scope; }
// The builder maintains a pointer to the currently opened attributes
// of the current AST node or NULL if the attributes are not opened.
AttributesScope* attributes() { return attributes_scope_; }
void set_attributes(AttributesScope* scope) { attributes_scope_ = scope; }
// Add an attribute to the currently opened attributes.
void AddAttribute(const char* name, Handle<String> value);
void AddAttribute(const char* name, const char* value);
void AddAttribute(const char* name, int value);
void AddAttribute(const char* name, bool value);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
private:
int indent_;
TagScope* top_tag_scope_;
AttributesScope* attributes_scope_;
// Utility function used by AddAttribute implementations.
void AddAttributePrefix(const char* name);
};
// The JSON AST builder keeps a stack of open element tags (AST node
// constructors from the current iteration point to the root of the
// AST). TagScope is a helper class to manage the opening and closing
// of tags, the indentation of their bodies, and comma separating their
// contents.
class TagScope BASE_EMBEDDED {
public:
TagScope(JsonAstBuilder* builder, const char* name);
~TagScope();
void use() { has_body_ = true; }
private:
JsonAstBuilder* builder_;
TagScope* next_;
bool has_body_;
};
// AttributesScope is a helper class to manage the opening and closing
// of attribute blocks, the indentation of their bodies, and comma
// separating their contents. JsonAstBuilder::AddAttribute adds an
// attribute to the currently open AttributesScope. They cannot be
// nested so the builder keeps an optional single scope rather than a
// stack.
class AttributesScope BASE_EMBEDDED {
public:
explicit AttributesScope(JsonAstBuilder* builder);
~AttributesScope();
bool is_used() { return attribute_count_ > 0; }
void use() { ++attribute_count_; }
private:
JsonAstBuilder* builder_;
int attribute_count_;
};
#endif // DEBUG
} } // namespace v8::internal

88
deps/v8/src/profile-generator.cc

@ -1131,6 +1131,7 @@ const char* HeapEntry::TypeAsString() {
case kRegExp: return "/regexp/";
case kHeapNumber: return "/number/";
case kNative: return "/native/";
case kSynthetic: return "/synthetic/";
default: return "???";
}
}
@ -2698,6 +2699,45 @@ class GlobalHandlesExtractor : public ObjectVisitor {
NativeObjectsExplorer* explorer_;
};
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
public:
BasicHeapEntriesAllocator(
HeapSnapshot* snapshot,
HeapEntry::Type entries_type)
: snapshot_(snapshot),
collection_(snapshot_->collection()),
entries_type_(entries_type) {
}
virtual HeapEntry* AllocateEntry(
HeapThing ptr, int children_count, int retainers_count);
private:
HeapSnapshot* snapshot_;
HeapSnapshotsCollection* collection_;
HeapEntry::Type entries_type_;
};
HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
HeapThing ptr, int children_count, int retainers_count) {
v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
intptr_t elements = info->GetElementCount();
intptr_t size = info->GetSizeInBytes();
return snapshot_->AddEntry(
entries_type_,
elements != -1 ?
collection_->names()->GetFormatted(
"%s / %" V8_PTR_PREFIX "d entries",
info->GetLabel(),
info->GetElementCount()) :
collection_->names()->GetCopy(info->GetLabel()),
HeapObjectsMap::GenerateId(info),
size != -1 ? static_cast<int>(size) : 0,
children_count,
retainers_count);
}
NativeObjectsExplorer::NativeObjectsExplorer(
HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
: snapshot_(snapshot),
@ -2707,6 +2747,10 @@ NativeObjectsExplorer::NativeObjectsExplorer(
objects_by_info_(RetainedInfosMatch),
native_groups_(StringsMatch),
filler_(NULL) {
synthetic_entries_allocator_ =
new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
native_entries_allocator_ =
new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
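Design note: NativeObjectsExplorer previously acted as its own HeapEntriesAllocator (its AllocateEntry is deleted just below); the commit splits that role into two BasicHeapEntriesAllocator instances so group container nodes can be typed kSynthetic while wrapped native objects remain kNative. A minimal sketch of that strategy-object split, with types simplified for illustration:

#include <cstdio>

enum EntryType { kNative, kSynthetic };

// One allocator per entry type instead of the explorer allocating itself.
class TypedAllocator {
 public:
  explicit TypedAllocator(EntryType type) : type_(type) {}
  EntryType AllocateEntry() const { return type_; }
 private:
  EntryType type_;
};

int main() {
  TypedAllocator native_entries(kNative);
  TypedAllocator synthetic_entries(kSynthetic);
  // Group containers use the synthetic allocator; wrapped objects the native.
  std::printf("group=%d wrapper=%d\n",
              synthetic_entries.AllocateEntry(),
              native_entries.AllocateEntry());
  return 0;
}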
@ -2728,27 +2772,8 @@ NativeObjectsExplorer::~NativeObjectsExplorer() {
reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
info->Dispose();
}
}
HeapEntry* NativeObjectsExplorer::AllocateEntry(
HeapThing ptr, int children_count, int retainers_count) {
v8::RetainedObjectInfo* info =
reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
intptr_t elements = info->GetElementCount();
intptr_t size = info->GetSizeInBytes();
return snapshot_->AddEntry(
HeapEntry::kNative,
elements != -1 ?
collection_->names()->GetFormatted(
"%s / %" V8_PTR_PREFIX "d entries",
info->GetLabel(),
info->GetElementCount()) :
collection_->names()->GetCopy(info->GetLabel()),
HeapObjectsMap::GenerateId(info),
size != -1 ? static_cast<int>(size) : 0,
children_count,
retainers_count);
delete synthetic_entries_allocator_;
delete native_entries_allocator_;
}
@ -2790,12 +2815,14 @@ void NativeObjectsExplorer::FillImplicitReferences() {
for (int i = 0; i < groups->length(); ++i) {
ImplicitRefGroup* group = groups->at(i);
HeapObject* parent = *group->parent_;
HeapEntry* parent_entry = filler_->FindOrAddEntry(parent, this);
HeapEntry* parent_entry =
filler_->FindOrAddEntry(parent, native_entries_allocator_);
ASSERT(parent_entry != NULL);
Object*** children = group->children_;
for (size_t j = 0; j < group->length_; ++j) {
Object* child = *children[j];
HeapEntry* child_entry = filler_->FindOrAddEntry(child, this);
HeapEntry* child_entry =
filler_->FindOrAddEntry(child, native_entries_allocator_);
filler_->SetNamedReference(
HeapGraphEdge::kInternal,
parent, parent_entry,
@ -2886,11 +2913,13 @@ NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
void NativeObjectsExplorer::SetNativeRootReference(
v8::RetainedObjectInfo* info) {
HeapEntry* child_entry = filler_->FindOrAddEntry(info, this);
HeapEntry* child_entry =
filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(child_entry != NULL);
NativeGroupRetainedObjectInfo* group_info =
FindOrAddGroupInfo(info->GetGroupLabel());
HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
HeapEntry* group_entry =
filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
filler_->SetNamedAutoIndexReference(
HeapGraphEdge::kInternal,
group_info, group_entry,
@ -2902,7 +2931,8 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
HeapObject* wrapper, v8::RetainedObjectInfo* info) {
HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
ASSERT(wrapper_entry != NULL);
HeapEntry* info_entry = filler_->FindOrAddEntry(info, this);
HeapEntry* info_entry =
filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(info_entry != NULL);
filler_->SetNamedReference(HeapGraphEdge::kInternal,
wrapper, wrapper_entry,
@ -2920,7 +2950,8 @@ void NativeObjectsExplorer::SetRootNativeRootsReference() {
entry = native_groups_.Next(entry)) {
NativeGroupRetainedObjectInfo* group_info =
static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
HeapEntry* group_entry = filler_->FindOrAddEntry(group_info, this);
HeapEntry* group_entry =
filler_->FindOrAddEntry(group_info, native_entries_allocator_);
ASSERT(group_entry != NULL);
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
@ -3547,7 +3578,8 @@ void HeapSnapshotJSONSerializer::SerializeNodes() {
"," JSON_S("closure")
"," JSON_S("regexp")
"," JSON_S("number")
"," JSON_S("native"))
"," JSON_S("native")
"," JSON_S("synthetic"))
"," JSON_S("string")
"," JSON_S("number")
"," JSON_S("number")

11
deps/v8/src/profile-generator.h

@ -525,7 +525,8 @@ class HeapEntry BASE_EMBEDDED {
kClosure = v8::HeapGraphNode::kClosure,
kRegExp = v8::HeapGraphNode::kRegExp,
kHeapNumber = v8::HeapGraphNode::kHeapNumber,
kNative = v8::HeapGraphNode::kNative
kNative = v8::HeapGraphNode::kNative,
kSynthetic = v8::HeapGraphNode::kSynthetic
};
HeapEntry() { }
@ -1026,16 +1027,16 @@ class V8HeapExplorer : public HeapEntriesAllocator {
DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};
class NativeGroupRetainedObjectInfo;
// An implementation of retained native objects extractor.
class NativeObjectsExplorer : public HeapEntriesAllocator {
class NativeObjectsExplorer {
public:
NativeObjectsExplorer(HeapSnapshot* snapshot,
SnapshottingProgressReportingInterface* progress);
virtual ~NativeObjectsExplorer();
virtual HeapEntry* AllocateEntry(
HeapThing ptr, int children_count, int retainers_count);
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount();
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
@ -1074,6 +1075,8 @@ class NativeObjectsExplorer : public HeapEntriesAllocator {
// RetainedObjectInfo* -> List<HeapObject*>*
HashMap objects_by_info_;
HashMap native_groups_;
HeapEntriesAllocator* synthetic_entries_allocator_;
HeapEntriesAllocator* native_entries_allocator_;
// Used during references extraction.
SnapshotFillerInterface* filler_;

4
deps/v8/src/property-details.h

@ -119,10 +119,6 @@ class PropertyDetails BASE_EMBEDDED {
PropertyType type() { return TypeField::decode(value_); }
bool IsProperty() {
return IsRealProperty(type());
}
PropertyAttributes attributes() { return AttributesField::decode(value_); }
int index() { return StorageField::decode(value_); }

2
deps/v8/src/property.h

@ -264,7 +264,7 @@ class LookupResult BASE_EMBEDDED {
// Is the result a property, excluding transitions and the null
// descriptor?
bool IsProperty() {
return IsFound() && GetPropertyDetails().IsProperty();
return IsFound() && IsRealProperty(GetPropertyDetails().type());
}
bool IsCacheable() { return cacheable_; }

40
deps/v8/src/rewriter.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -42,12 +42,18 @@ class Processor: public AstVisitor {
: result_(result),
result_assigned_(false),
is_set_(false),
in_try_(false) {
}
in_try_(false),
factory_(isolate()) { }
virtual ~Processor() { }
void Process(ZoneList<Statement*>* statements);
bool result_assigned() const { return result_assigned_; }
AstNodeFactory<AstNullVisitor>* factory() {
return &factory_;
}
private:
Variable* result_;
@ -64,15 +70,13 @@ class Processor: public AstVisitor {
bool is_set_;
bool in_try_;
AstNodeFactory<AstNullVisitor> factory_;
Expression* SetResult(Expression* value) {
result_assigned_ = true;
Zone* zone = isolate()->zone();
VariableProxy* result_proxy = new(zone) VariableProxy(isolate(), result_);
return new(zone) Assignment(isolate(),
Token::ASSIGN,
result_proxy,
value,
RelocInfo::kNoPosition);
VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
return factory()->NewAssignment(
Token::ASSIGN, result_proxy, value, RelocInfo::kNoPosition);
}
// Node visitors.
@ -205,7 +209,12 @@ void Processor::VisitWithStatement(WithStatement* node) {
// Do nothing:
void Processor::VisitDeclaration(Declaration* node) {}
void Processor::VisitVariableDeclaration(VariableDeclaration* node) {}
void Processor::VisitModuleDeclaration(ModuleDeclaration* node) {}
void Processor::VisitModuleLiteral(ModuleLiteral* node) {}
void Processor::VisitModuleVariable(ModuleVariable* node) {}
void Processor::VisitModulePath(ModulePath* node) {}
void Processor::VisitModuleUrl(ModuleUrl* node) {}
void Processor::VisitEmptyStatement(EmptyStatement* node) {}
void Processor::VisitReturnStatement(ReturnStatement* node) {}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {}
@ -237,8 +246,6 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
if (processor.result_assigned()) {
ASSERT(function->end_position() != RelocInfo::kNoPosition);
Isolate* isolate = info->isolate();
Zone* zone = isolate->zone();
// Set the position of the assignment statement one character past the
// source code, such that it definitely is not in the source code range
// of an immediate inner scope. For example in
@ -246,10 +253,11 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
// the end position of the function generated for executing the eval code
// coincides with the end of the with scope which is the position of '1'.
int position = function->end_position();
VariableProxy* result_proxy = new(zone) VariableProxy(
isolate, result->name(), false, position);
VariableProxy* result_proxy = processor.factory()->NewVariableProxy(
result->name(), false, position);
result_proxy->BindTo(result);
Statement* result_statement = new(zone) ReturnStatement(result_proxy);
Statement* result_statement =
processor.factory()->NewReturnStatement(result_proxy);
result_statement->set_statement_pos(position);
body->Add(result_statement);
}

60
deps/v8/src/runtime-profiler.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -46,6 +46,8 @@ namespace internal {
// Optimization sampler constants.
static const int kSamplerFrameCount = 2;
// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
static const int kSamplerTicksBetweenThresholdAdjustment = 32;
@ -58,6 +60,16 @@ static const int kSamplerThresholdSizeFactorInit = 3;
static const int kSizeLimit = 1500;
// Constants for counter based profiler.
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt = 500;
Atomic32 RuntimeProfiler::state_ = 0;
// TODO(isolates): Create the semaphore lazily and clean it up when no
@ -90,13 +102,13 @@ void RuntimeProfiler::GlobalSetup() {
}
void RuntimeProfiler::Optimize(JSFunction* function) {
void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
ASSERT(function->IsOptimizable());
if (FLAG_trace_opt) {
PrintF("[marking ");
function->PrintName();
PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
PrintF(" for recompilation");
PrintF(" for recompilation, reason: %s", reason);
PrintF("]\n");
}
@ -192,6 +204,7 @@ void RuntimeProfiler::OptimizeNow() {
JavaScriptFrame* frame = it.frame();
JSFunction* function = JSFunction::cast(frame->function());
if (!FLAG_watch_ic_patching) {
// Adjust threshold each time we have processed
// a certain number of ticks.
if (sampler_ticks_until_threshold_adjustment_ > 0) {
@ -206,6 +219,7 @@ void RuntimeProfiler::OptimizeNow() {
}
}
}
}
if (function->IsMarkedForLazyRecompilation()) {
Code* unoptimized = function->shared()->code();
@ -217,6 +231,31 @@ void RuntimeProfiler::OptimizeNow() {
// Do not record non-optimizable functions.
if (!function->IsOptimizable()) continue;
if (FLAG_watch_ic_patching) {
int ticks = function->shared()->profiler_ticks();
if (ticks >= kProfilerTicksBeforeOptimization) {
// If this particular function hasn't had any ICs patched for enough
// ticks, optimize it now.
Optimize(function, "hot and stable");
} else if (!any_ic_changed_ &&
function->shared()->code()->instruction_size() < kMaxSizeEarlyOpt) {
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
Optimize(function, "small function");
} else if (!code_generated_ &&
!any_ic_changed_ &&
total_code_generated_ > 0 &&
total_code_generated_ < 2000) {
// If no code was generated and no IC was patched since the last tick,
// but a little code has already been generated since last Reset(),
// then type info might already be stable and we can optimize now.
Optimize(function, "stable on startup");
} else {
function->shared()->set_profiler_ticks(ticks + 1);
}
} else { // !FLAG_counting_profiler
samples[sample_count++] = function;
int function_size = function->shared()->SourceSize();
@ -227,10 +266,14 @@ void RuntimeProfiler::OptimizeNow() {
int threshold = sampler_threshold_ * threshold_size_factor;
if (LookupSample(function) >= threshold) {
Optimize(function);
Optimize(function, "sampler window lookup");
}
}
}
if (FLAG_watch_ic_patching) {
any_ic_changed_ = false;
code_generated_ = false;
} else { // !FLAG_counting_profiler
// Add the collected functions as samples. It's important not to do
// this as part of collecting them because this will interfere with
// the sample lookup in case of recursive functions.
@ -238,6 +281,7 @@ void RuntimeProfiler::OptimizeNow() {
AddSample(samples[i], kSamplerFrameWeight[i]);
}
}
}
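With FLAG_watch_ic_patching the profiler becomes counter-based: optimize after kProfilerTicksBeforeOptimization quiet ticks, immediately for small functions (instruction size under kMaxSizeEarlyOpt) when no IC changed since the last tick, or early on startup when only a little code has been generated since Reset(); otherwise just bump the tick count. A self-contained model of exactly that decision tree (thresholds copied from the hunk, everything else simplified):

#include <cstdio>

static const int kProfilerTicksBeforeOptimization = 2;
static const int kMaxSizeEarlyOpt = 500;

// Returns the optimization reason, or 0 to keep ticking.
const char* DecideOptimization(int ticks,
                               bool any_ic_changed,
                               bool code_generated,
                               int total_code_generated,
                               int instruction_size) {
  if (ticks >= kProfilerTicksBeforeOptimization) return "hot and stable";
  if (!any_ic_changed && instruction_size < kMaxSizeEarlyOpt)
    return "small function";
  if (!code_generated && !any_ic_changed &&
      total_code_generated > 0 && total_code_generated < 2000)
    return "stable on startup";
  return 0;  // caller increments profiler_ticks instead
}

int main() {
  std::printf("%s\n", DecideOptimization(2, true, true, 5000, 800));
  std::printf("%s\n", DecideOptimization(0, false, true, 5000, 300));
  return 0;
}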
void RuntimeProfiler::NotifyTick() {
@ -247,7 +291,9 @@ void RuntimeProfiler::NotifyTick() {
void RuntimeProfiler::SetUp() {
ASSERT(has_been_globally_set_up_);
if (!FLAG_watch_ic_patching) {
ClearSampleBuffer();
}
// If the ticker hasn't already started, make sure to do so to get
// the ticks for the runtime profiler.
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
@ -255,11 +301,15 @@ void RuntimeProfiler::SetUp() {
void RuntimeProfiler::Reset() {
if (FLAG_watch_ic_patching) {
total_code_generated_ = 0;
} else { // !FLAG_counting_profiler
sampler_threshold_ = kSamplerThresholdInit;
sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
sampler_ticks_until_threshold_adjustment_ =
kSamplerTicksBetweenThresholdAdjustment;
}
}
void RuntimeProfiler::TearDown() {

17
deps/v8/src/runtime-profiler.h

@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -61,6 +61,15 @@ class RuntimeProfiler {
Object** SamplerWindowAddress();
int SamplerWindowSize();
void NotifyICChanged() { any_ic_changed_ = true; }
void NotifyCodeGenerated(int generated_code_size) {
if (FLAG_watch_ic_patching) {
code_generated_ = true;
total_code_generated_ += generated_code_size;
}
}
// Rate limiting support.
// VM thread interface.
@ -97,7 +106,7 @@ class RuntimeProfiler {
static void HandleWakeUp(Isolate* isolate);
void Optimize(JSFunction* function);
void Optimize(JSFunction* function, const char* reason);
void AttemptOnStackReplacement(JSFunction* function);
@ -119,6 +128,10 @@ class RuntimeProfiler {
int sampler_window_position_;
int sampler_window_weight_[kSamplerWindowSize];
bool any_ic_changed_;
bool code_generated_;
int total_code_generated_;
// Possible state values:
// -1 => the profiler thread is waiting on the semaphore
// 0 or positive => the number of isolates running JavaScript code.

24
deps/v8/src/runtime.cc

@ -165,7 +165,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
}
} else {
{ MaybeObject* maybe_result =
heap->AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
heap->AllocateFixedArray(copy->NumberOfLocalProperties());
if (!maybe_result->ToObject(&result)) return maybe_result;
}
FixedArray* names = FixedArray::cast(result);
@ -273,7 +273,6 @@ static Handle<Map> ComputeObjectLiteralMap(
Isolate* isolate = context->GetIsolate();
int properties_length = constant_properties->length();
int number_of_properties = properties_length / 2;
if (FLAG_canonicalize_object_literal_maps) {
// Check that there are only symbols and array indices among keys.
int number_of_symbol_keys = 0;
for (int p = 0; p != properties_length; p += 2) {
@ -312,7 +311,6 @@ static Handle<Map> ComputeObjectLiteralMap(
*is_result_from_cache = true;
return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
}
}
*is_result_from_cache = false;
return isolate->factory()->CopyMap(
Handle<Map>(context->object_function()->initial_map()),
@ -2003,11 +2001,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) {
NoHandleAllocation ha;
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
return f->shared()->GetSourceCode();
CONVERT_ARG_CHECKED(JSFunction, f, 0);
Handle<SharedFunctionInfo> shared(f->shared());
return *shared->GetSourceCode();
}
@ -5010,7 +5009,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
return *isolate->factory()->NewJSArray(0);
}
int n;
n = jsproto->NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE));
n = jsproto->NumberOfLocalProperties();
local_property_count[i] = n;
total_property_count += n;
if (i < length - 1) {
@ -8419,6 +8418,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
ASSERT(args.length() == 1);
Handle<JSFunction> function = args.at<JSFunction>(0);
function->shared()->set_profiler_ticks(0);
// If the function is not compiled ignore the lazy
// recompilation. This can happen if the debugger is activated and
// the function is returned to the not compiled state.
@ -13263,9 +13264,10 @@ static bool ShowFrameInStackTrace(StackFrame* raw_frame,
// element segments each containing a receiver, function, code and
// native code offset.
RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
ASSERT_EQ(args.length(), 2);
Handle<Object> caller = args.at<Object>(0);
CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
ASSERT_EQ(args.length(), 3);
CONVERT_ARG_CHECKED(JSObject, error_object, 0);
Handle<Object> caller = args.at<Object>(1);
CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[2]);
HandleScope scope(isolate);
Factory* factory = isolate->factory();
@ -13315,6 +13317,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
iter.Advance();
}
Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
// Capture and attach a more detailed stack trace if necessary.
isolate->CaptureAndSetCurrentStackTraceFor(error_object);
result->set_length(Smi::FromInt(cursor));
return *result;
}

2
deps/v8/src/runtime.h

@ -229,7 +229,7 @@ namespace internal {
F(FunctionIsAPIFunction, 1, 1) \
F(FunctionIsBuiltin, 1, 1) \
F(GetScript, 1, 1) \
F(CollectStackTrace, 2, 1) \
F(CollectStackTrace, 3, 1) \
F(GetV8Version, 0, 1) \
\
F(ClassOf, 1, 1) \
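For context, entries in this list have the form F(name, number of arguments, number of returned values), so the bump from 2 to 3 matches the extra argument that Runtime_CollectStackTrace now takes in the runtime.cc hunk above:

// F(CollectStackTrace, 3, 1): %CollectStackTrace(error_object, caller, limit)
// now takes three arguments and produces one value.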

18
deps/v8/src/scanner.cc

@ -41,7 +41,8 @@ namespace internal {
Scanner::Scanner(UnicodeCache* unicode_cache)
: unicode_cache_(unicode_cache),
octal_pos_(Location::invalid()),
harmony_scoping_(false) { }
harmony_scoping_(false),
harmony_modules_(false) { }
void Scanner::Initialize(UC16CharacterStream* source) {
@ -830,7 +831,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('e') \
KEYWORD("else", Token::ELSE) \
KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \
KEYWORD("export", Token::FUTURE_RESERVED_WORD) \
KEYWORD("export", harmony_modules \
? Token::EXPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \
KEYWORD_GROUP('f') \
KEYWORD("false", Token::FALSE_LITERAL) \
@ -840,13 +842,17 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('i') \
KEYWORD("if", Token::IF) \
KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD("import", Token::FUTURE_RESERVED_WORD) \
KEYWORD("import", harmony_modules \
? Token::IMPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("in", Token::IN) \
KEYWORD("instanceof", Token::INSTANCEOF) \
KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('l') \
KEYWORD("let", harmony_scoping \
? Token::LET : Token::FUTURE_STRICT_RESERVED_WORD) \
KEYWORD_GROUP('m') \
KEYWORD("module", harmony_modules \
? Token::MODULE : Token::IDENTIFIER) \
KEYWORD_GROUP('n') \
KEYWORD("new", Token::NEW) \
KEYWORD("null", Token::NULL_LITERAL) \
@ -879,7 +885,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
static Token::Value KeywordOrIdentifierToken(const char* input,
int input_length,
bool harmony_scoping) {
bool harmony_scoping,
bool harmony_modules) {
ASSERT(input_length >= 1);
const int kMinLength = 2;
const int kMaxLength = 10;
@ -955,7 +962,8 @@ Token::Value Scanner::ScanIdentifierOrKeyword() {
Vector<const char> chars = next_.literal_chars->ascii_literal();
return KeywordOrIdentifierToken(chars.start(),
chars.length(),
harmony_scoping_);
harmony_scoping_,
harmony_modules_);
}
return Token::IDENTIFIER;
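The effect of the new parameter is that the same word can scan to different tokens per mode: with harmony_modules off, "module" stays an ordinary identifier (so existing code using it as a variable name is untouched), while "import" and "export" fall back to future reserved words. A tiny self-contained model of that mode-dependent classification (real V8 uses the generated keyword-group macro above):

#include <cstring>
#include <cstdio>

enum Token { IDENTIFIER, FUTURE_RESERVED_WORD, MODULE, IMPORT, EXPORT };

Token Classify(const char* word, bool harmony_modules) {
  if (std::strcmp(word, "module") == 0)
    return harmony_modules ? MODULE : IDENTIFIER;
  if (std::strcmp(word, "import") == 0)
    return harmony_modules ? IMPORT : FUTURE_RESERVED_WORD;
  if (std::strcmp(word, "export") == 0)
    return harmony_modules ? EXPORT : FUTURE_RESERVED_WORD;
  return IDENTIFIER;
}

int main() {
  std::printf("module: off=%d on=%d\n",
              Classify("module", false), Classify("module", true));
  return 0;
}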

20
deps/v8/src/scanner.h

@ -51,8 +51,9 @@ enum ParsingFlags {
// STRICT_MODE,
// EXTENDED_MODE,
kLanguageModeMask = 0x03,
kAllowLazy = 4,
kAllowNativesSyntax = 8
kAllowLazy = 0x04,
kAllowNativesSyntax = 0x08,
kAllowModules = 0x10
};
STATIC_ASSERT((kLanguageModeMask & CLASSIC_MODE) == CLASSIC_MODE);
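The flag values are rewritten in hex to make clear they are independent bit masks above the two-bit language-mode field, with kAllowModules taking the next free bit. A sketch of composing and testing the flags (enum values copied from the hunk; the concrete STRICT_MODE value is assumed to sit in the low two bits, per the enum's comment):

#include <cstdio>

enum ParsingFlags {
  kLanguageModeMask = 0x03,
  kAllowLazy = 0x04,
  kAllowNativesSyntax = 0x08,
  kAllowModules = 0x10
};

int main() {
  // Compose a flag word: some language mode in the low bits plus features.
  int flags = 0x01 /* assumed STRICT_MODE */ | kAllowLazy | kAllowModules;
  std::printf("mode=%d lazy=%d natives=%d modules=%d\n",
              flags & kLanguageModeMask,
              (flags & kAllowLazy) != 0,
              (flags & kAllowNativesSyntax) != 0,
              (flags & kAllowModules) != 0);
  return 0;
}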
@ -403,8 +404,14 @@ class Scanner {
bool HarmonyScoping() const {
return harmony_scoping_;
}
void SetHarmonyScoping(bool block_scoping) {
harmony_scoping_ = block_scoping;
void SetHarmonyScoping(bool scoping) {
harmony_scoping_ = scoping;
}
bool HarmonyModules() const {
return harmony_modules_;
}
void SetHarmonyModules(bool modules) {
harmony_modules_ = modules;
}
@ -552,9 +559,10 @@ class Scanner {
// Whether there is a multi-line comment that contains a
// line-terminator after the current token, and before the next.
bool has_multiline_comment_before_next_;
// Whether we scan 'let' as a keyword for harmony block scoped
// let bindings.
// Whether we scan 'let' as a keyword for harmony block-scoped let bindings.
bool harmony_scoping_;
// Whether we scan 'module', 'import', 'export' as keywords.
bool harmony_modules_;
};
} } // namespace v8::internal

60
deps/v8/src/scopes.cc

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -272,8 +272,11 @@ bool Scope::Analyze(CompilationInfo* info) {
top = top->outer_scope();
}
// Allocated the variables.
top->AllocateVariables(info->global_scope());
// Allocate the variables.
{
AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
top->AllocateVariables(info->global_scope(), &ast_node_factory);
}
#ifdef DEBUG
if (info->isolate()->bootstrapper()->IsActive()
@ -415,7 +418,8 @@ Variable* Scope::LocalLookup(Handle<String> name) {
}
Variable* Scope::LookupFunctionVar(Handle<String> name) {
Variable* Scope::LookupFunctionVar(Handle<String> name,
AstNodeFactory<AstNullVisitor>* factory) {
if (function_ != NULL && function_->name().is_identical_to(name)) {
return function_->var();
} else if (!scope_info_.is_null()) {
@ -423,7 +427,7 @@ Variable* Scope::LookupFunctionVar(Handle<String> name) {
VariableMode mode;
int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
if (index < 0) return NULL;
Variable* var = DeclareFunctionVar(name, mode);
Variable* var = DeclareFunctionVar(name, mode, factory);
var->AllocateTo(Variable::CONTEXT, index);
return var;
} else {
@ -443,15 +447,6 @@ Variable* Scope::Lookup(Handle<String> name) {
}
Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) {
ASSERT(is_function_scope() && function_ == NULL);
Variable* function_var = new Variable(
this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
return function_var;
}
void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
ASSERT(!already_resolved());
ASSERT(is_function_scope());
@ -489,18 +484,6 @@ Variable* Scope::DeclareGlobal(Handle<String> name) {
}
VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
ASSERT(!already_resolved());
VariableProxy* proxy = new(isolate_->zone()) VariableProxy(
isolate_, name, false, position);
unresolved_.Add(proxy);
return proxy;
}
void Scope::RemoveUnresolved(VariableProxy* var) {
// Most likely (always?) any variable we want to remove
// was just added before, so we search backwards.
@ -623,7 +606,8 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
}
void Scope::AllocateVariables(Scope* global_scope) {
void Scope::AllocateVariables(Scope* global_scope,
AstNodeFactory<AstNullVisitor>* factory) {
// 1) Propagate scope information.
bool outer_scope_calls_non_strict_eval = false;
if (outer_scope_ != NULL) {
@ -634,7 +618,7 @@ void Scope::AllocateVariables(Scope* global_scope) {
PropagateScopeInfo(outer_scope_calls_non_strict_eval);
// 2) Resolve variables.
ResolveVariablesRecursively(global_scope);
ResolveVariablesRecursively(global_scope, factory);
// 3) Allocate variables.
AllocateVariablesRecursively();
@ -897,7 +881,8 @@ Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {
Variable* Scope::LookupRecursive(Handle<String> name,
BindingKind* binding_kind) {
BindingKind* binding_kind,
AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(binding_kind != NULL);
// Try to find the variable in this scope.
Variable* var = LocalLookup(name);
@ -914,11 +899,11 @@ Variable* Scope::LookupRecursive(Handle<String> name,
// if any. We can do this for all scopes, since the function variable is
// only present - if at all - for function scopes.
*binding_kind = UNBOUND;
var = LookupFunctionVar(name);
var = LookupFunctionVar(name, factory);
if (var != NULL) {
*binding_kind = BOUND;
} else if (outer_scope_ != NULL) {
var = outer_scope_->LookupRecursive(name, binding_kind);
var = outer_scope_->LookupRecursive(name, binding_kind, factory);
if (*binding_kind == BOUND && (is_function_scope() || is_with_scope())) {
var->ForceContextAllocation();
}
@ -951,7 +936,8 @@ Variable* Scope::LookupRecursive(Handle<String> name,
void Scope::ResolveVariable(Scope* global_scope,
VariableProxy* proxy) {
VariableProxy* proxy,
AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(global_scope == NULL || global_scope->is_global_scope());
// If the proxy is already resolved there's nothing to do
@ -960,7 +946,7 @@ void Scope::ResolveVariable(Scope* global_scope,
// Otherwise, try to resolve the variable.
BindingKind binding_kind;
Variable* var = LookupRecursive(proxy->name(), &binding_kind);
Variable* var = LookupRecursive(proxy->name(), &binding_kind, factory);
switch (binding_kind) {
case BOUND:
// We found a variable binding.
@ -1001,17 +987,19 @@ void Scope::ResolveVariable(Scope* global_scope,
}
void Scope::ResolveVariablesRecursively(Scope* global_scope) {
void Scope::ResolveVariablesRecursively(
Scope* global_scope,
AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(global_scope == NULL || global_scope->is_global_scope());
// Resolve unresolved variables for this scope.
for (int i = 0; i < unresolved_.length(); i++) {
ResolveVariable(global_scope, unresolved_[i]);
ResolveVariable(global_scope, unresolved_[i], factory);
}
// Resolve unresolved variables for inner scopes.
for (int i = 0; i < inner_scopes_.length(); i++) {
inner_scopes_[i]->ResolveVariablesRecursively(global_scope);
inner_scopes_[i]->ResolveVariablesRecursively(global_scope, factory);
}
}

42
deps/v8/src/scopes.h

@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -115,7 +115,8 @@ class Scope: public ZoneObject {
// between this scope and the outer scope. (ECMA-262, 3rd., requires that
// the name of a named function literal is kept in an intermediate scope
// in between this scope and the next outer scope.)
Variable* LookupFunctionVar(Handle<String> name);
Variable* LookupFunctionVar(Handle<String> name,
AstNodeFactory<AstNullVisitor>* factory);
// Lookup a variable in this scope or outer scopes.
// Returns the variable or NULL if not found.
@ -124,7 +125,16 @@ class Scope: public ZoneObject {
// Declare the function variable for a function literal. This variable
// is in an intermediate scope between this function scope and the
// outer scope. Only possible for function scopes; at most one variable.
Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode);
template<class Visitor>
Variable* DeclareFunctionVar(Handle<String> name,
VariableMode mode,
AstNodeFactory<Visitor>* factory) {
ASSERT(is_function_scope() && function_ == NULL);
Variable* function_var = new Variable(
this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
function_ = factory->NewVariableProxy(function_var);
return function_var;
}
// Declare a parameter in this scope. When there are duplicated
// parameters the rightmost one 'wins'. However, the implementation
@ -144,8 +154,18 @@ class Scope: public ZoneObject {
Variable* DeclareGlobal(Handle<String> name);
// Create a new unresolved variable.
VariableProxy* NewUnresolved(Handle<String> name,
int position = RelocInfo::kNoPosition);
template<class Visitor>
VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
Handle<String> name,
int position = RelocInfo::kNoPosition) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
ASSERT(!already_resolved());
VariableProxy* proxy = factory->NewVariableProxy(name, false, position);
unresolved_.Add(proxy);
return proxy;
}
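Making these two helpers templates over the factory's visitor is what lets both pipelines share them: the parser passes its AstNodeFactory<AstConstructionVisitor>, while scope analysis passes a throwaway AstNodeFactory<AstNullVisitor>. Both call sites appear elsewhere in this commit:

// Parser (parser.cc above):
//   VariableProxy* fproxy = top_scope_->NewUnresolved(factory(), function_name);
// Scope analysis (scopes.cc above):
//   AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
//   top->AllocateVariables(info->global_scope(), &ast_node_factory);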
// Remove an unresolved variable. During parsing, an unresolved variable
// may have been added optimistically, but then only the variable name
@ -332,7 +352,8 @@ class Scope: public ZoneObject {
// In the case of code compiled and run using 'eval', the context
// parameter is the context in which eval was called. In all other
// cases the context parameter is an empty handle.
void AllocateVariables(Scope* global_scope);
void AllocateVariables(Scope* global_scope,
AstNodeFactory<AstNullVisitor>* factory);
// Current number of var or const locals.
int num_var_or_const() { return num_var_or_const_; }
@ -519,10 +540,13 @@ class Scope: public ZoneObject {
// scope. If the code is executed because of a call to 'eval', the context
// parameter should be set to the calling context of 'eval'.
Variable* LookupRecursive(Handle<String> name,
BindingKind* binding_kind);
BindingKind* binding_kind,
AstNodeFactory<AstNullVisitor>* factory);
void ResolveVariable(Scope* global_scope,
VariableProxy* proxy);
void ResolveVariablesRecursively(Scope* global_scope);
VariableProxy* proxy,
AstNodeFactory<AstNullVisitor>* factory);
void ResolveVariablesRecursively(Scope* global_scope,
AstNodeFactory<AstNullVisitor>* factory);
// Scope analysis.
bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval);

7
deps/v8/src/spaces.h

@ -2364,12 +2364,9 @@ class FixedSpace : public PagedSpace {
class MapSpace : public FixedSpace {
public:
// Creates a map space object with a maximum capacity.
MapSpace(Heap* heap,
intptr_t max_capacity,
int max_map_space_pages,
AllocationSpace id)
MapSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
: FixedSpace(heap, max_capacity, id, Map::kSize, "map"),
max_map_space_pages_(max_map_space_pages) {
max_map_space_pages_(kMaxMapPageIndex - 1) {
}
// Given an index, returns the page address.

3
deps/v8/src/token.h

@ -170,7 +170,10 @@ namespace internal {
T(FUTURE_RESERVED_WORD, NULL, 0) \
T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \
K(CONST, "const", 0) \
K(EXPORT, "export", 0) \
K(IMPORT, "import", 0) \
K(LET, "let", 0) \
K(MODULE, "module", 0) \
\
/* Illegal token - not able to scan. */ \
T(ILLEGAL, "ILLEGAL", 0) \
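EXPORT, IMPORT, and MODULE join the keyword list through the K entries above. Lists like this are X-macros: each consumer defines T and K to pull out the field it needs, then expands the list once per table. A hedged sketch of the idiom with a trimmed list and illustrative consumer code:

  #define TOKEN_LIST(T, K)    \
    K(CONST, "const", 0)      \
    K(EXPORT, "export", 0)    \
    K(IMPORT, "import", 0)    \
    K(LET, "let", 0)          \
    K(MODULE, "module", 0)    \
    T(ILLEGAL, "ILLEGAL", 0)

  // Consumer 1: the token enum uses only the names.
  #define T(name, string, precedence) name,
  enum Value { TOKEN_LIST(T, T) };  // CONST, EXPORT, ..., ILLEGAL
  #undef T

  // Consumer 2: a parallel string table uses only the spellings.
  #define T(name, string, precedence) string,
  static const char* const kTokenNames[] = { TOKEN_LIST(T, T) };
  #undef T

Keeping K distinct from T is what lets the scanner build its keyword-matching table from the K entries alone while tables like the above expand both.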

5
deps/v8/src/v8.cc

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -255,9 +255,6 @@ void V8::InitializeOncePerProcess() {
RuntimeProfiler::GlobalSetup();
// Peephole optimization might interfere with deoptimization.
FLAG_peephole_optimization = !use_crankshaft_;
ElementsAccessor::InitializeOncePerProcess();
if (FLAG_stress_compaction) {

2
deps/v8/src/version.cc

@@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 9
#define BUILD_NUMBER 2
#define BUILD_NUMBER 5
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
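For reference, numeric macros like these are typically glued into a display string with two-step preprocessor stringification; a hedged sketch (V8's actual version.cc assembles the string at runtime instead):

  #define V8_S(x) #x
  #define V8_STR(x) V8_S(x)
  #define VERSION_STRING                                 \
    V8_STR(MAJOR_VERSION) "." V8_STR(MINOR_VERSION) "."  \
    V8_STR(BUILD_NUMBER) "." V8_STR(PATCH_LEVEL)  // yields "3.9.5.0" here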

4
deps/v8/src/x64/code-stubs-x64.cc

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -4077,7 +4077,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Invoke: Link this frame into the handler chain. There's only one
// handler block in this code object, so its index is 0.
__ bind(&invoke);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER, 0);
__ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ LoadRoot(rax, Heap::kTheHoleValueRootIndex);

35
deps/v8/src/x64/full-codegen-x64.cc

@@ -1,4 +1,4 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -129,6 +129,27 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
#endif
// We can optionally optimize based on counters rather than statistical
// sampling.
if (info->ShouldSelfOptimize()) {
if (FLAG_trace_opt) {
PrintF("[adding self-optimization header to %s]\n",
*info->function()->debug_name()->ToCString());
}
MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
JSGlobalPropertyCell* cell;
if (maybe_cell->To(&cell)) {
__ movq(rax, Handle<JSGlobalPropertyCell>(cell),
RelocInfo::EMBEDDED_OBJECT);
__ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
Smi::FromInt(-1));
Handle<Code> compile_stub(
isolate()->builtins()->builtin(Builtins::kLazyRecompile));
__ j(zero, compile_stub, RelocInfo::CODE_TARGET);
}
}
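The block above gives eligible functions a small counting prologue: a per-function cell starts at Compiler::kCallsUntilPrimitiveOpt, is decremented on each entry, and when it reaches zero control jumps to the lazy-recompile builtin. A hedged sketch of the mechanism in plain C++; the names and the helper are illustrative, and the real code keeps the counter in a JSGlobalPropertyCell holding a Smi:

  struct CounterCell { int calls_left; };  // stands in for the property cell

  void RequestLazyRecompile() { /* illustrative: hand off to the optimizer */ }

  void FunctionPrologue(CounterCell* cell) {
    if (--cell->calls_left == 0) {
      // Mirrors `__ j(zero, compile_stub)`: stop running unoptimized
      // code and let the optimizing compiler take over on this call.
      RequestLazyRecompile();
    }
  }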
// Strict mode functions and builtins need to replace the receiver
// with undefined when called as functions (without an explicit
// receiver object). rcx is zero for method calls and non-zero for
@@ -256,11 +277,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -678,8 +699,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
FunctionLiteral* function,
int* global_count) {
FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
@@ -688,7 +708,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
(mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
++(*global_count);
++global_count_;
break;
case Variable::PARAMETER:
@@ -769,9 +789,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.

5
deps/v8/src/x64/lithium-codegen-x64.cc

@@ -555,7 +555,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
@@ -993,11 +992,11 @@ void LCodeGen::DoMulI(LMulI* instr) {
DeoptimizeIf(no_condition, instr->environment());
}
} else if (right->IsStackSlot()) {
__ or_(kScratchRegister, ToOperand(right));
__ orl(kScratchRegister, ToOperand(right));
DeoptimizeIf(sign, instr->environment());
} else {
// Test the non-zero operand for negative sign.
__ or_(kScratchRegister, ToRegister(right));
__ orl(kScratchRegister, ToRegister(right));
DeoptimizeIf(sign, instr->environment());
}
__ bind(&done);
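The switch from or_ to orl matters because the sign test must look at bit 31 of the 32-bit values, not bit 63 of the full registers. The test itself exists for JavaScript's -0: a zero product must deoptimize to a double result if either factor was negative. A hedged sketch of the predicate the generated code evaluates:

  #include <cstdint>

  // True when an int32 multiply cannot represent the JS result because the
  // result would be -0 (zero product with a negative factor).
  bool MulNeedsMinusZeroDeopt(int32_t left, int32_t right) {
    if (static_cast<int64_t>(left) * right != 0) return false;
    // (left | right) has bit 31 set iff either operand is negative; this is
    // what `orl kScratchRegister, <right>` plus a sign-flag branch checks.
    return (left | right) < 0;
  }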

24
deps/v8/src/x64/macro-assembler-x64.cc

@@ -2453,8 +2453,7 @@ Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
}
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type,
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
@@ -2465,25 +2464,22 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
// We will build up the handler from the bottom by pushing on the stack.
// First compute the state and push the frame pointer and context.
unsigned state = StackHandler::OffsetField::encode(handler_index);
if (try_location == IN_JAVASCRIPT) {
push(rbp);
push(rsi);
state |= (type == TRY_CATCH_HANDLER)
? StackHandler::KindField::encode(StackHandler::TRY_CATCH)
: StackHandler::KindField::encode(StackHandler::TRY_FINALLY);
} else {
ASSERT(try_location == IN_JS_ENTRY);
// First push the frame pointer and context.
if (kind == StackHandler::JS_ENTRY) {
// The frame pointer does not point to a JS frame so we save NULL for
// rbp. We expect the code throwing an exception to check rbp before
// dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
Push(Smi::FromInt(0)); // No context.
state |= StackHandler::KindField::encode(StackHandler::ENTRY);
} else {
push(rbp);
push(rsi);
}
// Push the state and the code object.
unsigned state =
StackHandler::IndexField::encode(handler_index) |
StackHandler::KindField::encode(kind);
push(Immediate(state));
Push(CodeObject());
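The rewritten PushTryHandler packs the handler index and kind into a single state word with BitField-style encoding instead of branching on a separate CodeLocation/HandlerType pair. A hedged sketch of the encoding; the field widths are illustrative, not V8's exact layout:

  enum Kind { JS_ENTRY = 0, TRY_CATCH = 1, TRY_FINALLY = 2 };  // JS_ENTRY must be 0

  const unsigned kKindBits = 2;
  const unsigned kKindMask = (1u << kKindBits) - 1;

  unsigned EncodeState(unsigned handler_index, Kind kind) {
    return (handler_index << kKindBits) | static_cast<unsigned>(kind);
  }

  // ThrowUncatchable's loop (below) finds the entry handler by testing the
  // kind field against zero, hence the STATIC_ASSERT that JS_ENTRY == 0.
  bool IsJsEntry(unsigned state) { return (state & kKindMask) == 0; }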
@@ -2594,7 +2590,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
bind(&check_kind);
STATIC_ASSERT(StackHandler::ENTRY == 0);
STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
testl(Operand(rsp, StackHandlerConstants::kStateOffset),
Immediate(StackHandler::KindField::kMask));
j(not_zero, &fetch_next);

4
deps/v8/src/x64/macro-assembler-x64.h

@@ -961,9 +961,7 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link it into try handler chain.
void PushTryHandler(CodeLocation try_location,
HandlerType type,
int handler_index);
void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();

68
deps/v8/src/x64/stub-cache-x64.cc

@@ -1331,24 +1331,24 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
} else {
Label call_builtin;
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
// Get the elements array of the object.
__ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
__ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
__ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
factory()->fixed_array_map());
__ j(not_equal, &call_builtin);
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
// Get the array's length into rax and calculate new length.
__ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
__ addl(rax, Immediate(argc));
// Get the element's length into rcx.
__ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
// Get the elements' length into rcx.
__ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmpl(rax, rcx);
@@ -1361,30 +1361,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
// Push the element.
__ lea(rdx, FieldOperand(rbx,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ movq(Operand(rdx, 0), rcx);
// Store the value.
__ movq(FieldOperand(rdi,
rax,
times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize),
rcx);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
__ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
__ CheckFastObjectElements(rdi, &call_builtin);
__ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
Label fast_object, not_fast_object;
__ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
__ CheckFastSmiOnlyElements(rbx, &call_builtin);
// rdx: receiver
// rbx: map
__ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
FAST_ELEMENTS,
rbx,
r10,
&call_builtin);
ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(rbx, &call_builtin);
}
__ CheckFastObjectElements(rbx, &call_builtin);
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
// Push the element.
__ lea(rdx, FieldOperand(rbx,
// Store the value.
__ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ movq(Operand(rdx, 0), rcx);
__ RecordWrite(rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
__ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
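The new with_write_barrier path distinguishes three cases instead of bailing out whenever the pushed value is a heap object: arrays that already hold objects stay on the fast path, smi-only arrays are transitioned to FAST_ELEMENTS in place, and everything else falls back to the builtin. A hedged sketch of that decision; the enum and flow are illustrative, not the stub itself:

  enum ElementsKind { FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS, OTHER_ELEMENTS };
  enum PushPath { FAST_STORE, TRANSITION_THEN_STORE, CALL_BUILTIN };

  PushPath ClassifyHeapObjectPush(ElementsKind kind, bool smi_only_arrays_on) {
    if (kind == FAST_ELEMENTS) return FAST_STORE;  // the fast_object label
    if (kind == FAST_SMI_ONLY_ELEMENTS && smi_only_arrays_on) {
      // Mirrors LoadTransitionedArrayMapConditional followed by
      // ElementsTransitionGenerator::GenerateSmiOnlyToObject.
      return TRANSITION_THEN_STORE;
    }
    return CALL_BUILTIN;  // the call_builtin label
  }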
@@ -1395,11 +1417,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&call_builtin);
}
__ movq(rdi, Operand(rsp, argc * kPointerSize));
__ movq(rbx, Operand(rsp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
__ JumpIfSmi(rdi, &no_fast_elements_check);
__ JumpIfSmi(rbx, &no_fast_elements_check);
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
__ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
@@ -1414,7 +1436,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Load(rcx, new_space_allocation_top);
// Check if it's the end of elements.
__ lea(rdx, FieldOperand(rbx,
__ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmpq(rdx, rcx);
@@ -1429,7 +1451,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Store(new_space_allocation_top, rcx);
// Push the argument...
__ movq(Operand(rdx, 0), rdi);
__ movq(Operand(rdx, 0), rbx);
// ... and fill the rest with holes.
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
@@ -1441,13 +1463,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
__ RecordWrite(rbx, rdx, rdi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
__ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to rdx as finish sequence assumes it's here.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
// Increment the elements' and array's sizes.
__ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
__ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
Smi::FromInt(kAllocationDelta));
// Make new length a smi before returning it.
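The attempt_to_grow_elements path only works when the elements array is the most recent new-space allocation: the stub can then extend it by bumping the allocation top, storing the new element, and filling the remaining kAllocationDelta - 1 slots with holes. A hedged sketch of that bump-pointer check, with illustrative types:

  #include <cstdint>

  // Returns true if `elements_end` sits exactly at the allocation top and
  // `delta_slots` more pointer-sized slots still fit in the space.
  bool TryGrowInPlace(uintptr_t elements_end, uintptr_t* allocation_top,
                      uintptr_t allocation_limit, int delta_slots) {
    if (elements_end != *allocation_top) return false;  // not the last allocation
    uintptr_t new_top = *allocation_top + delta_slots * sizeof(void*);
    if (new_top > allocation_limit) return false;       // would overflow the space
    *allocation_top = new_top;                          // the actual "growth"
    return true;
  }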

131
deps/v8/test/cctest/test-api.cc

@@ -13535,6 +13535,137 @@ TEST(CaptureStackTraceForUncaughtExceptionAndSetters) {
}
static void RethrowStackTraceHandler(v8::Handle<v8::Message> message,
v8::Handle<v8::Value> data) {
// Use the frame where JavaScript is called from.
v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
CHECK(!stack_trace.IsEmpty());
int frame_count = stack_trace->GetFrameCount();
CHECK_EQ(3, frame_count);
int line_number[] = {1, 2, 5};
for (int i = 0; i < frame_count; i++) {
CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
}
}
// Test that we only return the stack trace at the site where the exception
// is first thrown (not where it is rethrown).
TEST(RethrowStackTrace) {
v8::HandleScope scope;
LocalContext env;
// We make sure that
// - the stack trace of the ReferenceError in g() is reported.
// - the stack trace is not overwritten when e1 is rethrown by t().
// - the stack trace of e2 does not overwrite that of e1.
const char* source =
"function g() { error; } \n"
"function f() { g(); } \n"
"function t(e) { throw e; } \n"
"try { \n"
" f(); \n"
"} catch (e1) { \n"
" try { \n"
" error; \n"
" } catch (e2) { \n"
" t(e1); \n"
" } \n"
"} \n";
v8::V8::AddMessageListener(RethrowStackTraceHandler);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
CompileRun(source);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
v8::V8::RemoveMessageListeners(RethrowStackTraceHandler);
}
static void RethrowPrimitiveStackTraceHandler(v8::Handle<v8::Message> message,
v8::Handle<v8::Value> data) {
v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
CHECK(!stack_trace.IsEmpty());
int frame_count = stack_trace->GetFrameCount();
CHECK_EQ(2, frame_count);
int line_number[] = {3, 7};
for (int i = 0; i < frame_count; i++) {
CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
}
}
// Test that we do not recognize identity for primitive exceptions.
TEST(RethrowPrimitiveStackTrace) {
v8::HandleScope scope;
LocalContext env;
// We do not capture the stack trace for non-Error objects at creation time.
// Instead, we capture the stack trace at the last throw.
const char* source =
"function g() { throw 404; } \n"
"function f() { g(); } \n"
"function t(e) { throw e; } \n"
"try { \n"
" f(); \n"
"} catch (e1) { \n"
" t(e1) \n"
"} \n";
v8::V8::AddMessageListener(RethrowPrimitiveStackTraceHandler);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
CompileRun(source);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
v8::V8::RemoveMessageListeners(RethrowPrimitiveStackTraceHandler);
}
static void RethrowExistingStackTraceHandler(v8::Handle<v8::Message> message,
v8::Handle<v8::Value> data) {
// Use the frame where JavaScript is called from.
v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
CHECK(!stack_trace.IsEmpty());
CHECK_EQ(1, stack_trace->GetFrameCount());
CHECK_EQ(1, stack_trace->GetFrame(0)->GetLineNumber());
}
// Test that the stack trace is captured where the error object is created and
// not where it is thrown.
TEST(RethrowExistingStackTrace) {
v8::HandleScope scope;
LocalContext env;
const char* source =
"var e = new Error(); \n"
"throw e; \n";
v8::V8::AddMessageListener(RethrowExistingStackTraceHandler);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
CompileRun(source);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
v8::V8::RemoveMessageListeners(RethrowExistingStackTraceHandler);
}
static void RethrowBogusErrorStackTraceHandler(v8::Handle<v8::Message> message,
v8::Handle<v8::Value> data) {
// Use the frame where JavaScript is called from.
v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
CHECK(!stack_trace.IsEmpty());
CHECK_EQ(1, stack_trace->GetFrameCount());
CHECK_EQ(2, stack_trace->GetFrame(0)->GetLineNumber());
}
// Test that the stack trace is captured where the bogus Error object is thrown.
TEST(RethrowBogusErrorStackTrace) {
v8::HandleScope scope;
LocalContext env;
const char* source =
"var e = {__proto__: new Error()} \n"
"throw e; \n";
v8::V8::AddMessageListener(RethrowBogusErrorStackTraceHandler);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
CompileRun(source);
v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
v8::V8::RemoveMessageListeners(RethrowBogusErrorStackTraceHandler);
}
v8::Handle<Value> AnalyzeStackOfEvalWithSourceURL(const v8::Arguments& args) {
v8::HandleScope scope;
v8::Handle<v8::StackTrace> stackTrace =

5
deps/v8/test/cctest/test-ast.cc

@@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,7 +40,8 @@ TEST(List) {
CHECK_EQ(0, list->length());
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
AstNode* node = new(ZONE) EmptyStatement();
AstNodeFactory<AstNullVisitor> factory(Isolate::Current());
AstNode* node = factory.NewEmptyStatement();
list->Add(node);
CHECK_EQ(1, list->length());
CHECK_EQ(node, list->at(0));

4
deps/v8/test/cctest/test-heap-profiler.cc

@@ -774,7 +774,7 @@ TEST(HeapSnapshotRetainedObjectInfo) {
}
const v8::HeapGraphNode* native_group_aaa = GetNode(
snapshot->GetRoot(), v8::HeapGraphNode::kNative, "aaa-group");
snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "aaa-group");
CHECK_NE(NULL, native_group_aaa);
CHECK_EQ(1, native_group_aaa->GetChildrenCount());
const v8::HeapGraphNode* aaa = GetNode(
@@ -783,7 +783,7 @@ TEST(HeapSnapshotRetainedObjectInfo) {
CHECK_EQ(2, aaa->GetChildrenCount());
const v8::HeapGraphNode* native_group_ccc = GetNode(
snapshot->GetRoot(), v8::HeapGraphNode::kNative, "ccc-group");
snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "ccc-group");
const v8::HeapGraphNode* ccc = GetNode(
native_group_ccc, v8::HeapGraphNode::kNative, "ccc");
CHECK_NE(NULL, ccc);

10
deps/v8/test/cctest/test-mark-compact.cc

@@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -534,15 +534,15 @@ TEST(BootUpMemoryUse) {
intptr_t booted_memory = MemoryInUse();
if (sizeof(initial_memory) == 8) {
if (v8::internal::Snapshot::IsEnabled()) {
CHECK_LE(booted_memory - initial_memory, 6654 * 1024); // 6444.
CHECK_LE(booted_memory - initial_memory, 6686 * 1024); // 6476.
} else {
CHECK_LE(booted_memory - initial_memory, 6777 * 1024); // 6596.
CHECK_LE(booted_memory - initial_memory, 6809 * 1024); // 6628.
}
} else {
if (v8::internal::Snapshot::IsEnabled()) {
CHECK_LE(booted_memory - initial_memory, 6500 * 1024); // 6356.
CHECK_LE(booted_memory - initial_memory, 6532 * 1024); // 6388.
} else {
CHECK_LE(booted_memory - initial_memory, 6654 * 1024); // 6424
CHECK_LE(booted_memory - initial_memory, 6686 * 1024); // 6456
}
}
}

Some files were not shown because too many files changed in this diff
