Browse Source

Upgrade v8 to 1.3.7

v0.7.4-release
Ryan 16 years ago
parent
commit
b5b65ddcd7
  1. 11
      deps/v8/ChangeLog
  2. 12
      deps/v8/SConstruct
  3. 6
      deps/v8/include/v8.h
  4. 6
      deps/v8/src/api.cc
  5. 20
      deps/v8/src/arm/builtins-arm.cc
  6. 2
      deps/v8/src/arm/cfg-arm.cc
  7. 105
      deps/v8/src/arm/codegen-arm.cc
  8. 2
      deps/v8/src/arm/disasm-arm.cc
  9. 21
      deps/v8/src/arm/ic-arm.cc
  10. 21
      deps/v8/src/arm/macro-assembler-arm.cc
  11. 4
      deps/v8/src/arm/macro-assembler-arm.h
  12. 15
      deps/v8/src/arm/stub-cache-arm.cc
  13. 2
      deps/v8/src/arm/virtual-frame-arm.cc
  14. 5
      deps/v8/src/assembler.cc
  15. 3
      deps/v8/src/assembler.h
  16. 6
      deps/v8/src/debug.cc
  17. 2
      deps/v8/src/execution.cc
  18. 4
      deps/v8/src/heap-inl.h
  19. 13
      deps/v8/src/heap.cc
  20. 78
      deps/v8/src/heap.h
  21. 2
      deps/v8/src/mksnapshot.cc
  22. 13
      deps/v8/src/objects-inl.h
  23. 2
      deps/v8/src/objects.h
  24. 18
      deps/v8/src/platform-win32.cc
  25. 6
      deps/v8/src/runtime.cc
  26. 24
      deps/v8/src/serialize.cc
  27. 67
      deps/v8/src/spaces.cc
  28. 25
      deps/v8/src/spaces.h
  29. 12
      deps/v8/src/utils.h
  30. 10
      deps/v8/src/v8.cc
  31. 2
      deps/v8/src/v8.h
  32. 2
      deps/v8/src/version.cc
  33. 6
      deps/v8/src/x64/codegen-x64.cc
  34. 64
      deps/v8/test/cctest/test-api.cc
  35. 16
      deps/v8/test/cctest/test-debug.cc
  36. 12
      deps/v8/test/cctest/test-disasm-arm.cc
  37. 2
      deps/v8/test/cctest/test-log.cc
  38. 10
      deps/v8/test/cctest/test-serialize.cc
  39. 6
      deps/v8/test/cctest/test-strings.cc
  40. 4
      deps/v8/test/mjsunit/date-parse.js
  41. 2
      deps/v8/test/mjsunit/mjsunit.status
  42. 2
      deps/v8/test/mozilla/mozilla.status
  43. 2
      deps/v8/tools/visual_studio/d8_x64.vcproj
  44. 2
      deps/v8/tools/visual_studio/v8_cctest_x64.vcproj
  45. 2
      deps/v8/tools/visual_studio/v8_shell_sample_x64.vcproj

11
deps/v8/ChangeLog

@@ -1,3 +1,14 @@
2009-08-25: Version 1.3.7
Reduced the size of generated code on ARM platforms by reducing
the size of constant pools.
Changed build files to not include the 'ENV' user environment
variable in the build environment.
Changed the handling of idle notifications.
2009-08-21: Version 1.3.6 2009-08-21: Version 1.3.6
Add support for forceful termination of JavaScript execution. Add support for forceful termination of JavaScript execution.

12
deps/v8/SConstruct

@@ -789,12 +789,20 @@ def BuildSpecific(env, mode, env_overrides):
context = BuildContext(options, env_overrides, samples=SplitList(env['sample'])) context = BuildContext(options, env_overrides, samples=SplitList(env['sample']))
library_flags = context.AddRelevantFlags(os.environ, LIBRARY_FLAGS) # Remove variables which can't be imported from the user's external
# environment into a construction environment.
user_environ = os.environ.copy()
try:
del user_environ['ENV']
except KeyError:
pass
library_flags = context.AddRelevantFlags(user_environ, LIBRARY_FLAGS)
v8_flags = context.AddRelevantFlags(library_flags, V8_EXTRA_FLAGS) v8_flags = context.AddRelevantFlags(library_flags, V8_EXTRA_FLAGS)
mksnapshot_flags = context.AddRelevantFlags(library_flags, MKSNAPSHOT_EXTRA_FLAGS) mksnapshot_flags = context.AddRelevantFlags(library_flags, MKSNAPSHOT_EXTRA_FLAGS)
dtoa_flags = context.AddRelevantFlags(library_flags, DTOA_EXTRA_FLAGS) dtoa_flags = context.AddRelevantFlags(library_flags, DTOA_EXTRA_FLAGS)
cctest_flags = context.AddRelevantFlags(v8_flags, CCTEST_EXTRA_FLAGS) cctest_flags = context.AddRelevantFlags(v8_flags, CCTEST_EXTRA_FLAGS)
sample_flags = context.AddRelevantFlags(os.environ, SAMPLE_FLAGS) sample_flags = context.AddRelevantFlags(user_environ, SAMPLE_FLAGS)
d8_flags = context.AddRelevantFlags(library_flags, D8_FLAGS) d8_flags = context.AddRelevantFlags(library_flags, D8_FLAGS)
context.flags = { context.flags = {

6
deps/v8/include/v8.h

@@ -2280,9 +2280,13 @@ class V8EXPORT V8 {
/** /**
* Optional notification that the embedder is idle. * Optional notification that the embedder is idle.
* V8 uses the notification to reduce memory footprint. * V8 uses the notification to reduce memory footprint.
* This call can be used repeatedly if the embedder remains idle.
* \param is_high_priority tells whether the embedder is high priority. * \param is_high_priority tells whether the embedder is high priority.
* Returns true if the embedder should stop calling IdleNotification
* until real work has been done. This indicates that V8 has done
* as much cleanup as it will be able to do.
*/ */
static void IdleNotification(bool is_high_priority); static bool IdleNotification(bool is_high_priority);
/** /**
* Optional notification that the system is running low on memory. * Optional notification that the system is running low on memory.

6
deps/v8/src/api.cc

@@ -2604,8 +2604,8 @@ bool v8::V8::Dispose() {
} }
void v8::V8::IdleNotification(bool is_high_priority) { bool v8::V8::IdleNotification(bool is_high_priority) {
i::V8::IdleNotification(is_high_priority); return i::V8::IdleNotification(is_high_priority);
} }
@@ -3335,7 +3335,7 @@ void V8::ResumeProfilerEx(int flags) {
flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU); flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
const int current_flags = i::Logger::GetActiveProfilerModules(); const int current_flags = i::Logger::GetActiveProfilerModules();
i::Logger::ResumeProfiler(flags); i::Logger::ResumeProfiler(flags);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
i::Logger::PauseProfiler(~current_flags & flags); i::Logger::PauseProfiler(~current_flags & flags);
} else { } else {
i::Logger::ResumeProfiler(flags); i::Logger::ResumeProfiler(flags);

20
deps/v8/src/arm/builtins-arm.cc

@@ -214,9 +214,13 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Enter an internal frame. // Enter an internal frame.
__ EnterInternalFrame(); __ EnterInternalFrame();
// Setup the context from the function argument. // Set up the context from the function argument.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Set up the roots register.
ExternalReference roots_address = ExternalReference::roots_address();
__ mov(r10, Operand(roots_address));
// Push the function and the receiver onto the stack. // Push the function and the receiver onto the stack.
__ push(r1); __ push(r1);
__ push(r2); __ push(r2);
@@ -239,7 +243,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Initialize all JavaScript callee-saved registers, since they will be seen // Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers. // by the garbage collector as part of handlers.
__ mov(r4, Operand(Factory::undefined_value())); __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ mov(r5, Operand(r4)); __ mov(r5, Operand(r4));
__ mov(r6, Operand(r4)); __ mov(r6, Operand(r4));
__ mov(r7, Operand(r4)); __ mov(r7, Operand(r4));
@@ -282,7 +286,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
{ Label done; { Label done;
__ tst(r0, Operand(r0)); __ tst(r0, Operand(r0));
__ b(ne, &done); __ b(ne, &done);
__ mov(r2, Operand(Factory::undefined_value())); __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2); __ push(r2);
__ add(r0, r0, Operand(1)); __ add(r0, r0, Operand(1));
__ bind(&done); __ bind(&done);
@@ -323,10 +327,10 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ tst(r2, Operand(kSmiTagMask)); __ tst(r2, Operand(kSmiTagMask));
__ b(eq, &call_to_object); __ b(eq, &call_to_object);
__ mov(r3, Operand(Factory::null_value())); __ LoadRoot(r3, Heap::kNullValueRootIndex);
__ cmp(r2, r3); __ cmp(r2, r3);
__ b(eq, &use_global_receiver); __ b(eq, &use_global_receiver);
__ mov(r3, Operand(Factory::undefined_value())); __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
__ cmp(r2, r3); __ cmp(r2, r3);
__ b(eq, &use_global_receiver); __ b(eq, &use_global_receiver);
@@ -492,10 +496,10 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ ldr(r0, MemOperand(fp, kRecvOffset)); __ ldr(r0, MemOperand(fp, kRecvOffset));
__ tst(r0, Operand(kSmiTagMask)); __ tst(r0, Operand(kSmiTagMask));
__ b(eq, &call_to_object); __ b(eq, &call_to_object);
__ mov(r1, Operand(Factory::null_value())); __ LoadRoot(r1, Heap::kNullValueRootIndex);
__ cmp(r0, r1); __ cmp(r0, r1);
__ b(eq, &use_global_receiver); __ b(eq, &use_global_receiver);
__ mov(r1, Operand(Factory::undefined_value())); __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
__ cmp(r0, r1); __ cmp(r0, r1);
__ b(eq, &use_global_receiver); __ b(eq, &use_global_receiver);
@@ -665,7 +669,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// r1: function // r1: function
// r2: expected number of arguments // r2: expected number of arguments
// r3: code entry to call // r3: code entry to call
__ mov(ip, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2)); __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
__ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame. __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.

2
deps/v8/src/arm/cfg-arm.cc

@@ -67,7 +67,7 @@ void EntryNode::Compile(MacroAssembler* masm) {
__ add(fp, sp, Operand(2 * kPointerSize)); __ add(fp, sp, Operand(2 * kPointerSize));
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots(); int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) { if (count > 0) {
__ mov(ip, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
__ push(ip); __ push(ip);
} }

105
deps/v8/src/arm/codegen-arm.cc

@@ -305,7 +305,7 @@ void CodeGenerator::GenCode(FunctionLiteral* fun) {
// sp: stack pointer // sp: stack pointer
// fp: frame pointer // fp: frame pointer
// cp: callee's context // cp: callee's context
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
function_return_.Bind(); function_return_.Bind();
if (FLAG_trace) { if (FLAG_trace) {
@@ -478,11 +478,11 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
JumpTarget loaded; JumpTarget loaded;
JumpTarget materialize_true; JumpTarget materialize_true;
materialize_true.Branch(cc_reg_); materialize_true.Branch(cc_reg_);
__ mov(r0, Operand(Factory::false_value())); __ LoadRoot(r0, Heap::kFalseValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
loaded.Jump(); loaded.Jump();
materialize_true.Bind(); materialize_true.Bind();
__ mov(r0, Operand(Factory::true_value())); __ LoadRoot(r0, Heap::kTrueValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
loaded.Bind(); loaded.Bind();
cc_reg_ = al; cc_reg_ = al;
@@ -499,7 +499,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
// Load "true" if necessary. // Load "true" if necessary.
if (true_target.is_linked()) { if (true_target.is_linked()) {
true_target.Bind(); true_target.Bind();
__ mov(r0, Operand(Factory::true_value())); __ LoadRoot(r0, Heap::kTrueValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
// If both "true" and "false" need to be loaded jump across the code for // If both "true" and "false" need to be loaded jump across the code for
@@ -510,7 +510,7 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
// Load "false" if necessary. // Load "false" if necessary.
if (false_target.is_linked()) { if (false_target.is_linked()) {
false_target.Bind(); false_target.Bind();
__ mov(r0, Operand(Factory::false_value())); __ LoadRoot(r0, Heap::kFalseValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
// A value is loaded on all paths reaching this point. // A value is loaded on all paths reaching this point.
@@ -640,15 +640,18 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
// Fast case checks // Fast case checks
// Check if the value is 'false'. // Check if the value is 'false'.
__ cmp(r0, Operand(Factory::false_value())); __ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r0, ip);
false_target->Branch(eq); false_target->Branch(eq);
// Check if the value is 'true'. // Check if the value is 'true'.
__ cmp(r0, Operand(Factory::true_value())); __ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
true_target->Branch(eq); true_target->Branch(eq);
// Check if the value is 'undefined'. // Check if the value is 'undefined'.
__ cmp(r0, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
false_target->Branch(eq); false_target->Branch(eq);
// Check if the value is a smi. // Check if the value is a smi.
@@ -661,7 +664,8 @@ void CodeGenerator::ToBoolean(JumpTarget* true_target,
frame_->EmitPush(r0); frame_->EmitPush(r0);
frame_->CallRuntime(Runtime::kToBool, 1); frame_->CallRuntime(Runtime::kToBool, 1);
// Convert the result (r0) to a condition code. // Convert the result (r0) to a condition code.
__ cmp(r0, Operand(Factory::false_value())); __ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r0, ip);
cc_reg_ = ne; cc_reg_ = ne;
} }
@@ -1185,7 +1189,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
// 'undefined') because we may have a (legal) redeclaration and we // 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value. // must not destroy the current value.
if (node->mode() == Variable::CONST) { if (node->mode() == Variable::CONST) {
__ mov(r0, Operand(Factory::the_hole_value())); __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} else if (node->fun() != NULL) { } else if (node->fun() != NULL) {
LoadAndSpill(node->fun()); LoadAndSpill(node->fun());
@@ -1725,9 +1729,11 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
// Both SpiderMonkey and kjs ignore null and undefined in contrast // Both SpiderMonkey and kjs ignore null and undefined in contrast
// to the specification. 12.6.4 mandates a call to ToObject. // to the specification. 12.6.4 mandates a call to ToObject.
frame_->EmitPop(r0); frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
exit.Branch(eq); exit.Branch(eq);
__ cmp(r0, Operand(Factory::null_value())); __ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r0, ip);
exit.Branch(eq); exit.Branch(eq);
// Stack layout in body: // Stack layout in body:
@@ -1759,7 +1765,8 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
// Otherwise, we got a FixedArray, and we have to do a slow check. // Otherwise, we got a FixedArray, and we have to do a slow check.
__ mov(r2, Operand(r0)); __ mov(r2, Operand(r0));
__ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::meta_map())); __ LoadRoot(ip, Heap::kMetaMapRootIndex);
__ cmp(r1, ip);
fixed_array.Branch(ne); fixed_array.Branch(ne);
// Get enum cache // Get enum cache
@@ -1833,7 +1840,8 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
__ mov(r3, Operand(r0)); __ mov(r3, Operand(r0));
// If the property has been removed while iterating, we just skip it. // If the property has been removed while iterating, we just skip it.
__ cmp(r3, Operand(Factory::null_value())); __ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r3, ip);
node->continue_target()->Branch(eq); node->continue_target()->Branch(eq);
end_del_check.Bind(); end_del_check.Bind();
@@ -2093,7 +2101,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
// Fake a top of stack value (unneeded when FALLING) and set the // Fake a top of stack value (unneeded when FALLING) and set the
// state in r2, then jump around the unlink blocks if any. // state in r2, then jump around the unlink blocks if any.
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
__ mov(r2, Operand(Smi::FromInt(FALLING))); __ mov(r2, Operand(Smi::FromInt(FALLING)));
if (nof_unlinks > 0) { if (nof_unlinks > 0) {
@@ -2135,7 +2143,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
frame_->EmitPush(r0); frame_->EmitPush(r0);
} else { } else {
// Fake TOS for targets that shadowed breaks and continues. // Fake TOS for targets that shadowed breaks and continues.
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
__ mov(r2, Operand(Smi::FromInt(JUMPING + i))); __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
@@ -2322,8 +2330,9 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
r2, r2,
&slow)); &slow));
if (potential_slot->var()->mode() == Variable::CONST) { if (potential_slot->var()->mode() == Variable::CONST) {
__ cmp(r0, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); __ cmp(r0, ip);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
} }
// There is always control flow to slow from // There is always control flow to slow from
// ContextSlotOperandCheckExtensions so we have to jump around // ContextSlotOperandCheckExtensions so we have to jump around
@@ -2360,8 +2369,9 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
// value. // value.
Comment cmnt(masm_, "[ Unhole const"); Comment cmnt(masm_, "[ Unhole const");
frame_->EmitPop(r0); frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); __ cmp(r0, ip);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
} }
@@ -2404,7 +2414,8 @@ void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
__ bind(&next); __ bind(&next);
// Terminate at global context. // Terminate at global context.
__ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
__ cmp(tmp2, Operand(Factory::global_context_map())); __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
__ cmp(tmp2, ip);
__ b(eq, &fast); __ b(eq, &fast);
// Check that extension is NULL. // Check that extension is NULL.
__ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
@@ -2501,7 +2512,8 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
__ ldr(r2, FieldMemOperand(r1, literal_offset)); __ ldr(r2, FieldMemOperand(r1, literal_offset));
JumpTarget done; JumpTarget done;
__ cmp(r2, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, ip);
done.Branch(ne); done.Branch(ne);
// If the entry is undefined we call the runtime system to computed // If the entry is undefined we call the runtime system to computed
@@ -2583,7 +2595,8 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
// Check whether we need to materialize the object literal boilerplate. // Check whether we need to materialize the object literal boilerplate.
// If so, jump to the deferred code. // If so, jump to the deferred code.
__ cmp(r2, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, Operand(ip));
deferred->Branch(eq); deferred->Branch(eq);
deferred->BindExit(); deferred->BindExit();
@@ -2705,7 +2718,8 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
// Check whether we need to materialize the object literal boilerplate. // Check whether we need to materialize the object literal boilerplate.
// If so, jump to the deferred code. // If so, jump to the deferred code.
__ cmp(r2, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r2, Operand(ip));
deferred->Branch(eq); deferred->Branch(eq);
deferred->BindExit(); deferred->BindExit();
@@ -3036,7 +3050,7 @@ void CodeGenerator::VisitCallEval(CallEval* node) {
// Prepare stack for call to resolved function. // Prepare stack for call to resolved function.
LoadAndSpill(function); LoadAndSpill(function);
__ mov(r2, Operand(Factory::undefined_value())); __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r2); // Slot for receiver frame_->EmitPush(r2); // Slot for receiver
int arg_count = args->length(); int arg_count = args->length();
for (int i = 0; i < arg_count; i++) { for (int i = 0; i < arg_count; i++) {
@@ -3180,7 +3194,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
// Non-JS objects have class null. // Non-JS objects have class null.
null.Bind(); null.Bind();
__ mov(r0, Operand(Factory::null_value())); __ LoadRoot(r0, Heap::kNullValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
// All done. // All done.
@@ -3253,7 +3267,7 @@ void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
__ CallRuntime(Runtime::kLog, 2); __ CallRuntime(Runtime::kLog, 2);
} }
#endif #endif
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
@@ -3274,7 +3288,7 @@ void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
VirtualFrame::SpilledScope spilled_scope; VirtualFrame::SpilledScope spilled_scope;
ASSERT(args->length() == 2); ASSERT(args->length() == 2);
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
frame_->EmitPush(r0); frame_->EmitPush(r0);
} }
@@ -3494,14 +3508,14 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
} else { } else {
// Default: Result of deleting non-global, not dynamically // Default: Result of deleting non-global, not dynamically
// introduced variables is false. // introduced variables is false.
__ mov(r0, Operand(Factory::false_value())); __ LoadRoot(r0, Heap::kFalseValueRootIndex);
} }
} else { } else {
// Default: Result of deleting expressions is true. // Default: Result of deleting expressions is true.
LoadAndSpill(node->expression()); // may have side-effects LoadAndSpill(node->expression()); // may have side-effects
frame_->Drop(); frame_->Drop();
__ mov(r0, Operand(Factory::true_value())); __ LoadRoot(r0, Heap::kTrueValueRootIndex);
} }
frame_->EmitPush(r0); frame_->EmitPush(r0);
@@ -3554,7 +3568,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
case Token::VOID: case Token::VOID:
// since the stack top is cached in r0, popping and then // since the stack top is cached in r0, popping and then
// pushing a value can be done by just writing to r0. // pushing a value can be done by just writing to r0.
__ mov(r0, Operand(Factory::undefined_value())); __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
break; break;
case Token::ADD: { case Token::ADD: {
@@ -3880,14 +3894,16 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
if (left_is_null || right_is_null) { if (left_is_null || right_is_null) {
LoadAndSpill(left_is_null ? right : left); LoadAndSpill(left_is_null ? right : left);
frame_->EmitPop(r0); frame_->EmitPop(r0);
__ cmp(r0, Operand(Factory::null_value())); __ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r0, ip);
// The 'null' value is only equal to 'undefined' if using non-strict // The 'null' value is only equal to 'undefined' if using non-strict
// comparisons. // comparisons.
if (op != Token::EQ_STRICT) { if (op != Token::EQ_STRICT) {
true_target()->Branch(eq); true_target()->Branch(eq);
__ cmp(r0, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, Operand(ip));
true_target()->Branch(eq); true_target()->Branch(eq);
__ tst(r0, Operand(kSmiTagMask)); __ tst(r0, Operand(kSmiTagMask));
@@ -3924,7 +3940,8 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
__ tst(r1, Operand(kSmiTagMask)); __ tst(r1, Operand(kSmiTagMask));
true_target()->Branch(eq); true_target()->Branch(eq);
__ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::heap_number_map())); __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(r1, ip);
cc_reg_ = eq; cc_reg_ = eq;
} else if (check->Equals(Heap::string_symbol())) { } else if (check->Equals(Heap::string_symbol())) {
@@ -3944,13 +3961,16 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
cc_reg_ = lt; cc_reg_ = lt;
} else if (check->Equals(Heap::boolean_symbol())) { } else if (check->Equals(Heap::boolean_symbol())) {
__ cmp(r1, Operand(Factory::true_value())); __ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq); true_target()->Branch(eq);
__ cmp(r1, Operand(Factory::false_value())); __ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
cc_reg_ = eq; cc_reg_ = eq;
} else if (check->Equals(Heap::undefined_symbol())) { } else if (check->Equals(Heap::undefined_symbol())) {
__ cmp(r1, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq); true_target()->Branch(eq);
__ tst(r1, Operand(kSmiTagMask)); __ tst(r1, Operand(kSmiTagMask));
@@ -3975,7 +3995,8 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
false_target()->Branch(eq); false_target()->Branch(eq);
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::null_value())); __ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r1, ip);
true_target()->Branch(eq); true_target()->Branch(eq);
// It can be an undetectable object. // It can be an undetectable object.
@@ -4206,7 +4227,8 @@ void Reference::SetValue(InitState init_state) {
// executed, the code is identical to a normal store (see below). // executed, the code is identical to a normal store (see below).
Comment cmnt(masm, "[ Init const"); Comment cmnt(masm, "[ Init const");
__ ldr(r2, cgen_->SlotOperand(slot, r2)); __ ldr(r2, cgen_->SlotOperand(slot, r2));
__ cmp(r2, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r2, ip);
exit.Branch(ne); exit.Branch(ne);
} }
@@ -4939,7 +4961,7 @@ static void AllocateHeapNumber(
// Tag and adjust back to start of new object. // Tag and adjust back to start of new object.
__ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag)); __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag));
// Get heap number map into scratch2. // Get heap number map into scratch2.
__ mov(scratch2, Operand(Factory::heap_number_map())); __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
// Store heap number map in new object. // Store heap number map in new object.
__ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset)); __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset));
} }
@@ -6090,7 +6112,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ bind(&loop); __ bind(&loop);
__ cmp(r2, Operand(r4)); __ cmp(r2, Operand(r4));
__ b(eq, &is_instance); __ b(eq, &is_instance);
__ cmp(r2, Operand(Factory::null_value())); __ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(r2, ip);
__ b(eq, &is_not_instance); __ b(eq, &is_not_instance);
__ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
__ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));

2
deps/v8/src/arm/disasm-arm.cc

@@ -842,7 +842,7 @@ static const int kMaxRegisters = 16;
// formatting. See for example the command "objdump -d <binary file>". // formatting. See for example the command "objdump -d <binary file>".
static const char* reg_names[kMaxRegisters] = { static const char* reg_names[kMaxRegisters] = {
"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
"r8", "r9", "sl", "fp", "ip", "sp", "lr", "pc", "r8", "r9", "r10", "fp", "ip", "sp", "lr", "pc",
}; };

21
deps/v8/src/arm/ic-arm.cc

@@ -87,7 +87,8 @@ static void GenerateDictionaryLoad(MacroAssembler* masm,
// Check that the properties array is a dictionary. // Check that the properties array is a dictionary.
__ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset)); __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
__ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset)); __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
__ cmp(r3, Operand(Factory::hash_table_map())); __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
__ cmp(r3, ip);
__ b(ne, miss); __ b(ne, miss);
// Compute the capacity mask. // Compute the capacity mask.
@@ -254,9 +255,11 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Check for boolean. // Check for boolean.
__ bind(&non_string); __ bind(&non_string);
__ cmp(r1, Operand(Factory::true_value())); __ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
__ b(eq, &boolean); __ b(eq, &boolean);
__ cmp(r1, Operand(Factory::false_value())); __ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
__ b(ne, &miss); __ b(ne, &miss);
__ bind(&boolean); __ bind(&boolean);
StubCompiler::GenerateLoadGlobalFunctionPrototype( StubCompiler::GenerateLoadGlobalFunctionPrototype(
@@ -582,7 +585,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset)); __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary). // Check that the object is in fast mode (not dictionary).
__ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
__ cmp(r3, Operand(Factory::fixed_array_map())); __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r3, ip);
__ b(ne, &slow); __ b(ne, &slow);
// Check that the key (index) is within bounds. // Check that the key (index) is within bounds.
__ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset)); __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
@@ -601,7 +605,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&fast); __ bind(&fast);
__ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2)); __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
__ cmp(r0, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
// In case the loaded value is the_hole we have to consult GetProperty // In case the loaded value is the_hole we have to consult GetProperty
// to ensure the prototype chain is searched. // to ensure the prototype chain is searched.
__ b(eq, &slow); __ b(eq, &slow);
@@ -661,7 +666,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset)); __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary). // Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ cmp(r2, Operand(Factory::fixed_array_map())); __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r2, ip);
__ b(ne, &slow); __ b(ne, &slow);
// Untag the key (for checking against untagged length in the fixed array). // Untag the key (for checking against untagged length in the fixed array).
__ mov(r1, Operand(r1, ASR, kSmiTagSize)); __ mov(r1, Operand(r1, ASR, kSmiTagSize));
@ -710,7 +716,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&array); __ bind(&array);
__ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset)); __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
__ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
__ cmp(r1, Operand(Factory::fixed_array_map())); __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r1, ip);
__ b(ne, &slow); __ b(ne, &slow);
// Check the key against the length in the array, compute the // Check the key against the length in the array, compute the

21
deps/v8/src/arm/macro-assembler-arm.cc

@ -174,6 +174,13 @@ void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) {
} }
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond) {
ldr(destination, MemOperand(r10, index << kPointerSizeLog2), cond);
}
// Will clobber 4 registers: object, offset, scratch, ip. The // Will clobber 4 registers: object, offset, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object // register 'object' contains a heap object pointer. The heap object
// tag is shifted away. // tag is shifted away.
@ -714,7 +721,8 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
push(holder_reg); // Temporarily save holder on the stack. push(holder_reg); // Temporarily save holder on the stack.
// Read the first word and compare to the global_context_map. // Read the first word and compare to the global_context_map.
ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
cmp(holder_reg, Operand(Factory::global_context_map())); LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
cmp(holder_reg, ip);
Check(eq, "JSGlobalObject::global_context should be a global context."); Check(eq, "JSGlobalObject::global_context should be a global context.");
pop(holder_reg); // Restore holder. pop(holder_reg); // Restore holder.
} }
@ -731,11 +739,13 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// that ip is clobbered as part of cmp with an object Operand. // that ip is clobbered as part of cmp with an object Operand.
push(holder_reg); // Temporarily save holder on the stack. push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, ip); // Move ip to its holding place. mov(holder_reg, ip); // Move ip to its holding place.
cmp(holder_reg, Operand(Factory::null_value())); LoadRoot(ip, Heap::kNullValueRootIndex);
cmp(holder_reg, ip);
Check(ne, "JSGlobalProxy::context() should not be null."); Check(ne, "JSGlobalProxy::context() should not be null.");
ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
cmp(holder_reg, Operand(Factory::global_context_map())); LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
cmp(holder_reg, ip);
Check(eq, "JSGlobalObject::global_context should be a global context."); Check(eq, "JSGlobalObject::global_context should be a global context.");
// Restore ip is not needed. ip is reloaded below. // Restore ip is not needed. ip is reloaded below.
pop(holder_reg); // Restore holder. pop(holder_reg); // Restore holder.
@ -792,7 +802,8 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// If the prototype or initial map is the hole, don't return it and // If the prototype or initial map is the hole, don't return it and
// simply miss the cache instead. This will allow us to allocate a // simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system. // prototype object on-demand in the runtime system.
cmp(result, Operand(Factory::the_hole_value())); LoadRoot(ip, Heap::kTheHoleValueRootIndex);
cmp(result, ip);
b(eq, miss); b(eq, miss);
// If the function does not have an initial map, we're done. // If the function does not have an initial map, we're done.
@ -832,7 +843,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) { if (num_arguments > 0) {
add(sp, sp, Operand(num_arguments * kPointerSize)); add(sp, sp, Operand(num_arguments * kPointerSize));
} }
mov(r0, Operand(Factory::undefined_value())); LoadRoot(r0, Heap::kUndefinedValueRootIndex);
} }

4
deps/v8/src/arm/macro-assembler-arm.h

@ -89,6 +89,10 @@ class MacroAssembler: public Assembler {
void Ret(Condition cond = al); void Ret(Condition cond = al);
// Jumps to the label at the index given by the Smi in "index". // Jumps to the label at the index given by the Smi in "index".
void SmiJumpTable(Register index, Vector<Label*> targets); void SmiJumpTable(Register index, Vector<Label*> targets);
// Load an object from the root table.
void LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond = al);
// Sets the remembered set bit for [address+offset], where address is the // Sets the remembered set bit for [address+offset], where address is the
// address of the heap object 'object'. The address must be in the first 8K // address of the heap object 'object'. The address must be in the first 8K

15
deps/v8/src/arm/stub-cache-arm.cc

@ -395,7 +395,8 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
__ mov(scratch, Operand(Handle<Object>(cell))); __ mov(scratch, Operand(Handle<Object>(cell)));
__ ldr(scratch, __ ldr(scratch,
FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
__ cmp(scratch, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(scratch, ip);
__ b(ne, miss); __ b(ne, miss);
} }
object = JSObject::cast(object->GetPrototype()); object = JSObject::cast(object->GetPrototype());
@ -667,9 +668,11 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
case BOOLEAN_CHECK: { case BOOLEAN_CHECK: {
Label fast; Label fast;
// Check that the object is a boolean. // Check that the object is a boolean.
__ cmp(r1, Operand(Factory::true_value())); __ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r1, ip);
__ b(eq, &fast); __ b(eq, &fast);
__ cmp(r1, Operand(Factory::false_value())); __ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r1, ip);
__ b(ne, &miss); __ b(ne, &miss);
__ bind(&fast); __ bind(&fast);
// Check that the maps starting from the prototype haven't changed. // Check that the maps starting from the prototype haven't changed.
@ -688,7 +691,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
__ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset)); __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary). // Check that the object is in fast mode (not dictionary).
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ cmp(r2, Operand(Factory::fixed_array_map())); __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(r2, ip);
__ b(ne, &miss); __ b(ne, &miss);
break; break;
@ -1108,7 +1112,8 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
// Check for deleted property if property can actually be deleted. // Check for deleted property if property can actually be deleted.
if (!is_dont_delete) { if (!is_dont_delete) {
__ cmp(r0, Operand(Factory::the_hole_value())); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(r0, ip);
__ b(eq, &miss); __ b(eq, &miss);
} }

2
deps/v8/src/arm/virtual-frame-arm.cc

@ -139,7 +139,7 @@ void VirtualFrame::AllocateStackSlots() {
Comment cmnt(masm(), "[ Allocate space for locals"); Comment cmnt(masm(), "[ Allocate space for locals");
Adjust(count); Adjust(count);
// Initialize stack slots with 'undefined' value. // Initialize stack slots with 'undefined' value.
__ mov(ip, Operand(Factory::undefined_value())); __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
__ push(ip); __ push(ip);
} }

5
deps/v8/src/assembler.cc

@ -563,6 +563,11 @@ ExternalReference ExternalReference::the_hole_value_location() {
} }
ExternalReference ExternalReference::roots_address() {
return ExternalReference(Heap::roots_address());
}
ExternalReference ExternalReference::address_of_stack_guard_limit() { ExternalReference ExternalReference::address_of_stack_guard_limit() {
return ExternalReference(StackGuard::address_of_jslimit()); return ExternalReference(StackGuard::address_of_jslimit());
} }

3
deps/v8/src/assembler.h

@ -401,6 +401,9 @@ class ExternalReference BASE_EMBEDDED {
// Static variable Factory::the_hole_value.location() // Static variable Factory::the_hole_value.location()
static ExternalReference the_hole_value_location(); static ExternalReference the_hole_value_location();
// Static variable Heap::roots_address()
static ExternalReference roots_address();
// Static variable StackGuard::address_of_jslimit() // Static variable StackGuard::address_of_jslimit()
static ExternalReference address_of_stack_guard_limit(); static ExternalReference address_of_stack_guard_limit();

6
deps/v8/src/debug.cc

@ -1548,8 +1548,8 @@ void Debug::CreateScriptCache() {
// Perform two GCs to get rid of all unreferenced scripts. The first GC gets // Perform two GCs to get rid of all unreferenced scripts. The first GC gets
// rid of all the cached script wrappers and the second gets rid of the // rid of all the cached script wrappers and the second gets rid of the
// scripts which is no longer referenced. // scripts which is no longer referenced.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
ASSERT(script_cache_ == NULL); ASSERT(script_cache_ == NULL);
script_cache_ = new ScriptCache(); script_cache_ = new ScriptCache();
@ -1599,7 +1599,7 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
// Perform GC to get unreferenced scripts evicted from the cache before // Perform GC to get unreferenced scripts evicted from the cache before
// returning the content. // returning the content.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
// Get the scripts from the cache. // Get the scripts from the cache.
return script_cache_->GetScripts(); return script_cache_->GetScripts();

2
deps/v8/src/execution.cc

@ -677,7 +677,7 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) { v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
// All allocation spaces other than NEW_SPACE have the same effect. // All allocation spaces other than NEW_SPACE have the same effect.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
return v8::Undefined(); return v8::Undefined();
} }

4
deps/v8/src/heap-inl.h

@ -238,7 +238,7 @@ int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
amount_of_external_allocated_memory_ - amount_of_external_allocated_memory_ -
amount_of_external_allocated_memory_at_last_global_gc_; amount_of_external_allocated_memory_at_last_global_gc_;
if (amount_since_last_global_gc > external_allocation_limit_) { if (amount_since_last_global_gc > external_allocation_limit_) {
CollectAllGarbage(); CollectAllGarbage(false);
} }
} else { } else {
// Avoid underflow. // Avoid underflow.
@ -285,7 +285,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
} \ } \
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \ if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
Counters::gc_last_resort_from_handles.Increment(); \ Counters::gc_last_resort_from_handles.Increment(); \
Heap::CollectAllGarbage(); \ Heap::CollectAllGarbage(false); \
{ \ { \
AlwaysAllocateScope __scope__; \ AlwaysAllocateScope __scope__; \
__object__ = FUNCTION_CALL; \ __object__ = FUNCTION_CALL; \

13
deps/v8/src/heap.cc

@ -332,7 +332,7 @@ void Heap::CollectAllGarbageIfContextDisposed() {
// informed decisions about when to force a collection. // informed decisions about when to force a collection.
if (!FLAG_expose_gc && context_disposed_pending_) { if (!FLAG_expose_gc && context_disposed_pending_) {
HistogramTimerScope scope(&Counters::gc_context); HistogramTimerScope scope(&Counters::gc_context);
CollectAllGarbage(); CollectAllGarbage(false);
} }
context_disposed_pending_ = false; context_disposed_pending_ = false;
} }
@ -465,8 +465,9 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
old_gen_allocation_limit_ = old_gen_allocation_limit_ =
old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2); old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
old_gen_exhausted_ = false; old_gen_exhausted_ = false;
} } else {
Scavenge(); Scavenge();
}
Counters::objs_since_last_young.Set(0); Counters::objs_since_last_young.Set(0);
PostGarbageCollectionProcessing(); PostGarbageCollectionProcessing();
@ -520,6 +521,12 @@ void Heap::MarkCompact(GCTracer* tracer) {
Counters::objs_since_last_full.Set(0); Counters::objs_since_last_full.Set(0);
context_disposed_pending_ = false; context_disposed_pending_ = false;
Scavenge();
// Shrink new space as much as possible after compacting full
// garbage collections.
if (is_compacting) new_space_.Shrink();
} }
@ -668,8 +675,6 @@ void Heap::Scavenge() {
survived_since_last_expansion_ > new_space_.Capacity()) { survived_since_last_expansion_ > new_space_.Capacity()) {
// Grow the size of new space if there is room to grow and enough // Grow the size of new space if there is room to grow and enough
// data has survived scavenge since the last expansion. // data has survived scavenge since the last expansion.
// TODO(1240712): NewSpace::Grow has a return value which is
// ignored here.
new_space_.Grow(); new_space_.Grow();
survived_since_last_expansion_ = 0; survived_since_last_expansion_ = 0;
} }

78
deps/v8/src/heap.h

@ -629,7 +629,7 @@ class Heap : public AllStatic {
// Performs a full garbage collection. Force compaction if the // Performs a full garbage collection. Force compaction if the
// parameter is true. // parameter is true.
static void CollectAllGarbage(bool force_compaction = false); static void CollectAllGarbage(bool force_compaction);
// Performs a full garbage collection if a context has been disposed // Performs a full garbage collection if a context has been disposed
// since the last time the check was performed. // since the last time the check was performed.
@ -733,6 +733,9 @@ class Heap : public AllStatic {
// Update the next script id. // Update the next script id.
static inline void SetLastScriptId(Object* last_script_id); static inline void SetLastScriptId(Object* last_script_id);
// Generated code can embed this address to get access to the roots.
static Object** roots_address() { return roots_; }
#ifdef DEBUG #ifdef DEBUG
static void Print(); static void Print();
static void PrintHandles(); static void PrintHandles();
@ -839,6 +842,59 @@ class Heap : public AllStatic {
> old_gen_allocation_limit_; > old_gen_allocation_limit_;
} }
// Can be called when the embedding application is idle.
static bool IdleNotification() {
static const int kIdlesBeforeCollection = 7;
static int number_idle_notifications = 0;
static int last_gc_count = gc_count_;
bool finished = false;
if (last_gc_count == gc_count_) {
number_idle_notifications++;
} else {
number_idle_notifications = 0;
last_gc_count = gc_count_;
}
if (number_idle_notifications >= kIdlesBeforeCollection) {
// The first time through we collect without forcing compaction.
// The second time through we force compaction and quit.
bool force_compaction =
number_idle_notifications > kIdlesBeforeCollection;
CollectAllGarbage(force_compaction);
last_gc_count = gc_count_;
if (force_compaction) {
number_idle_notifications = 0;
finished = true;
}
}
// Uncommit unused memory in new space.
Heap::UncommitFromSpace();
return finished;
}
// Declare all the root indices.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_DECLARATION
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
};
private: private:
static int semispace_size_; static int semispace_size_;
static int initial_semispace_size_; static int initial_semispace_size_;
@ -923,26 +979,6 @@ class Heap : public AllStatic {
// last GC. // last GC.
static int old_gen_exhausted_; static int old_gen_exhausted_;
// Declare all the root indices.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_DECLARATION
kSymbolTableRootIndex,
kStrongRootListLength = kSymbolTableRootIndex,
kRootListLength
};
static Object* roots_[kRootListLength]; static Object* roots_[kRootListLength];
struct StringTypeTable { struct StringTypeTable {

2
deps/v8/src/mksnapshot.cc

@ -171,7 +171,7 @@ int main(int argc, char** argv) {
} }
} }
// Get rid of unreferenced scripts with a global GC. // Get rid of unreferenced scripts with a global GC.
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
i::Serializer ser; i::Serializer ser;
ser.Serialize(); ser.Serialize();
v8::internal::byte* bytes; v8::internal::byte* bytes;

13
deps/v8/src/objects-inl.h

@ -814,15 +814,13 @@ Failure* Failure::RetryAfterGC(int requested_bytes) {
Failure* Failure::Construct(Type type, int value) { Failure* Failure::Construct(Type type, int value) {
int info = (value << kFailureTypeTagSize) | type; int info = (value << kFailureTypeTagSize) | type;
// TODO(X64): Stop using Smi validation for non-smi checks, even if they ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
// happen to be identical at the moment.
ASSERT(Smi::IsValid(info)); // Same validation check as in Smi
return reinterpret_cast<Failure*>( return reinterpret_cast<Failure*>(
(static_cast<intptr_t>(info) << kFailureTagSize) | kFailureTag); (static_cast<intptr_t>(info) << kFailureTagSize) | kFailureTag);
} }
bool Smi::IsValid(int value) { bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG #ifdef DEBUG
bool in_range = (value >= kMinValue) && (value <= kMaxValue); bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif #endif
@ -937,12 +935,13 @@ MapWord MapWord::EncodeAddress(Address map_address, int offset) {
Address MapWord::DecodeMapAddress(MapSpace* map_space) { Address MapWord::DecodeMapAddress(MapSpace* map_space) {
int map_page_index = (value_ & kMapPageIndexMask) >> kMapPageIndexShift; int map_page_index =
static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
ASSERT_MAP_PAGE_INDEX(map_page_index); ASSERT_MAP_PAGE_INDEX(map_page_index);
int map_page_offset = int map_page_offset = static_cast<int>(
((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift)
<< kObjectAlignmentBits; << kObjectAlignmentBits);
return (map_space->PageAddress(map_page_index) + map_page_offset); return (map_space->PageAddress(map_page_index) + map_page_offset);
} }

2
deps/v8/src/objects.h

@ -905,7 +905,7 @@ class Smi: public Object {
static inline Smi* FromIntptr(intptr_t value); static inline Smi* FromIntptr(intptr_t value);
// Returns whether value can be represented in a Smi. // Returns whether value can be represented in a Smi.
static inline bool IsValid(int value); static inline bool IsValid(intptr_t value);
static inline bool IsIntptrValid(intptr_t); static inline bool IsIntptrValid(intptr_t);

18
deps/v8/src/platform-win32.cc

@ -54,10 +54,6 @@
#define _WIN32_WINNT 0x500 #define _WIN32_WINNT 0x500
#endif #endif
#ifdef _WIN64
#error Windows 64-bit blatforms not supported
#endif
#include <windows.h> #include <windows.h>
#include <time.h> // For LocalOffset() implementation. #include <time.h> // For LocalOffset() implementation.
@ -1190,6 +1186,9 @@ int OS::StackWalk(Vector<OS::StackFrame> frames) {
memset(&context, 0, sizeof(context)); memset(&context, 0, sizeof(context));
context.ContextFlags = CONTEXT_CONTROL; context.ContextFlags = CONTEXT_CONTROL;
context.ContextFlags = CONTEXT_CONTROL; context.ContextFlags = CONTEXT_CONTROL;
#ifdef _WIN64
// TODO(X64): Implement context capture.
#else
__asm call x __asm call x
__asm x: pop eax __asm x: pop eax
__asm mov context.Eip, eax __asm mov context.Eip, eax
@ -1199,15 +1198,22 @@ int OS::StackWalk(Vector<OS::StackFrame> frames) {
// capture the context instead of inline assembler. However it is // capture the context instead of inline assembler. However it is
// only available on XP, Vista, Server 2003 and Server 2008 which // only available on XP, Vista, Server 2003 and Server 2008 which
// might not be sufficient. // might not be sufficient.
#endif
// Initialize the stack walking // Initialize the stack walking
STACKFRAME64 stack_frame; STACKFRAME64 stack_frame;
memset(&stack_frame, 0, sizeof(stack_frame)); memset(&stack_frame, 0, sizeof(stack_frame));
#ifdef _WIN64
stack_frame.AddrPC.Offset = context.Rip;
stack_frame.AddrFrame.Offset = context.Rbp;
stack_frame.AddrStack.Offset = context.Rsp;
#else
stack_frame.AddrPC.Offset = context.Eip; stack_frame.AddrPC.Offset = context.Eip;
stack_frame.AddrPC.Mode = AddrModeFlat;
stack_frame.AddrFrame.Offset = context.Ebp; stack_frame.AddrFrame.Offset = context.Ebp;
stack_frame.AddrFrame.Mode = AddrModeFlat;
stack_frame.AddrStack.Offset = context.Esp; stack_frame.AddrStack.Offset = context.Esp;
#endif
stack_frame.AddrPC.Mode = AddrModeFlat;
stack_frame.AddrFrame.Mode = AddrModeFlat;
stack_frame.AddrStack.Mode = AddrModeFlat; stack_frame.AddrStack.Mode = AddrModeFlat;
int frames_count = 0; int frames_count = 0;

6
deps/v8/src/runtime.cc

@ -7263,7 +7263,7 @@ static Object* Runtime_DebugReferencedBy(Arguments args) {
ASSERT(args.length() == 3); ASSERT(args.length() == 3);
// First perform a full GC in order to avoid references from dead objects. // First perform a full GC in order to avoid references from dead objects.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
// Check parameters. // Check parameters.
CONVERT_CHECKED(JSObject, target, args[0]); CONVERT_CHECKED(JSObject, target, args[0]);
@ -7339,7 +7339,7 @@ static Object* Runtime_DebugConstructedBy(Arguments args) {
ASSERT(args.length() == 2); ASSERT(args.length() == 2);
// First perform a full GC in order to avoid dead objects. // First perform a full GC in order to avoid dead objects.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
// Check parameters. // Check parameters.
CONVERT_CHECKED(JSFunction, constructor, args[0]); CONVERT_CHECKED(JSFunction, constructor, args[0]);
@ -7633,7 +7633,7 @@ void Runtime::PerformGC(Object* result) {
// Handle last resort GC and make sure to allow future allocations // Handle last resort GC and make sure to allow future allocations
// to grow the heap without causing GCs (if possible). // to grow the heap without causing GCs (if possible).
Counters::gc_last_resort_from_js.Increment(); Counters::gc_last_resort_from_js.Increment();
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
} }
} }

24
deps/v8/src/serialize.cc

@ -672,13 +672,17 @@ void ExternalReferenceTable::PopulateTable() {
UNCLASSIFIED, UNCLASSIFIED,
2, 2,
"Factory::the_hole_value().location()"); "Factory::the_hole_value().location()");
Add(ExternalReference::address_of_stack_guard_limit().address(), Add(ExternalReference::roots_address().address(),
UNCLASSIFIED, UNCLASSIFIED,
3, 3,
"Heap::roots_address()");
Add(ExternalReference::address_of_stack_guard_limit().address(),
UNCLASSIFIED,
4,
"StackGuard::address_of_jslimit()"); "StackGuard::address_of_jslimit()");
Add(ExternalReference::address_of_regexp_stack_limit().address(), Add(ExternalReference::address_of_regexp_stack_limit().address(),
UNCLASSIFIED, UNCLASSIFIED,
4, 5,
"RegExpStack::limit_address()"); "RegExpStack::limit_address()");
Add(ExternalReference::new_space_start().address(), Add(ExternalReference::new_space_start().address(),
UNCLASSIFIED, UNCLASSIFIED,
@ -699,36 +703,36 @@ void ExternalReferenceTable::PopulateTable() {
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
Add(ExternalReference::debug_break().address(), Add(ExternalReference::debug_break().address(),
UNCLASSIFIED, UNCLASSIFIED,
5, 10,
"Debug::Break()"); "Debug::Break()");
Add(ExternalReference::debug_step_in_fp_address().address(), Add(ExternalReference::debug_step_in_fp_address().address(),
UNCLASSIFIED, UNCLASSIFIED,
10, 11,
"Debug::step_in_fp_addr()"); "Debug::step_in_fp_addr()");
#endif #endif
Add(ExternalReference::double_fp_operation(Token::ADD).address(), Add(ExternalReference::double_fp_operation(Token::ADD).address(),
UNCLASSIFIED, UNCLASSIFIED,
11, 12,
"add_two_doubles"); "add_two_doubles");
Add(ExternalReference::double_fp_operation(Token::SUB).address(), Add(ExternalReference::double_fp_operation(Token::SUB).address(),
UNCLASSIFIED, UNCLASSIFIED,
12, 13,
"sub_two_doubles"); "sub_two_doubles");
Add(ExternalReference::double_fp_operation(Token::MUL).address(), Add(ExternalReference::double_fp_operation(Token::MUL).address(),
UNCLASSIFIED, UNCLASSIFIED,
13, 14,
"mul_two_doubles"); "mul_two_doubles");
Add(ExternalReference::double_fp_operation(Token::DIV).address(), Add(ExternalReference::double_fp_operation(Token::DIV).address(),
UNCLASSIFIED, UNCLASSIFIED,
14, 15,
"div_two_doubles"); "div_two_doubles");
Add(ExternalReference::double_fp_operation(Token::MOD).address(), Add(ExternalReference::double_fp_operation(Token::MOD).address(),
UNCLASSIFIED, UNCLASSIFIED,
15, 16,
"mod_two_doubles"); "mod_two_doubles");
Add(ExternalReference::compare_doubles().address(), Add(ExternalReference::compare_doubles().address(),
UNCLASSIFIED, UNCLASSIFIED,
16, 17,
"compare_doubles"); "compare_doubles");
} }

67
deps/v8/src/spaces.cc

@ -951,15 +951,43 @@ void NewSpace::Flip() {
} }
bool NewSpace::Grow() { void NewSpace::Grow() {
ASSERT(Capacity() < MaximumCapacity()); ASSERT(Capacity() < MaximumCapacity());
// TODO(1240712): Failure to double the from space can result in if (to_space_.Grow()) {
// semispaces of different sizes. In the event of that failure, the // Only grow from space if we managed to grow to space.
// to space doubling should be rolled back before returning false. if (!from_space_.Grow()) {
if (!to_space_.Grow() || !from_space_.Grow()) return false; // If we managed to grow to space but couldn't grow from space,
// attempt to shrink to space.
if (!to_space_.ShrinkTo(from_space_.Capacity())) {
// We are in an inconsistent state because we could not
// commit/uncommit memory from new space.
V8::FatalProcessOutOfMemory("Failed to grow new space.");
}
}
}
allocation_info_.limit = to_space_.high();
ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
}
void NewSpace::Shrink() {
int new_capacity = Max(InitialCapacity(), 2 * Size());
int rounded_new_capacity = RoundUp(new_capacity, OS::AllocateAlignment());
if (rounded_new_capacity < Capacity() &&
to_space_.ShrinkTo(rounded_new_capacity)) {
// Only shrink from space if we managed to shrink to space.
if (!from_space_.ShrinkTo(rounded_new_capacity)) {
// If we managed to shrink to space but couldn't shrink from
// space, attempt to grow to space again.
if (!to_space_.GrowTo(from_space_.Capacity())) {
// We are in an inconsistent state because we could not
// commit/uncommit memory from new space.
V8::FatalProcessOutOfMemory("Failed to shrink new space.");
}
}
}
allocation_info_.limit = to_space_.high(); allocation_info_.limit = to_space_.high();
ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
return true;
} }
@ -1058,6 +1086,7 @@ bool SemiSpace::Setup(Address start,
// otherwise. In the mark-compact collector, the memory region of the from // otherwise. In the mark-compact collector, the memory region of the from
// space is used as the marking stack. It requires contiguous memory // space is used as the marking stack. It requires contiguous memory
// addresses. // addresses.
initial_capacity_ = initial_capacity;
capacity_ = initial_capacity; capacity_ = initial_capacity;
maximum_capacity_ = maximum_capacity; maximum_capacity_ = maximum_capacity;
committed_ = false; committed_ = false;
@ -1091,6 +1120,32 @@ bool SemiSpace::Grow() {
} }
bool SemiSpace::GrowTo(int new_capacity) {
ASSERT(new_capacity <= maximum_capacity_);
ASSERT(new_capacity > capacity_);
size_t delta = new_capacity - capacity_;
ASSERT(IsAligned(delta, OS::AllocateAlignment()));
if (!MemoryAllocator::CommitBlock(high(), delta, executable())) {
return false;
}
capacity_ = new_capacity;
return true;
}
bool SemiSpace::ShrinkTo(int new_capacity) {
ASSERT(new_capacity >= initial_capacity_);
ASSERT(new_capacity < capacity_);
size_t delta = capacity_ - new_capacity;
ASSERT(IsAligned(delta, OS::AllocateAlignment()));
if (!MemoryAllocator::UncommitBlock(high() - delta, delta)) {
return false;
}
capacity_ = new_capacity;
return true;
}
#ifdef DEBUG #ifdef DEBUG
void SemiSpace::Print() { } void SemiSpace::Print() { }

25
deps/v8/src/spaces.h

@ -1010,6 +1010,15 @@ class SemiSpace : public Space {
// address range to grow). // address range to grow).
bool Grow(); bool Grow();
// Grow the semispace to the new capacity. The new capacity
// requested must be larger than the current capacity.
bool GrowTo(int new_capacity);
// Shrinks the semispace to the new capacity. The new capacity
// requested must be more than the amount of used memory in the
// semispace and less than the current capacity.
bool ShrinkTo(int new_capacity);
// Returns the start address of the space. // Returns the start address of the space.
Address low() { return start_; } Address low() { return start_; }
// Returns one past the end address of the space. // Returns one past the end address of the space.
@ -1057,11 +1066,14 @@ class SemiSpace : public Space {
// Returns the maximum capacity of the semi space. // Returns the maximum capacity of the semi space.
int MaximumCapacity() { return maximum_capacity_; } int MaximumCapacity() { return maximum_capacity_; }
// Returns the initial capacity of the semi space.
int InitialCapacity() { return initial_capacity_; }
private: private:
// The current and maximum capacity of the space. // The current and maximum capacity of the space.
int capacity_; int capacity_;
int maximum_capacity_; int maximum_capacity_;
int initial_capacity_;
// The start address of the space. // The start address of the space.
Address start_; Address start_;
@ -1152,8 +1164,11 @@ class NewSpace : public Space {
void Flip(); void Flip();
// Grow the capacity of the semispaces. Assumes that they are not at // Grow the capacity of the semispaces. Assumes that they are not at
// their maximum capacity. Returns a flag indicating success or failure. // their maximum capacity.
bool Grow(); void Grow();
// Shrink the capacity of the semispaces.
void Shrink();
// True if the address or object lies in the address range of either // True if the address or object lies in the address range of either
// semispace (not necessarily below the allocation pointer). // semispace (not necessarily below the allocation pointer).
@ -1181,6 +1196,12 @@ class NewSpace : public Space {
return to_space_.MaximumCapacity(); return to_space_.MaximumCapacity();
} }
// Returns the initial capacity of a semispace.
int InitialCapacity() {
ASSERT(to_space_.InitialCapacity() == from_space_.InitialCapacity());
return to_space_.InitialCapacity();
}
// Return the address of the allocation pointer in the active semispace. // Return the address of the allocation pointer in the active semispace.
Address top() { return allocation_info_.top; } Address top() { return allocation_info_.top; }
// Return the address of the first object in the active semispace. // Return the address of the first object in the active semispace.

12
deps/v8/src/utils.h

@ -114,8 +114,10 @@ static inline bool IsAligned(T value, T alignment) {
// Returns true if (addr + offset) is aligned. // Returns true if (addr + offset) is aligned.
static inline bool IsAddressAligned(Address addr, int alignment, int offset) { static inline bool IsAddressAligned(Address addr,
int offs = OffsetFrom(addr + offset); intptr_t alignment,
int offset) {
intptr_t offs = OffsetFrom(addr + offset);
return IsAligned(offs, alignment); return IsAligned(offs, alignment);
} }
@ -446,15 +448,15 @@ class ScopedVector : public Vector<T> {
inline Vector<const char> CStrVector(const char* data) { inline Vector<const char> CStrVector(const char* data) {
return Vector<const char>(data, strlen(data)); return Vector<const char>(data, static_cast<int>(strlen(data)));
} }
inline Vector<char> MutableCStrVector(char* data) { inline Vector<char> MutableCStrVector(char* data) {
return Vector<char>(data, strlen(data)); return Vector<char>(data, static_cast<int>(strlen(data)));
} }
inline Vector<char> MutableCStrVector(char* data, int max) { inline Vector<char> MutableCStrVector(char* data, int max) {
int length = strlen(data); int length = static_cast<int>(strlen(data));
return Vector<char>(data, (length < max) ? length : max); return Vector<char>(data, (length < max) ? length : max);
} }

10
deps/v8/src/v8.cc

@ -157,13 +157,13 @@ uint32_t V8::Random() {
} }
void V8::IdleNotification(bool is_high_priority) { bool V8::IdleNotification(bool is_high_priority) {
if (!FLAG_use_idle_notification) return; if (!FLAG_use_idle_notification) return false;
// Ignore high priority instances of V8. // Ignore high priority instances of V8.
if (is_high_priority) return; if (is_high_priority) return false;
// Uncommit unused memory in new space. // Tell the heap that it may want to adjust.
Heap::UncommitFromSpace(); return Heap::IdleNotification();
} }

2
deps/v8/src/v8.h

@ -100,7 +100,7 @@ class V8 : public AllStatic {
static Smi* RandomPositiveSmi(); static Smi* RandomPositiveSmi();
// Idle notification directly from the API. // Idle notification directly from the API.
static void IdleNotification(bool is_high_priority); static bool IdleNotification(bool is_high_priority);
private: private:
// True if engine is currently running // True if engine is currently running

2
deps/v8/src/version.cc

@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script. // cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1 #define MAJOR_VERSION 1
#define MINOR_VERSION 3 #define MINOR_VERSION 3
#define BUILD_NUMBER 6 #define BUILD_NUMBER 7
#define PATCH_LEVEL 0 #define PATCH_LEVEL 0
#define CANDIDATE_VERSION false #define CANDIDATE_VERSION false

6
deps/v8/src/x64/codegen-x64.cc

@ -4586,7 +4586,7 @@ Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
: RelocInfo::CODE_TARGET_CONTEXT; : RelocInfo::CODE_TARGET_CONTEXT;
Result answer = frame_->CallLoadIC(mode); Result answer = frame_->CallLoadIC(mode);
// A test rax instruction following the call signals that the inobject // A test rax instruction following the call signals that the inobject
// property case was inlined. Ensure that there is not a test eax // property case was inlined. Ensure that there is not a test rax
// instruction here. // instruction here.
masm_->nop(); masm_->nop();
// Discard the global object. The result is in answer. // Discard the global object. The result is in answer.
@ -6264,8 +6264,8 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
} else { } else {
unsigned_left >>= shift_amount; unsigned_left >>= shift_amount;
} }
ASSERT(Smi::IsValid(unsigned_left)); // Converted to signed. ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
answer_object = Smi::FromInt(unsigned_left); // Converted to signed. answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
break; break;
} }
default: default:

64
deps/v8/test/cctest/test-api.cc

@ -462,11 +462,11 @@ THREADED_TEST(ScriptUsingStringResource) {
CHECK(source->IsExternal()); CHECK(source->IsExternal());
CHECK_EQ(resource, CHECK_EQ(resource,
static_cast<TestResource*>(source->GetExternalStringResource())); static_cast<TestResource*>(source->GetExternalStringResource()));
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(0, TestResource::dispose_count); CHECK_EQ(0, TestResource::dispose_count);
} }
v8::internal::CompilationCache::Clear(); v8::internal::CompilationCache::Clear();
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(1, TestResource::dispose_count); CHECK_EQ(1, TestResource::dispose_count);
} }
@ -483,11 +483,11 @@ THREADED_TEST(ScriptUsingAsciiStringResource) {
Local<Value> value = script->Run(); Local<Value> value = script->Run();
CHECK(value->IsNumber()); CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value()); CHECK_EQ(7, value->Int32Value());
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(0, TestAsciiResource::dispose_count); CHECK_EQ(0, TestAsciiResource::dispose_count);
} }
v8::internal::CompilationCache::Clear(); v8::internal::CompilationCache::Clear();
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(1, TestAsciiResource::dispose_count); CHECK_EQ(1, TestAsciiResource::dispose_count);
} }
@ -505,11 +505,11 @@ THREADED_TEST(ScriptMakingExternalString) {
Local<Value> value = script->Run(); Local<Value> value = script->Run();
CHECK(value->IsNumber()); CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value()); CHECK_EQ(7, value->Int32Value());
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(0, TestResource::dispose_count); CHECK_EQ(0, TestResource::dispose_count);
} }
v8::internal::CompilationCache::Clear(); v8::internal::CompilationCache::Clear();
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(1, TestResource::dispose_count); CHECK_EQ(1, TestResource::dispose_count);
} }
@ -528,11 +528,11 @@ THREADED_TEST(ScriptMakingExternalAsciiString) {
Local<Value> value = script->Run(); Local<Value> value = script->Run();
CHECK(value->IsNumber()); CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value()); CHECK_EQ(7, value->Int32Value());
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(0, TestAsciiResource::dispose_count); CHECK_EQ(0, TestAsciiResource::dispose_count);
} }
v8::internal::CompilationCache::Clear(); v8::internal::CompilationCache::Clear();
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
CHECK_EQ(1, TestAsciiResource::dispose_count); CHECK_EQ(1, TestAsciiResource::dispose_count);
} }
@ -550,8 +550,8 @@ THREADED_TEST(UsingExternalString) {
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring); i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
CHECK(isymbol->IsSymbol()); CHECK(isymbol->IsSymbol());
} }
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
} }
@ -568,8 +568,8 @@ THREADED_TEST(UsingExternalAsciiString) {
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring); i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
CHECK(isymbol->IsSymbol()); CHECK(isymbol->IsSymbol());
} }
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
} }
@ -1333,12 +1333,12 @@ THREADED_TEST(InternalFieldsNativePointers) {
// Check reading and writing aligned pointers. // Check reading and writing aligned pointers.
obj->SetPointerInInternalField(0, aligned); obj->SetPointerInInternalField(0, aligned);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
CHECK_EQ(aligned, obj->GetPointerFromInternalField(0)); CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
// Check reading and writing unaligned pointers. // Check reading and writing unaligned pointers.
obj->SetPointerInInternalField(0, unaligned); obj->SetPointerInInternalField(0, unaligned);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0)); CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
delete[] data; delete[] data;
@ -1351,7 +1351,7 @@ THREADED_TEST(IdentityHash) {
// Ensure that the test starts with an fresh heap to test whether the hash // Ensure that the test starts with an fresh heap to test whether the hash
// code is based on the address. // code is based on the address.
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
Local<v8::Object> obj = v8::Object::New(); Local<v8::Object> obj = v8::Object::New();
int hash = obj->GetIdentityHash(); int hash = obj->GetIdentityHash();
int hash1 = obj->GetIdentityHash(); int hash1 = obj->GetIdentityHash();
@ -1361,7 +1361,7 @@ THREADED_TEST(IdentityHash) {
// objects should not be assigned the same hash code. If the test below fails // objects should not be assigned the same hash code. If the test below fails
// the random number generator should be evaluated. // the random number generator should be evaluated.
CHECK_NE(hash, hash2); CHECK_NE(hash, hash2);
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
int hash3 = v8::Object::New()->GetIdentityHash(); int hash3 = v8::Object::New()->GetIdentityHash();
// Make sure that the identity hash is not based on the initial address of // Make sure that the identity hash is not based on the initial address of
// the object alone. If the test below fails the random number generator // the object alone. If the test below fails the random number generator
@ -1381,7 +1381,7 @@ THREADED_TEST(HiddenProperties) {
v8::Local<v8::String> empty = v8_str(""); v8::Local<v8::String> empty = v8_str("");
v8::Local<v8::String> prop_name = v8_str("prop_name"); v8::Local<v8::String> prop_name = v8_str("prop_name");
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
// Make sure delete of a non-existent hidden value works // Make sure delete of a non-existent hidden value works
CHECK(obj->DeleteHiddenValue(key)); CHECK(obj->DeleteHiddenValue(key));
@ -1391,7 +1391,7 @@ THREADED_TEST(HiddenProperties) {
CHECK(obj->SetHiddenValue(key, v8::Integer::New(2002))); CHECK(obj->SetHiddenValue(key, v8::Integer::New(2002)));
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value()); CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
// Make sure we do not find the hidden property. // Make sure we do not find the hidden property.
CHECK(!obj->Has(empty)); CHECK(!obj->Has(empty));
@ -1402,7 +1402,7 @@ THREADED_TEST(HiddenProperties) {
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value()); CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
CHECK_EQ(2003, obj->Get(empty)->Int32Value()); CHECK_EQ(2003, obj->Get(empty)->Int32Value());
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
// Add another property and delete it afterwards to force the object in // Add another property and delete it afterwards to force the object in
// slow case. // slow case.
@ -1413,7 +1413,7 @@ THREADED_TEST(HiddenProperties) {
CHECK(obj->Delete(prop_name)); CHECK(obj->Delete(prop_name));
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value()); CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
CHECK(obj->DeleteHiddenValue(key)); CHECK(obj->DeleteHiddenValue(key));
CHECK(obj->GetHiddenValue(key).IsEmpty()); CHECK(obj->GetHiddenValue(key).IsEmpty());
@ -1429,7 +1429,7 @@ static v8::Handle<Value> InterceptorForHiddenProperties(
} }
// The whole goal of this interceptor is to cause a GC during local property // The whole goal of this interceptor is to cause a GC during local property
// lookup. // lookup.
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
i::FLAG_always_compact = saved_always_compact; i::FLAG_always_compact = saved_always_compact;
return v8::Handle<Value>(); return v8::Handle<Value>();
} }
@ -2982,7 +2982,7 @@ static v8::Handle<Value> ArgumentsTestCallback(const v8::Arguments& args) {
CHECK_EQ(v8::Integer::New(3), args[2]); CHECK_EQ(v8::Integer::New(3), args[2]);
CHECK_EQ(v8::Undefined(), args[3]); CHECK_EQ(v8::Undefined(), args[3]);
v8::HandleScope scope; v8::HandleScope scope;
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
return v8::Undefined(); return v8::Undefined();
} }
@ -4960,7 +4960,7 @@ static v8::Handle<Value> InterceptorHasOwnPropertyGetterGC(
Local<String> name, Local<String> name,
const AccessorInfo& info) { const AccessorInfo& info) {
ApiTestFuzzer::Fuzz(); ApiTestFuzzer::Fuzz();
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
return v8::Handle<Value>(); return v8::Handle<Value>();
} }
@ -6165,8 +6165,8 @@ static int GetSurvivingGlobalObjectsCount() {
// the first garbage collection but some of the maps have already // the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not // been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection. // collected until the second garbage collection.
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
v8::internal::Heap::CollectAllGarbage(); v8::internal::Heap::CollectAllGarbage(false);
v8::internal::HeapIterator it; v8::internal::HeapIterator it;
while (it.has_next()) { while (it.has_next()) {
v8::internal::HeapObject* object = it.next(); v8::internal::HeapObject* object = it.next();
@ -6242,7 +6242,7 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
// weak callback of the first handle would be able to 'reallocate' it. // weak callback of the first handle would be able to 'reallocate' it.
handle1.MakeWeak(NULL, NewPersistentHandleCallback); handle1.MakeWeak(NULL, NewPersistentHandleCallback);
handle2.Dispose(); handle2.Dispose();
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
} }
@ -6250,7 +6250,7 @@ v8::Persistent<v8::Object> to_be_disposed;
void DisposeAndForceGcCallback(v8::Persistent<v8::Value> handle, void*) { void DisposeAndForceGcCallback(v8::Persistent<v8::Value> handle, void*) {
to_be_disposed.Dispose(); to_be_disposed.Dispose();
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
} }
@ -6265,7 +6265,7 @@ THREADED_TEST(DoNotUseDeletedNodesInSecondLevelGc) {
} }
handle1.MakeWeak(NULL, DisposeAndForceGcCallback); handle1.MakeWeak(NULL, DisposeAndForceGcCallback);
to_be_disposed = handle2; to_be_disposed = handle2;
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
} }
@ -6842,7 +6842,7 @@ class RegExpInterruptTest {
{ {
v8::Locker lock; v8::Locker lock;
// TODO(lrn): Perhaps create some garbage before collecting. // TODO(lrn): Perhaps create some garbage before collecting.
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
gc_count_++; gc_count_++;
} }
i::OS::Sleep(1); i::OS::Sleep(1);
@ -6963,7 +6963,7 @@ class ApplyInterruptTest {
while (gc_during_apply_ < kRequiredGCs) { while (gc_during_apply_ < kRequiredGCs) {
{ {
v8::Locker lock; v8::Locker lock;
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
gc_count_++; gc_count_++;
} }
i::OS::Sleep(1); i::OS::Sleep(1);
@ -7680,11 +7680,11 @@ THREADED_TEST(PixelArray) {
uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount)); uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount));
i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(kElementCount, i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(kElementCount,
pixel_data); pixel_data);
i::Heap::CollectAllGarbage(); // Force GC to trigger verification. i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) { for (int i = 0; i < kElementCount; i++) {
pixels->set(i, i); pixels->set(i, i);
} }
i::Heap::CollectAllGarbage(); // Force GC to trigger verification. i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) { for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(i, pixels->get(i)); CHECK_EQ(i, pixels->get(i));
CHECK_EQ(i, pixel_data[i]); CHECK_EQ(i, pixel_data[i]);

16
deps/v8/test/cctest/test-debug.cc

@ -414,8 +414,8 @@ void CheckDebuggerUnloaded(bool check_functions) {
CHECK_EQ(NULL, Debug::debug_info_list_); CHECK_EQ(NULL, Debug::debug_info_list_);
// Collect garbage to ensure weak handles are cleared. // Collect garbage to ensure weak handles are cleared.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
// Iterate the head and check that there are no debugger related objects left. // Iterate the head and check that there are no debugger related objects left.
HeapIterator iterator; HeapIterator iterator;
@ -843,7 +843,7 @@ static void DebugEventBreakPointCollectGarbage(
Heap::CollectGarbage(0, v8::internal::NEW_SPACE); Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
} else { } else {
// Mark sweep (and perhaps compact). // Mark sweep (and perhaps compact).
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
} }
} }
} }
@ -1206,7 +1206,7 @@ static void CallAndGC(v8::Local<v8::Object> recv, v8::Local<v8::Function> f) {
CHECK_EQ(2 + i * 3, break_point_hit_count); CHECK_EQ(2 + i * 3, break_point_hit_count);
// Mark sweep (and perhaps compact) and call function. // Mark sweep (and perhaps compact) and call function.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
f->Call(recv, 0, NULL); f->Call(recv, 0, NULL);
CHECK_EQ(3 + i * 3, break_point_hit_count); CHECK_EQ(3 + i * 3, break_point_hit_count);
} }
@ -5094,7 +5094,7 @@ TEST(ScriptCollectedEvent) {
// Do garbage collection to ensure that only the script in this test will be // Do garbage collection to ensure that only the script in this test will be
// collected afterwards. // collected afterwards.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
script_collected_count = 0; script_collected_count = 0;
v8::Debug::SetDebugEventListener(DebugEventScriptCollectedEvent, v8::Debug::SetDebugEventListener(DebugEventScriptCollectedEvent,
@ -5106,7 +5106,7 @@ TEST(ScriptCollectedEvent) {
// Do garbage collection to collect the script above which is no longer // Do garbage collection to collect the script above which is no longer
// referenced. // referenced.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
CHECK_EQ(2, script_collected_count); CHECK_EQ(2, script_collected_count);
@ -5141,7 +5141,7 @@ TEST(ScriptCollectedEventContext) {
// Do garbage collection to ensure that only the script in this test will be // Do garbage collection to ensure that only the script in this test will be
// collected afterwards. // collected afterwards.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler); v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler);
{ {
@ -5152,7 +5152,7 @@ TEST(ScriptCollectedEventContext) {
// Do garbage collection to collect the script above which is no longer // Do garbage collection to collect the script above which is no longer
// referenced. // referenced.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
CHECK_EQ(2, script_collected_message_count); CHECK_EQ(2, script_collected_message_count);

12
deps/v8/test/cctest/test-disasm-arm.cc

@ -123,13 +123,13 @@ TEST(Type0) {
"20354189 eorcss r4, r5, r9, lsl #3"); "20354189 eorcss r4, r5, r9, lsl #3");
COMPARE(sub(r5, r6, Operand(r10, LSL, 31), LeaveCC, hs), COMPARE(sub(r5, r6, Operand(r10, LSL, 31), LeaveCC, hs),
"20465f8a subcs r5, r6, sl, lsl #31"); "20465f8a subcs r5, r6, r10, lsl #31");
COMPARE(sub(r5, r6, Operand(r10, LSL, 30), SetCC, cc), COMPARE(sub(r5, r6, Operand(r10, LSL, 30), SetCC, cc),
"30565f0a subccs r5, r6, sl, lsl #30"); "30565f0a subccs r5, r6, r10, lsl #30");
COMPARE(sub(r5, r6, Operand(r10, LSL, 24), LeaveCC, lo), COMPARE(sub(r5, r6, Operand(r10, LSL, 24), LeaveCC, lo),
"30465c0a subcc r5, r6, sl, lsl #24"); "30465c0a subcc r5, r6, r10, lsl #24");
COMPARE(sub(r5, r6, Operand(r10, LSL, 16), SetCC, mi), COMPARE(sub(r5, r6, Operand(r10, LSL, 16), SetCC, mi),
"4056580a submis r5, r6, sl, lsl #16"); "4056580a submis r5, r6, r10, lsl #16");
COMPARE(rsb(r6, r7, Operand(fp)), COMPARE(rsb(r6, r7, Operand(fp)),
"e067600b rsb r6, r7, fp"); "e067600b rsb r6, r7, fp");
@ -163,7 +163,7 @@ TEST(Type0) {
COMPARE(sbc(r7, r9, Operand(ip, ROR, 4)), COMPARE(sbc(r7, r9, Operand(ip, ROR, 4)),
"e0c9726c sbc r7, r9, ip, ror #4"); "e0c9726c sbc r7, r9, ip, ror #4");
COMPARE(sbc(r7, r10, Operand(ip), SetCC), COMPARE(sbc(r7, r10, Operand(ip), SetCC),
"e0da700c sbcs r7, sl, ip"); "e0da700c sbcs r7, r10, ip");
COMPARE(sbc(r7, ip, Operand(ip, ROR, 31), SetCC, hi), COMPARE(sbc(r7, ip, Operand(ip, ROR, 31), SetCC, hi),
"80dc7fec sbchis r7, ip, ip, ror #31"); "80dc7fec sbchis r7, ip, ip, ror #31");
@ -240,7 +240,7 @@ TEST(Type0) {
"51d10004 bicpls r0, r1, r4"); "51d10004 bicpls r0, r1, r4");
COMPARE(mvn(r10, Operand(r1)), COMPARE(mvn(r10, Operand(r1)),
"e1e0a001 mvn sl, r1"); "e1e0a001 mvn r10, r1");
COMPARE(mvn(r9, Operand(r2)), COMPARE(mvn(r9, Operand(r2)),
"e1e09002 mvn r9, r2"); "e1e09002 mvn r9, r2");
COMPARE(mvn(r0, Operand(r3), SetCC), COMPARE(mvn(r0, Operand(r3), SetCC),

2
deps/v8/test/cctest/test-log.cc

@ -685,7 +685,7 @@ TEST(EquivalenceOfLoggingAndTraversal) {
" obj.test =\n" " obj.test =\n"
" (function a(j) { return function b() { return j; } })(100);\n" " (function a(j) { return function b() { return j; } })(100);\n"
"})(this);"); "})(this);");
i::Heap::CollectAllGarbage(); i::Heap::CollectAllGarbage(false);
EmbeddedVector<char, 204800> buffer; EmbeddedVector<char, 204800> buffer;
int log_size; int log_size;

10
deps/v8/test/cctest/test-serialize.cc

@ -125,12 +125,14 @@ TEST(ExternalReferenceEncoder) {
encoder.Encode(the_hole_value_location.address())); encoder.Encode(the_hole_value_location.address()));
ExternalReference stack_guard_limit_address = ExternalReference stack_guard_limit_address =
ExternalReference::address_of_stack_guard_limit(); ExternalReference::address_of_stack_guard_limit();
CHECK_EQ(make_code(UNCLASSIFIED, 3), CHECK_EQ(make_code(UNCLASSIFIED, 4),
encoder.Encode(stack_guard_limit_address.address())); encoder.Encode(stack_guard_limit_address.address()));
CHECK_EQ(make_code(UNCLASSIFIED, 5), CHECK_EQ(make_code(UNCLASSIFIED, 10),
encoder.Encode(ExternalReference::debug_break().address())); encoder.Encode(ExternalReference::debug_break().address()));
CHECK_EQ(make_code(UNCLASSIFIED, 6), CHECK_EQ(make_code(UNCLASSIFIED, 6),
encoder.Encode(ExternalReference::new_space_start().address())); encoder.Encode(ExternalReference::new_space_start().address()));
CHECK_EQ(make_code(UNCLASSIFIED, 3),
encoder.Encode(ExternalReference::roots_address().address()));
} }
@ -157,9 +159,9 @@ TEST(ExternalReferenceDecoder) {
CHECK_EQ(ExternalReference::the_hole_value_location().address(), CHECK_EQ(ExternalReference::the_hole_value_location().address(),
decoder.Decode(make_code(UNCLASSIFIED, 2))); decoder.Decode(make_code(UNCLASSIFIED, 2)));
CHECK_EQ(ExternalReference::address_of_stack_guard_limit().address(), CHECK_EQ(ExternalReference::address_of_stack_guard_limit().address(),
decoder.Decode(make_code(UNCLASSIFIED, 3))); decoder.Decode(make_code(UNCLASSIFIED, 4)));
CHECK_EQ(ExternalReference::debug_break().address(), CHECK_EQ(ExternalReference::debug_break().address(),
decoder.Decode(make_code(UNCLASSIFIED, 5))); decoder.Decode(make_code(UNCLASSIFIED, 10)));
CHECK_EQ(ExternalReference::new_space_start().address(), CHECK_EQ(ExternalReference::new_space_start().address(),
decoder.Decode(make_code(UNCLASSIFIED, 6))); decoder.Decode(make_code(UNCLASSIFIED, 6)));
} }

6
deps/v8/test/cctest/test-strings.cc

@ -480,7 +480,7 @@ TEST(Regress9746) {
// symbol entry in the symbol table because it is used by the script // symbol entry in the symbol table because it is used by the script
// kept alive by the weak wrapper. Make sure we don't destruct the // kept alive by the weak wrapper. Make sure we don't destruct the
// external string. // external string.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
CHECK(!resource_destructed); CHECK(!resource_destructed);
{ {
@ -499,7 +499,7 @@ TEST(Regress9746) {
// Forcing another garbage collection should let us get rid of the // Forcing another garbage collection should let us get rid of the
// slice from the symbol table. The external string remains in the // slice from the symbol table. The external string remains in the
// heap until the next GC. // heap until the next GC.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
CHECK(!resource_destructed); CHECK(!resource_destructed);
v8::HandleScope scope; v8::HandleScope scope;
Handle<String> key_string = Factory::NewStringFromAscii(key_vector); Handle<String> key_string = Factory::NewStringFromAscii(key_vector);
@ -508,7 +508,7 @@ TEST(Regress9746) {
// Forcing yet another garbage collection must allow us to finally // Forcing yet another garbage collection must allow us to finally
// get rid of the external string. // get rid of the external string.
Heap::CollectAllGarbage(); Heap::CollectAllGarbage(false);
CHECK(resource_destructed); CHECK(resource_destructed);
delete[] source; delete[] source;

4
deps/v8/test/mjsunit/date-parse.js

@ -250,8 +250,8 @@ testCasesMisc.forEach(testDateParseMisc);
// Test that we can parse our own date format. // Test that we can parse our own date format.
// (Dates from 1970 to ~2070 with 95h steps.) // (Dates from 1970 to ~2070 with 150h steps.)
for (var i = 0; i < 24 * 365 * 100; i += 95) { for (var i = 0; i < 24 * 365 * 100; i += 150) {
var ms = i * (3600 * 1000); var ms = i * (3600 * 1000);
var s = (new Date(ms)).toString(); var s = (new Date(ms)).toString();
assertEquals(ms, Date.parse(s), "parse own: " + s); assertEquals(ms, Date.parse(s), "parse own: " + s);

2
deps/v8/test/mjsunit/mjsunit.status

@ -52,7 +52,7 @@ debug-evaluate-recursive: CRASH || FAIL
debug-changebreakpoint: CRASH || FAIL debug-changebreakpoint: CRASH || FAIL
debug-clearbreakpoint: CRASH || FAIL debug-clearbreakpoint: CRASH || FAIL
debug-clearbreakpointgroup: PASS, FAIL if $mode == debug debug-clearbreakpointgroup: PASS, FAIL if $mode == debug
debug-conditional-breakpoints: FAIL debug-conditional-breakpoints: CRASH || FAIL
debug-evaluate: CRASH || FAIL debug-evaluate: CRASH || FAIL
debug-ignore-breakpoints: CRASH || FAIL debug-ignore-breakpoints: CRASH || FAIL
debug-multiple-breakpoints: CRASH || FAIL debug-multiple-breakpoints: CRASH || FAIL

2
deps/v8/test/mozilla/mozilla.status

@ -171,7 +171,7 @@ js1_5/Regress/regress-98901: PASS || FAIL
# Tests that sorting arrays of ints is less than 3 times as fast # Tests that sorting arrays of ints is less than 3 times as fast
# as sorting arrays of strings. # as sorting arrays of strings.
js1_5/extensions/regress-371636: PASS || FAIL js1_5/extensions/regress-371636: PASS || FAIL || TIMEOUT if $mode == debug
# Tests depend on GC timings. Inherently flaky. # Tests depend on GC timings. Inherently flaky.

2
deps/v8/tools/visual_studio/d8_x64.vcproj

@ -50,6 +50,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"
@ -111,6 +112,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"

2
deps/v8/tools/visual_studio/v8_cctest_x64.vcproj

@ -50,6 +50,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"
@ -111,6 +112,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"

2
deps/v8/tools/visual_studio/v8_shell_sample_x64.vcproj

@ -50,6 +50,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"
@ -111,6 +112,7 @@
<Tool <Tool
Name="VCLinkerTool" Name="VCLinkerTool"
AdditionalDependencies="winmm.lib Ws2_32.lib" AdditionalDependencies="winmm.lib Ws2_32.lib"
TargetMachine="17"
/> />
<Tool <Tool
Name="VCALinkTool" Name="VCALinkTool"

Loading…
Cancel
Save