@@ -157,6 +157,7 @@ CodeGenerator::CodeGenerator(MacroAssembler* masm)
       state_(NULL),
       loop_nesting_(0),
       type_info_(NULL),
+      function_return_(JumpTarget::BIDIRECTIONAL),
       function_return_is_shadowed_(false) {
 }

@@ -218,7 +219,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
     // for stack overflow.
     frame_->AllocateStackSlots();

-    VirtualFrame::SpilledScope spilled_scope(frame_);
+    frame_->AssertIsSpilled();
     int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (heap_slots > 0) {
       // Allocate local context.
@@ -257,6 +258,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
       // order: such a parameter is copied repeatedly into the same
       // context location and thus the last value is what is seen inside
       // the function.
+      frame_->AssertIsSpilled();
       for (int i = 0; i < scope()->num_parameters(); i++) {
         Variable* par = scope()->parameter(i);
         Slot* slot = par->slot();
@@ -282,8 +284,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {

     // Initialize ThisFunction reference if present.
     if (scope()->is_function_scope() && scope()->function() != NULL) {
-      __ mov(ip, Operand(Factory::the_hole_value()));
-      frame_->EmitPush(ip);
+      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
       StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
     }
   } else {
@@ -510,7 +511,6 @@ void CodeGenerator::LoadCondition(Expression* x,
       has_valid_frame() &&
       !has_cc() &&
       frame_->height() == original_height) {
-    frame_->SpillAll();
     true_target->Jump();
   }
 }
@@ -535,22 +535,18 @@ void CodeGenerator::Load(Expression* expr) {

   if (has_cc()) {
     // Convert cc_reg_ into a boolean value.
-    VirtualFrame::SpilledScope scope(frame_);
     JumpTarget loaded;
     JumpTarget materialize_true;
     materialize_true.Branch(cc_reg_);
-    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
-    frame_->EmitPush(r0);
+    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
     loaded.Jump();
     materialize_true.Bind();
-    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
-    frame_->EmitPush(r0);
+    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
     loaded.Bind();
     cc_reg_ = al;
   }

   if (true_target.is_linked() || false_target.is_linked()) {
-    VirtualFrame::SpilledScope scope(frame_);
     // We have at least one condition value that has been "translated"
     // into a branch, thus it needs to be loaded explicitly.
     JumpTarget loaded;
@@ -561,8 +557,7 @@ void CodeGenerator::Load(Expression* expr) {
     // Load "true" if necessary.
     if (true_target.is_linked()) {
       true_target.Bind();
-      __ LoadRoot(r0, Heap::kTrueValueRootIndex);
-      frame_->EmitPush(r0);
+      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
     }
     // If both "true" and "false" need to be loaded jump across the code for
     // "false".
@@ -572,8 +567,7 @@ void CodeGenerator::Load(Expression* expr) {
     // Load "false" if necessary.
     if (false_target.is_linked()) {
       false_target.Bind();
-      __ LoadRoot(r0, Heap::kFalseValueRootIndex);
-      frame_->EmitPush(r0);
+      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
     }
     // A value is loaded on all paths reaching this point.
     loaded.Bind();
@@ -592,11 +586,11 @@ void CodeGenerator::LoadGlobal() {


 void CodeGenerator::LoadGlobalReceiver(Register scratch) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
-  __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX));
-  __ ldr(scratch,
-         FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset));
-  frame_->EmitPush(scratch);
+  Register reg = frame_->GetTOSRegister();
+  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
+  __ ldr(reg,
+         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
+  frame_->EmitPush(reg);
 }

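GetTOSRegister(), used above in place of the caller-supplied scratch register, is the frame's way of handing out a register that may legally hold the next top-of-stack value. A plausible sketch of its contract (assumed; is_used is a hypothetical helper standing in for the frame's real bookkeeping):

Register VirtualFrame::GetTOSRegister() {
  // Prefer the registers the TOS usually lives in; spill as a last resort.
  if (!is_used(r0)) return r0;
  if (!is_used(r1)) return r1;
  SpillAll();  // everything is flushed to memory, so r0 is now free
  return r0;
}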
@@ -613,8 +607,6 @@ ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {

 void CodeGenerator::StoreArgumentsObject(bool initial) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
-
   ArgumentsAllocationMode mode = ArgumentsMode();
   ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
@@ -623,9 +615,9 @@ void CodeGenerator::StoreArgumentsObject(bool initial) {
     // When using lazy arguments allocation, we store the hole value
     // as a sentinel indicating that the arguments object hasn't been
     // allocated yet.
-    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-    frame_->EmitPush(ip);
+    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
   } else {
+    frame_->SpillAll();
     ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
     __ ldr(r2, frame_->Function());
     // The receiver is below the arguments, the return address, and the
@@ -649,9 +641,9 @@ void CodeGenerator::StoreArgumentsObject(bool initial) {
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
-    frame_->EmitPop(r0);
+    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-    __ cmp(r0, ip);
+    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->slot(), NOT_CONST_INIT);
@@ -754,36 +746,35 @@ void CodeGenerator::UnloadReference(Reference* ref) {
 // may jump to 'false_target' in case the register converts to 'false'.
 void CodeGenerator::ToBoolean(JumpTarget* true_target,
                               JumpTarget* false_target) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   // Note: The generated code snippet does not change stack variables.
   //       Only the condition code should be set.
-  frame_->EmitPop(r0);
+  Register tos = frame_->PopToRegister();

   // Fast case checks

   // Check if the value is 'false'.
   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
-  __ cmp(r0, ip);
+  __ cmp(tos, ip);
   false_target->Branch(eq);

   // Check if the value is 'true'.
   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
-  __ cmp(r0, ip);
+  __ cmp(tos, ip);
   true_target->Branch(eq);

   // Check if the value is 'undefined'.
   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-  __ cmp(r0, ip);
+  __ cmp(tos, ip);
   false_target->Branch(eq);

   // Check if the value is a smi.
-  __ cmp(r0, Operand(Smi::FromInt(0)));
+  __ cmp(tos, Operand(Smi::FromInt(0)));
   false_target->Branch(eq);
-  __ tst(r0, Operand(kSmiTagMask));
+  __ tst(tos, Operand(kSmiTagMask));
   true_target->Branch(eq);

   // Slow case: call the runtime.
-  frame_->EmitPush(r0);
+  frame_->EmitPush(tos);
   frame_->CallRuntime(Runtime::kToBool, 1);
   // Convert the result (r0) to a condition code.
   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
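PopToRegister(), which this hunk adopts in place of EmitPop(r0), is the dual of GetTOSRegister: it removes the top element and returns whichever register holds it, loading from memory only when necessary. A simplified sketch under assumed element bookkeeping (elements_ and top.is_register() are illustrative names, not V8's actual internals):

Register VirtualFrame::PopToRegister() {
  Element top = elements_.back();
  elements_.pop_back();
  if (top.is_register()) return top.reg();  // zero-cost pop: value already in a register
  Register reg = GetTOSRegister();
  __ ldr(reg, MemOperand(sp, 0));           // materialize from the real stack
  __ add(sp, sp, Operand(kPointerSize));    // and drop the slot
  return reg;
}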
@@ -935,7 +926,15 @@ class DeferredInlineSmiOperation: public DeferredCode {
 };


+// On entry the non-constant side of the binary operation is in tos_register_
+// and the constant smi side is nowhere.  The tos_register_ is not used by the
+// virtual frame.  On exit the answer is in the tos_register_ and the virtual
+// frame is unchanged.
 void DeferredInlineSmiOperation::Generate() {
+  VirtualFrame copied_frame(*frame_state()->frame());
+  copied_frame.SpillAll();
+
   Register lhs = r1;
   Register rhs = r0;
   switch (op_) {
@@ -969,45 +968,20 @@ void DeferredInlineSmiOperation::Generate() {
     case Token::MOD:
     case Token::BIT_OR:
     case Token::BIT_XOR:
-    case Token::BIT_AND: {
-      if (reversed_) {
-        if (tos_register_.is(r0)) {
-          __ mov(r1, Operand(Smi::FromInt(value_)));
-        } else {
-          ASSERT(tos_register_.is(r1));
-          __ mov(r0, Operand(Smi::FromInt(value_)));
-          lhs = r0;
-          rhs = r1;
-        }
-      } else {
-        if (tos_register_.is(r1)) {
-          __ mov(r0, Operand(Smi::FromInt(value_)));
-        } else {
-          ASSERT(tos_register_.is(r0));
-          __ mov(r1, Operand(Smi::FromInt(value_)));
-          lhs = r0;
-          rhs = r1;
-        }
-      }
-      break;
-    }
-
+    case Token::BIT_AND:
     case Token::SHL:
     case Token::SHR:
     case Token::SAR: {
-      if (!reversed_) {
-        if (tos_register_.is(r1)) {
-          __ mov(r0, Operand(Smi::FromInt(value_)));
-        } else {
-          ASSERT(tos_register_.is(r0));
-          __ mov(r1, Operand(Smi::FromInt(value_)));
-          lhs = r0;
-          rhs = r1;
-        }
-      } else {
-        ASSERT(op_ == Token::SHL);
-        __ mov(r1, Operand(Smi::FromInt(value_)));
+      if (tos_register_.is(r1)) {
+        __ mov(r0, Operand(Smi::FromInt(value_)));
+      } else {
+        ASSERT(tos_register_.is(r0));
+        __ mov(r1, Operand(Smi::FromInt(value_)));
+      }
+      if (reversed_ == tos_register_.is(r1)) {
+        lhs = r0;
+        rhs = r1;
       }
       break;
     }
@@ -1019,11 +993,17 @@ void DeferredInlineSmiOperation::Generate() {

   GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
   __ CallStub(&stub);
+
   // The generic stub returns its value in r0, but that's not
   // necessarily what we want.  We want whatever the inlined code
   // expected, which is that the answer is in the same register as
   // the operand was.
   __ Move(tos_register_, r0);
+
+  // The tos register was not in use for the virtual frame that we
+  // came into this function with, so we can merge back to that frame
+  // without trashing it.
+  copied_frame.MergeTo(frame_state()->frame());
 }
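The copied_frame/MergeTo pair added here is the general discipline for deferred (slow-case) code once the main path stops being fully spilled: snapshot the frame on entry, spill the snapshot so the slow case may clobber registers freely, then merge back so the inline path sees the layout it expects. In outline (an illustrative skeleton, not a specific class from this patch):

void SomeDeferredCode::Generate() {  // hypothetical deferred stub
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();                       // slow case may trash registers
  // ... slow-case work; leave the answer where the inline code expects ...
  copied_frame.MergeTo(frame_state()->frame());  // restore the inline layout
}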
@@ -1124,12 +1104,6 @@ void CodeGenerator::SmiOperation(Token::Value op,

   // We move the top of stack to a register (normally no move is involved).
   Register tos = frame_->PopToRegister();
-  // All other registers are spilled.  The deferred code expects one argument
-  // in a register and all other values are flushed to the stack.  The
-  // answer is returned in the same register that the top of stack argument was
-  // in.
-  frame_->SpillAll();
-
   switch (op) {
     case Token::ADD: {
       DeferredCode* deferred =
@@ -1448,8 +1422,6 @@ void CodeGenerator::Comparison(Condition cc,
 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                       CallFunctionFlags flags,
                                       int position) {
-  frame_->AssertIsSpilled();
-
   // Push the arguments ("left-to-right") on the stack.
   int arg_count = args->length();
   for (int i = 0; i < arg_count; i++) {
@@ -1482,7 +1454,6 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   // stack, as receiver and arguments, and calls x.
   // In the implementation comments, we call x the applicand
   // and y the receiver.
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
   ASSERT(arguments->IsArguments());
@@ -1500,6 +1471,15 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   Load(receiver);
   LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

+  // At this point the top two stack elements are probably in registers
+  // since they were just loaded.  Ensure they are in regs and get the
+  // regs.
+  Register receiver_reg = frame_->Peek2();
+  Register arguments_reg = frame_->Peek();
+
+  // From now on the frame is spilled.
+  frame_->SpillAll();
+
   // Emit the source position information after having loaded the
   // receiver and the arguments.
   CodeForSourcePosition(position);
@@ -1513,32 +1493,30 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   // already.  If so, just use that instead of copying the arguments
   // from the stack.  This also deals with cases where a local variable
   // named 'arguments' has been introduced.
-  __ ldr(r0, MemOperand(sp, 0));
-
-  Label slow, done;
+  JumpTarget slow;
+  Label done;
   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-  __ cmp(ip, r0);
-  __ b(ne, &slow);
+  __ cmp(ip, arguments_reg);
+  slow.Branch(ne);

   Label build_args;
   // Get rid of the arguments object probe.
   frame_->Drop();
   // Stack now has 3 elements on it.
   // Contents of stack at this point:
-  // sp[0]: receiver
+  // sp[0]: receiver - in the receiver_reg register.
   // sp[1]: applicand.apply
   // sp[2]: applicand.

   // Check that the receiver really is a JavaScript object.
-  __ ldr(r0, MemOperand(sp, 0));
-  __ BranchOnSmi(r0, &build_args);
+  __ BranchOnSmi(receiver_reg, &build_args);
   // We allow all JSObjects including JSFunctions.  As long as
   // JS_FUNCTION_TYPE is the last instance type and it is right
   // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
   // bound.
   ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
   ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
-  __ CompareObjectType(r0, r1, r2, FIRST_JS_OBJECT_TYPE);
+  __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
   __ b(lt, &build_args);

   // Check that applicand.apply is Function.prototype.apply.
@@ -1627,7 +1605,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
   StoreArgumentsObject(false);

   // Stack and frame now have 4 elements.
-  __ bind(&slow);
+  slow.Bind();

   // Generic computation of x.apply(y, args) with no special optimization.
   // Flip applicand.apply and applicand on the stack, so
@@ -1652,7 +1630,6 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,

 void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   ASSERT(has_cc());
   Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
   target->Branch(cc);
@@ -1661,7 +1638,7 @@ void CodeGenerator::Branch(bool if_true, JumpTarget* target) {

 void CodeGenerator::CheckStack() {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
+  frame_->SpillAll();
   Comment cmnt(masm_, "[ check stack");
   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
   // Put the lr setup instruction in the delay slot.  kInstrSize is added to
@@ -1683,7 +1660,6 @@ void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
     Visit(statements->at(i));
   }
@@ -1695,7 +1671,6 @@ void CodeGenerator::VisitBlock(Block* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ Block");
   CodeForStatementPosition(node);
   node->break_target()->SetExpectedHeight();
@@ -1713,7 +1688,6 @@ void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
   frame_->EmitPush(Operand(pairs));
   frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));

-  VirtualFrame::SpilledScope spilled_scope(frame_);
   frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
   // The result is discarded.
 }
@@ -1754,7 +1728,6 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
       frame_->EmitPush(Operand(0));
     }

-    VirtualFrame::SpilledScope spilled_scope(frame_);
     frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
     // Ignore the return value (declarations are statements).

@@ -1899,7 +1872,6 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) {

 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ ContinueStatement");
   CodeForStatementPosition(node);
   node->target()->continue_target()->Jump();
@@ -1907,7 +1879,6 @@ void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {

 void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ BreakStatement");
   CodeForStatementPosition(node);
   node->target()->break_target()->Jump();
@@ -1915,7 +1886,7 @@ void CodeGenerator::VisitBreakStatement(BreakStatement* node) {

 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
-  VirtualFrame::SpilledScope spilled_scope(frame_);
+  frame_->SpillAll();
   Comment cmnt(masm_, "[ ReturnStatement");

   CodeForStatementPosition(node);
@@ -1926,7 +1897,7 @@ void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
   } else {
     // Pop the result from the frame and prepare the frame for
     // returning thus making it easier to merge.
-    frame_->EmitPop(r0);
+    frame_->PopToR0();
     frame_->PrepareForReturn();
     if (function_return_.is_bound()) {
       // If the function return label is already bound we reuse the
@@ -1986,7 +1957,6 @@ void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ WithEnterStatement");
   CodeForStatementPosition(node);
   Load(node->expression());
@@ -2012,7 +1982,6 @@ void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ WithExitStatement");
   CodeForStatementPosition(node);
   // Pop context.
@@ -2027,7 +1996,6 @@ void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ SwitchStatement");
   CodeForStatementPosition(node);
   node->break_target()->SetExpectedHeight();
@@ -2055,8 +2023,7 @@ void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
     next_test.Bind();
     next_test.Unuse();
     // Duplicate TOS.
-    __ ldr(r0, frame_->Top());
-    frame_->EmitPush(r0);
+    frame_->Dup();
     Comparison(eq, NULL, clause->label(), true);
     Branch(false, &next_test);
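frame_->Dup() replaces the explicit load/push of the top of stack. With a virtual frame this can be pure bookkeeping; a minimal sketch, assuming an element-list representation (the real ARM frame may track its top-of-stack state differently):

void VirtualFrame::Dup() {
  // Duplicating TOS need not touch memory: record a second element
  // that refers to the same register or stack slot as the current top.
  elements_.push_back(elements_.back());
}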
@@ -2094,7 +2061,7 @@ void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
     default_entry.Bind();
     VisitStatements(default_clause->statements());
     // If control flow can fall out of the default and there is a case after
-    // it, jup to that case's body.
+    // it, jump to that case's body.
     if (frame_ != NULL && default_exit.is_bound()) {
       default_exit.Jump();
     }
@@ -2116,7 +2083,6 @@ void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ DoWhileStatement");
   CodeForStatementPosition(node);
   node->break_target()->SetExpectedHeight();
@@ -2191,7 +2157,6 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ WhileStatement");
   CodeForStatementPosition(node);

@@ -2209,7 +2174,7 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
   node->continue_target()->Bind();

   if (info == DONT_KNOW) {
-    JumpTarget body;
+    JumpTarget body(JumpTarget::BIDIRECTIONAL);
     LoadCondition(node->cond(), &body, node->break_target(), true);
     if (has_valid_frame()) {
       // A NULL frame indicates that control did not fall out of the
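The switch to JumpTarget::BIDIRECTIONAL matters because body is now reached by a backward branch as well as by falling through. A bidirectional target must fix its expected frame layout when it is bound, since branches emitted later cannot renegotiate that layout. Schematic loop skeleton (hypothetical, not code from this file):

JumpTarget body(JumpTarget::BIDIRECTIONAL);
body.Bind();                    // the frame layout is frozen here
// ... emit the loop body ...
LoadCondition(cond, &body, break_target, true);  // the backward branch
                                                 // must merge to that layout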
@@ -2242,7 +2207,6 @@ void CodeGenerator::VisitForStatement(ForStatement* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ ForStatement");
   CodeForStatementPosition(node);
   if (node->init() != NULL) {
@@ -2931,7 +2895,6 @@ void CodeGenerator::VisitConditional(Conditional* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ Conditional");
   JumpTarget then;
   JumpTarget else_;
@@ -2972,10 +2935,8 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
                     &done);

     slow.Bind();
-    VirtualFrame::SpilledScope spilled_scope(frame_);
     frame_->EmitPush(cp);
-    __ mov(r0, Operand(slot->var()->name()));
-    frame_->EmitPush(r0);
+    frame_->EmitPush(Operand(slot->var()->name()));

     if (typeof_state == INSIDE_TYPEOF) {
       frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
|
|
TypeInfo info = type_info(slot); |
|
|
frame_->EmitPush(SlotOperand(slot, scratch), info); |
|
|
frame_->EmitPush(SlotOperand(slot, scratch), info); |
|
|
|
|
|
|
|
|
if (slot->var()->mode() == Variable::CONST) { |
|
|
if (slot->var()->mode() == Variable::CONST) { |
|
|
// Const slots may contain 'the hole' value (the constant hasn't been
|
|
|
// Const slots may contain 'the hole' value (the constant hasn't been
|
|
|
// initialized yet) which needs to be converted into the 'undefined'
|
|
|
// initialized yet) which needs to be converted into the 'undefined'
|
|
|
// value.
|
|
|
// value.
|
|
|
Comment cmnt(masm_, "[ Unhole const"); |
|
|
Comment cmnt(masm_, "[ Unhole const"); |
|
|
frame_->EmitPop(scratch); |
|
|
Register tos = frame_->PopToRegister(); |
|
|
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
|
|
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
|
|
__ cmp(scratch, ip); |
|
|
__ cmp(tos, ip); |
|
|
__ LoadRoot(scratch, Heap::kUndefinedValueRootIndex, eq); |
|
|
__ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq); |
|
|
frame_->EmitPush(scratch); |
|
|
frame_->EmitPush(tos); |
|
|
} |
|
|
} |
|
|
} |
|
|
} |
|
|
} |
|
|
} |
|
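The un-holing sequence above leans on ARM predicated execution: the second LoadRoot carries an eq condition, so it runs only when the compare found the hole, and no branch is needed. Spelled out (same instructions as the hunk, annotated):

__ cmp(tos, ip);                                       // tos == the_hole_value?
__ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);  // executed only on eq,
                                                       // so the common case
                                                       // falls straight through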
@@ -3007,6 +2969,7 @@ void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {

 void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                   TypeofState state) {
+  VirtualFrame::RegisterAllocationScope scope(this);
   LoadFromSlot(slot, state);

   // Bail out quickly if we're not using lazy arguments allocation.
@@ -3015,17 +2978,15 @@ void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
   // ... or if the slot isn't a non-parameter arguments slot.
   if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

-  VirtualFrame::SpilledScope spilled_scope(frame_);
-
-  // Load the loaded value from the stack into r0 but leave it on the
+  // Load the loaded value from the stack into a register but leave it on the
   // stack.
-  __ ldr(r0, MemOperand(sp, 0));
+  Register tos = frame_->Peek();

   // If the loaded value is the sentinel that indicates that we
   // haven't loaded the arguments object yet, we need to do it now.
   JumpTarget exit;
   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
-  __ cmp(r0, ip);
+  __ cmp(tos, ip);
   exit.Branch(ne);
   frame_->Drop();
   StoreArgumentsObject(false);
@@ -3035,14 +2996,13 @@ void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,

 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
   ASSERT(slot != NULL);
+  VirtualFrame::RegisterAllocationScope scope(this);
   if (slot->type() == Slot::LOOKUP) {
-    VirtualFrame::SpilledScope spilled_scope(frame_);
     ASSERT(slot->var()->is_dynamic());

     // For now, just do a runtime call.
     frame_->EmitPush(cp);
-    __ mov(r0, Operand(slot->var()->name()));
-    frame_->EmitPush(r0);
+    frame_->EmitPush(Operand(slot->var()->name()));

     if (init_state == CONST_INIT) {
       // Same as the case for a normal store, but ignores attribute
@@ -3071,7 +3031,7 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
   } else {
     ASSERT(!slot->var()->is_dynamic());
     Register scratch = VirtualFrame::scratch0();
-    VirtualFrame::RegisterAllocationScope scope(this);
+    Register scratch2 = VirtualFrame::scratch1();

     // The frame must be spilled when branching to this target.
     JumpTarget exit;
@@ -3085,7 +3045,6 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
       __ ldr(scratch, SlotOperand(slot, scratch));
       __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
       __ cmp(scratch, ip);
-      frame_->SpillAll();
       exit.Branch(ne);
     }
@@ -3104,18 +3063,18 @@ void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
       // Skip write barrier if the written value is a smi.
       __ tst(tos, Operand(kSmiTagMask));
       // We don't use tos any more after here.
-      VirtualFrame::SpilledScope spilled_scope(frame_);
       exit.Branch(eq);
       // scratch is loaded with context when calling SlotOperand above.
       int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
-      // r1 could be identical with tos, but that doesn't matter.
-      __ RecordWrite(scratch, Operand(offset), r3, r1);
+      // We need an extra register.  Until we have a way to do that in the
+      // virtual frame we will cheat and ask for a free TOS register.
+      Register scratch3 = frame_->GetTOSRegister();
+      __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
     }
     // If we definitely did not jump over the assignment, we do not need
     // to bind the exit label.  Doing so can defeat peephole
     // optimization.
     if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
+      frame_->SpillAll();
       exit.Bind();
     }
   }
 }
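The "cheat" described in the new comment works because a TOS register handed out by the frame holds no live element: it can serve as a third scratch register for the write barrier even though nothing is ever pushed into it. Condensed (same calls as the hunk, annotated under that assumption):

Register scratch3 = frame_->GetTOSRegister();  // free register; never pushed,
                                               // so no frame bookkeeping follows
__ RecordWrite(scratch, Operand(offset), scratch2, scratch3);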
@@ -3289,42 +3248,51 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ RexExp Literal");

+  Register tmp = VirtualFrame::scratch0();
+  // Free up a TOS register that can be used to push the literal.
+  Register literal = frame_->GetTOSRegister();
+
   // Retrieve the literal array and check the allocated entry.

   // Load the function of this activation.
-  __ ldr(r1, frame_->Function());
+  __ ldr(tmp, frame_->Function());

   // Load the literals array of the function.
-  __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
+  __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));

   // Load the literal at the ast saved index.
   int literal_offset =
       FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
-  __ ldr(r2, FieldMemOperand(r1, literal_offset));
+  __ ldr(literal, FieldMemOperand(tmp, literal_offset));

   JumpTarget done;
   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
-  __ cmp(r2, ip);
+  __ cmp(literal, ip);
+  // This branch locks the virtual frame at the done label to match the
+  // one we have here, where the literal register is not on the stack and
+  // nothing is spilled.
   done.Branch(ne);

-  // If the entry is undefined we call the runtime system to computed
+  // If the entry is undefined we call the runtime system to compute
   // the literal.
-  frame_->EmitPush(r1);  // literal array (0)
-  __ mov(r0, Operand(Smi::FromInt(node->literal_index())));
-  frame_->EmitPush(r0);  // literal index (1)
-  __ mov(r0, Operand(node->pattern()));  // RegExp pattern (2)
-  frame_->EmitPush(r0);
-  __ mov(r0, Operand(node->flags()));  // RegExp flags (3)
-  frame_->EmitPush(r0);
+  // literal array (0)
+  frame_->EmitPush(tmp);
+  // literal index (1)
+  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
+  // RegExp pattern (2)
+  frame_->EmitPush(Operand(node->pattern()));
+  // RegExp flags (3)
+  frame_->EmitPush(Operand(node->flags()));
   frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
-  __ mov(r2, Operand(r0));
+  __ Move(literal, r0);

+  // This call to bind will get us back to the virtual frame we had before
+  // where things are not spilled and the literal register is not on the stack.
   done.Bind();
   // Push the literal.
-  frame_->EmitPush(r2);
+  frame_->EmitPush(literal);
   ASSERT_EQ(original_height + 1, frame_->height());
 }
@@ -3333,20 +3301,20 @@ void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ ObjectLiteral");

+  Register literal = frame_->GetTOSRegister();
   // Load the function of this activation.
-  __ ldr(r3, frame_->Function());
+  __ ldr(literal, frame_->Function());
   // Literal array.
-  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+  __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
+  frame_->EmitPush(literal);
   // Literal index.
-  __ mov(r2, Operand(Smi::FromInt(node->literal_index())));
+  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
   // Constant properties.
-  __ mov(r1, Operand(node->constant_properties()));
+  frame_->EmitPush(Operand(node->constant_properties()));
   // Should the object literal have fast elements?
-  __ mov(r0, Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
-  frame_->EmitPushMultiple(4, r3.bit() | r2.bit() | r1.bit() | r0.bit());
+  frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
   if (node->depth() > 1) {
     frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
|
|
if (key->handle()->IsSymbol()) { |
|
|
if (key->handle()->IsSymbol()) { |
|
|
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
|
|
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
|
|
Load(value); |
|
|
Load(value); |
|
|
frame_->EmitPop(r0); |
|
|
frame_->PopToR0(); |
|
|
|
|
|
// Fetch the object literal.
|
|
|
|
|
|
frame_->SpillAllButCopyTOSToR1(); |
|
|
__ mov(r2, Operand(key->handle())); |
|
|
__ mov(r2, Operand(key->handle())); |
|
|
__ ldr(r1, frame_->Top()); // Load the receiver.
|
|
|
|
|
|
frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); |
|
|
frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); |
|
|
break; |
|
|
break; |
|
|
} |
|
|
} |
|
|
// else fall through
|
|
|
// else fall through
|
|
|
case ObjectLiteral::Property::PROTOTYPE: { |
|
|
case ObjectLiteral::Property::PROTOTYPE: { |
|
|
__ ldr(r0, frame_->Top()); |
|
|
frame_->Dup(); |
|
|
frame_->EmitPush(r0); // dup the result
|
|
|
|
|
|
Load(key); |
|
|
Load(key); |
|
|
Load(value); |
|
|
Load(value); |
|
|
frame_->CallRuntime(Runtime::kSetProperty, 3); |
|
|
frame_->CallRuntime(Runtime::kSetProperty, 3); |
|
|
break; |
|
|
break; |
|
|
} |
|
|
} |
|
|
case ObjectLiteral::Property::SETTER: { |
|
|
case ObjectLiteral::Property::SETTER: { |
|
|
__ ldr(r0, frame_->Top()); |
|
|
frame_->Dup(); |
|
|
frame_->EmitPush(r0); |
|
|
|
|
|
Load(key); |
|
|
Load(key); |
|
|
__ mov(r0, Operand(Smi::FromInt(1))); |
|
|
frame_->EmitPush(Operand(Smi::FromInt(1))); |
|
|
frame_->EmitPush(r0); |
|
|
|
|
|
Load(value); |
|
|
Load(value); |
|
|
frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
|
|
frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
|
|
break; |
|
|
break; |
|
|
} |
|
|
} |
|
|
case ObjectLiteral::Property::GETTER: { |
|
|
case ObjectLiteral::Property::GETTER: { |
|
|
__ ldr(r0, frame_->Top()); |
|
|
frame_->Dup(); |
|
|
frame_->EmitPush(r0); |
|
|
|
|
|
Load(key); |
|
|
Load(key); |
|
|
__ mov(r0, Operand(Smi::FromInt(0))); |
|
|
frame_->EmitPush(Operand(Smi::FromInt(0))); |
|
|
frame_->EmitPush(r0); |
|
|
|
|
|
Load(value); |
|
|
Load(value); |
|
|
frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
|
|
frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
|
|
break; |
|
|
break; |
|
@@ -3414,16 +3378,16 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ ArrayLiteral");

+  Register tos = frame_->GetTOSRegister();
   // Load the function of this activation.
-  __ ldr(r2, frame_->Function());
+  __ ldr(tos, frame_->Function());
   // Load the literals array of the function.
-  __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset));
-  __ mov(r1, Operand(Smi::FromInt(node->literal_index())));
-  __ mov(r0, Operand(node->constant_elements()));
-  frame_->EmitPushMultiple(3, r2.bit() | r1.bit() | r0.bit());
+  __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
+  frame_->EmitPush(tos);
+  frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
+  frame_->EmitPush(Operand(node->constant_elements()));
   int length = node->values()->length();
   if (node->depth() > 1) {
     frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
@@ -3450,10 +3414,10 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {

     // The property must be set by generated code.
     Load(value);
-    frame_->EmitPop(r0);
+    frame_->PopToR0();
     // Fetch the object literal.
-    __ ldr(r1, frame_->Top());
+    frame_->SpillAllButCopyTOSToR1();

     // Get the elements array.
     __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
@@ -3863,7 +3827,6 @@ void CodeGenerator::VisitCall(Call* node) {
   // ------------------------------------------------------------------------

   if (var != NULL && var->is_possibly_eval()) {
-    VirtualFrame::SpilledScope spilled_scope(frame_);
     // ----------------------------------
     // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
     // ----------------------------------
@@ -3877,8 +3840,7 @@ void CodeGenerator::VisitCall(Call* node) {
     Load(function);

     // Allocate a frame slot for the receiver.
-    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-    frame_->EmitPush(r2);
+    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);

     // Load the arguments.
     int arg_count = args->length();
@@ -3886,6 +3848,8 @@ void CodeGenerator::VisitCall(Call* node) {
       Load(args->at(i));
     }

+    VirtualFrame::SpilledScope spilled_scope(frame_);
+
     // If we know that eval can only be shadowed by eval-introduced
     // variables we attempt to load the global eval function directly
     // in generated code. If we succeed, there is no need to perform a
@@ -5201,7 +5165,6 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ UnaryOperation");

   Token::Value op = node->op();
@@ -5273,8 +5236,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
       break;

     case Token::SUB: {
-      VirtualFrame::SpilledScope spilled(frame_);
-      frame_->EmitPop(r0);
+      frame_->PopToR0();
       GenericUnaryOpStub stub(Token::SUB, overwrite);
       frame_->CallStub(&stub, 0);
       frame_->EmitPush(r0);  // r0 has result
@@ -5282,23 +5244,28 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
     }

     case Token::BIT_NOT: {
-      // smi check
-      VirtualFrame::SpilledScope spilled(frame_);
-      frame_->EmitPop(r0);
-      JumpTarget smi_label;
+      Register tos = frame_->PopToRegister();
+      JumpTarget not_smi_label;
       JumpTarget continue_label;
-      __ tst(r0, Operand(kSmiTagMask));
-      smi_label.Branch(eq);
+      // Smi check.
+      __ tst(tos, Operand(kSmiTagMask));
+      not_smi_label.Branch(ne);

+      __ mvn(tos, Operand(tos));
+      __ bic(tos, tos, Operand(kSmiTagMask));  // Bit-clear inverted smi-tag.
+      frame_->EmitPush(tos);
+      // The fast case is the first to jump to the continue label, so it gets
+      // to decide the virtual frame layout.
+      continue_label.Jump();
+
+      not_smi_label.Bind();
+      frame_->SpillAll();
+      __ Move(r0, tos);
       GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
       frame_->CallStub(&stub, 0);
-      continue_label.Jump();
+      frame_->EmitPush(r0);

-      smi_label.Bind();
-      __ mvn(r0, Operand(r0));
-      __ bic(r0, r0, Operand(kSmiTagMask));  // bit-clear inverted smi-tag
-
       continue_label.Bind();
-      frame_->EmitPush(r0);  // r0 has result
       break;
     }
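The ordering rule stated in the new BIT_NOT comment is worth making explicit: the first jump or branch to reach a JumpTarget snapshots the current virtual frame as that target's expected frame, and every later arrival must emit merge code to match it. Letting the fast path arrive first therefore keeps the hot path merge-free. Sketch:

continue_label.Jump();   // fast path arrives first: its frame becomes the
                         // target's expected layout
// ...
not_smi_label.Bind();
frame_->SpillAll();      // slow path pays the cost of matching that layout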
@@ -5308,16 +5275,16 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
       break;

     case Token::ADD: {
-      VirtualFrame::SpilledScope spilled(frame_);
-      frame_->EmitPop(r0);
+      Register tos = frame_->Peek();
       // Smi check.
       JumpTarget continue_label;
-      __ tst(r0, Operand(kSmiTagMask));
+      __ tst(tos, Operand(kSmiTagMask));
       continue_label.Branch(eq);
-      frame_->EmitPush(r0);
       frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
+      frame_->EmitPush(r0);

       continue_label.Bind();
-      frame_->EmitPush(r0);  // r0 has result
       break;
     }
     default:
@@ -5335,6 +5302,7 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) {
   int original_height = frame_->height();
 #endif
   Comment cmnt(masm_, "[ CountOperation");
+  VirtualFrame::RegisterAllocationScope scope(this);

   bool is_postfix = node->is_postfix();
   bool is_increment = node->op() == Token::INC;
@@ -5478,7 +5446,6 @@ void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
   // after evaluating the left hand side (due to the shortcut
   // semantics), but the compiler must (statically) know if the result
   // of compiling the binary operation is materialized or not.
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   if (node->op() == Token::AND) {
     JumpTarget is_true;
     LoadCondition(node->left(), &is_true, false_target(), false);
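The comment in this hunk is the crux: with && and ||, whether the right
operand's value is ever produced depends on run-time control flow, while the
compiler must commit statically to one frame layout. A trivial illustration
of those shortcut semantics:

    #include <cstdio>

    static bool Right() {
      std::puts("right side evaluated");
      return true;
    }

    int main() {
      bool a = false;
      bool r = a && Right();  // Right() is skipped: nothing is printed.
      std::printf("%d\n", r);
      a = true;
      r = a && Right();       // now Right() runs and its value is materialized
      std::printf("%d\n", r);
      return 0;
    }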
@@ -5663,8 +5630,6 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
   if (left_is_null || right_is_null) {
     Load(left_is_null ? right : left);
     Register tos = frame_->PopToRegister();
-    // JumpTargets can't cope with register allocation yet.
-    frame_->SpillAll();
     __ LoadRoot(ip, Heap::kNullValueRootIndex);
     __ cmp(tos, ip);

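The LoadRoot/cmp pair works because null is a canonical singleton in the
heap: comparing against it is plain pointer identity, with no deep equality
needed. (JavaScript's x == null also accepts undefined, which the code
following this hunk handles with a second singleton comparison.) A toy model
of the idea, with hypothetical names:

    #include <cassert>

    struct Object {};        // stand-in for a heap object
    Object kNullValue;       // canonical null singleton (assumed)
    Object kUndefinedValue;  // canonical undefined singleton (assumed)

    // x == null in JS is true exactly for null and undefined, so two
    // identity checks against the singletons decide the comparison.
    bool LooseEqualsNull(const Object* x) {
      return x == &kNullValue || x == &kUndefinedValue;
    }

    int main() {
      Object obj;
      assert(LooseEqualsNull(&kNullValue));
      assert(LooseEqualsNull(&kUndefinedValue));
      assert(!LooseEqualsNull(&obj));
      return 0;
    }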
@@ -5707,9 +5672,6 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
     LoadTypeofExpression(operation->expression());
     Register tos = frame_->PopToRegister();

-    // JumpTargets can't cope with register allocation yet.
-    frame_->SpillAll();
-
     Register scratch = VirtualFrame::scratch0();

     if (check->Equals(Heap::number_symbol())) {
@@ -5830,7 +5792,6 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       break;

     case Token::IN: {
-      VirtualFrame::SpilledScope scope(frame_);
       Load(left);
       Load(right);
       frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
@@ -5839,7 +5800,6 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
     }

     case Token::INSTANCEOF: {
-      VirtualFrame::SpilledScope scope(frame_);
       Load(left);
       Load(right);
       InstanceofStub stub;
@@ -5937,10 +5897,15 @@ class DeferredReferenceGetKeyedValue: public DeferredCode {
 };


+// Takes key and receiver in r0 and r1 or vice versa.  Returns result
+// in r0.
 void DeferredReferenceGetKeyedValue::Generate() {
   ASSERT((key_.is(r0) && receiver_.is(r1)) ||
          (key_.is(r1) && receiver_.is(r0)));

+  VirtualFrame copied_frame(*frame_state()->frame());
+  copied_frame.SpillAll();
+
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
   __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
@@ -5961,6 +5926,13 @@ void DeferredReferenceGetKeyedValue::Generate() {
     // keyed load has been inlined.
     __ nop(PROPERTY_ACCESS_INLINED);

+    // Now go back to the frame that we entered with.  This will not overwrite
+    // the receiver or key registers since they were not in use when we came
+    // in.  The instructions emitted by this merge are skipped over by the
+    // inline load patching mechanism when looking for the branch instruction
+    // that tells it where the code to patch is.
+    copied_frame.MergeTo(frame_state()->frame());
+
     // Block the constant pool for one more instruction after leaving this
     // constant pool block scope to include the branch instruction ending the
     // deferred code.
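The copy-spill-merge sequence in the two hunks above is the central trick:
the deferred code clones the virtual frame it was entered with, spills the
clone so the stub can be called with everything safely in memory, and then
emits the moves that restore the register layout the inline path expects.
A toy model of that flow, using a hypothetical ToyFrame rather than the real
VirtualFrame API:

    #include <cstdio>
    #include <string>
    #include <vector>

    // Each frame element lives either in a register or in its stack slot.
    struct Element {
      std::string name;
      bool in_register;
    };

    struct ToyFrame {
      std::vector<Element> elements;

      // Emit "store" moves so every element ends up in memory.
      void SpillAll() {
        for (Element& e : elements) {
          if (e.in_register) {
            std::printf("  str %s  ; spill to stack slot\n", e.name.c_str());
            e.in_register = false;
          }
        }
      }

      // Emit the moves needed to make this frame's state match target.
      void MergeTo(const ToyFrame& target) {
        for (size_t i = 0; i < elements.size(); i++) {
          if (elements[i].in_register != target.elements[i].in_register) {
            std::printf("  %s %s  ; merge back\n",
                        target.elements[i].in_register ? "ldr" : "str",
                        elements[i].name.c_str());
            elements[i].in_register = target.elements[i].in_register;
          }
        }
      }
    };

    int main() {
      ToyFrame entry = {{{"receiver", true}, {"key", true}, {"local0", false}}};

      ToyFrame copy = entry;  // VirtualFrame copied_frame(*frame_state()->frame());
      copy.SpillAll();        // copied_frame.SpillAll();
      std::printf("  call stub\n");
      copy.MergeTo(entry);    // copied_frame.MergeTo(frame_state()->frame());
      return 0;
    }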
@@ -6114,7 +6086,6 @@ void CodeGenerator::EmitKeyedLoad() {
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);
-    VirtualFrame::SpilledScope spilled(frame_);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
@@ -6152,10 +6123,12 @@ void CodeGenerator::EmitKeyedLoad() {
    // Get the elements array from the receiver and check that it
    // is not a dictionary.
    __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
-    __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
-    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
-    __ cmp(scratch2, ip);
-    deferred->Branch(ne);
+    if (FLAG_debug_code) {
+      __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
+      __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+      __ cmp(scratch2, ip);
+      __ Assert(eq, "JSObject with fast elements map has slow elements");
+    }

    // Check that key is within bounds.  Use unsigned comparison to handle
    // negative keys.
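The bounds check that follows relies on a classic trick: reinterpreting the
signed key as unsigned makes any negative key compare as a huge value, so one
unsigned comparison rejects both key < 0 and key >= length. In C++ terms:

    #include <cassert>
    #include <cstdint>

    // One unsigned comparison covers both "negative" and "too large": any
    // negative int32 reinterpreted as uint32 is at least 0x80000000, which
    // exceeds any plausible array length.
    bool KeyInBounds(int32_t key, uint32_t length) {
      return static_cast<uint32_t>(key) < length;
    }

    int main() {
      assert(KeyInBounds(0, 10));
      assert(KeyInBounds(9, 10));
      assert(!KeyInBounds(10, 10));
      assert(!KeyInBounds(-1, 10));  // wraps to 0xffffffff, fails the check
      return 0;
    }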
@@ -6176,7 +6149,7 @@ void CodeGenerator::EmitKeyedLoad() {

    __ mov(r0, scratch1);
    // Make sure that the expected number of instructions are generated.
-    ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatch,
+    ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
              masm_->InstructionsGeneratedSince(&check_inlined_codesize));
  }
@@ -6204,9 +6177,9 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type) {
    // Load the value, key and receiver from the stack.
    Register value = frame_->PopToRegister();
    Register key = frame_->PopToRegister(value);
-    VirtualFrame::SpilledScope spilled(frame_);
    Register receiver = r2;
    frame_->EmitPop(receiver);
+    VirtualFrame::SpilledScope spilled(frame_);

    // The deferred code expects value, key and receiver in registers.
    DeferredReferenceSetKeyedValue* deferred =