|
@@ -418,20 +418,21 @@ void LCodeGen::AddToTranslation(Translation* translation,
 }
 
 
-void LCodeGen::CallCode(Handle<Code> code,
-                        RelocInfo::Mode mode,
-                        LInstruction* instr,
-                        bool adjusted) {
+void LCodeGen::CallCodeGeneric(Handle<Code> code,
+                               RelocInfo::Mode mode,
+                               LInstruction* instr,
+                               ContextMode context_mode,
+                               SafepointMode safepoint_mode) {
   ASSERT(instr != NULL);
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
 
-  if (!adjusted) {
+  if (context_mode == RESTORE_CONTEXT) {
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   }
   __ call(code, mode);
 
-  RegisterLazyDeoptimization(instr);
+  RegisterLazyDeoptimization(instr, safepoint_mode);
 
   // Signal that we don't inline smi code before these stubs in the
   // optimizing code generator.
@@ -442,25 +443,44 @@ void LCodeGen::CallCode(Handle<Code> code,
 }
 
 
+void LCodeGen::CallCode(Handle<Code> code,
+                        RelocInfo::Mode mode,
+                        LInstruction* instr,
+                        ContextMode context_mode) {
+  CallCodeGeneric(code, mode, instr, context_mode, RECORD_SIMPLE_SAFEPOINT);
+}
+
+
 void LCodeGen::CallRuntime(Runtime::Function* fun,
                            int argc,
                            LInstruction* instr,
-                           bool adjusted) {
+                           ContextMode context_mode) {
   ASSERT(instr != NULL);
   ASSERT(instr->HasPointerMap());
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
 
-  if (!adjusted) {
+  if (context_mode == RESTORE_CONTEXT) {
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   }
   __ CallRuntime(fun, argc);
 
-  RegisterLazyDeoptimization(instr);
+  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+}
+
+
+void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
+                                       int argc,
+                                       LInstruction* instr) {
+  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+  __ CallRuntimeSaveDoubles(id);
+  RecordSafepointWithRegisters(
+      instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
 }
 
 
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
+void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
+                                          SafepointMode safepoint_mode) {
   // Create the environment to bailout to. If the call has side effects
   // execution has to continue after the call otherwise execution can continue
   // from a previous bailout point repeating the call.
@@ -472,8 +492,16 @@ void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
   }
 
   RegisterEnvironmentForDeoptimization(deoptimization_environment);
-  RecordSafepoint(instr->pointer_map(),
-                  deoptimization_environment->deoptimization_index());
+  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+    RecordSafepoint(instr->pointer_map(),
+                    deoptimization_environment->deoptimization_index());
+  } else {
+    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+    RecordSafepointWithRegisters(
+        instr->pointer_map(),
+        0,
+        deoptimization_environment->deoptimization_index());
+  }
 }
 
 
@@ -622,6 +650,7 @@ void LCodeGen::RecordSafepoint(
     Safepoint::Kind kind,
     int arguments,
     int deoptimization_index) {
+  ASSERT(kind == expected_safepoint_kind_);
   const ZoneList<LOperand*>* operands = pointers->operands();
   Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
       kind, arguments, deoptimization_index);
@@ -707,48 +736,48 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpConstructResult: {
       RegExpConstructResultStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::RegExpExec: {
       RegExpExecStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
      break;
     }
     case CodeStub::SubString: {
       SubStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::StringCharAt: {
       StringCharAtStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::MathPow: {
       MathPowStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::NumberToString: {
       NumberToStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::StringAdd: {
       StringAddStub stub(NO_STRING_ADD_FLAGS);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::StringCompare: {
       StringCompareStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     case CodeStub::TranscendentalCache: {
       TranscendentalCacheStub stub(instr->transcendental_type(),
                                    TranscendentalCacheStub::TAGGED);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
       break;
     }
     default:
@@ -1098,7 +1127,7 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {
 
 void LCodeGen::DoThrow(LThrow* instr) {
   __ push(ToOperand(instr->InputAt(0)));
-  CallRuntime(Runtime::kThrow, 1, instr, false);
+  CallRuntime(Runtime::kThrow, 1, instr, RESTORE_CONTEXT);
 
   if (FLAG_debug_code) {
     Comment("Unreachable code.");
@@ -1170,7 +1199,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
 
 
@@ -1282,12 +1311,8 @@ void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
 
 void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
-  __ pushad();
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
-  __ popad();
+  PushSafepointRegistersScope scope(this);
+  CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr);
 }
 
 
 void LCodeGen::DoGoto(LGoto* instr) {
@@ -1776,7 +1801,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   // Object and function are in fixed registers defined by the stub.
   ASSERT(ToRegister(instr->context()).is(esi));
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 
   NearLabel true_value, done;
   __ test(eax, Operand(eax));
@@ -1795,7 +1820,7 @@ void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
   __ test(eax, Operand(eax));
   EmitBranch(true_block, false_block, zero);
 }
@@ -1867,7 +1892,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
 
 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                                 Label* map_check) {
-  __ PushSafepointRegisters();
+  PushSafepointRegistersScope scope(this);
 
   InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
   flags = static_cast<InstanceofStub::Flags>(
@@ -1878,11 +1903,12 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
       flags | InstanceofStub::kReturnTrueFalseObject);
   InstanceofStub stub(flags);
 
-  // Get the temp register reserved by the instruction. This needs to be edi as
-  // its slot of the pushing of safepoint registers is used to communicate the
-  // offset to the location of the map check.
+  // Get the temp register reserved by the instruction. This needs to be a
+  // register which is pushed last by PushSafepointRegisters as top of the
+  // stack is used to pass the offset to the location of the map check to
+  // the stub.
   Register temp = ToRegister(instr->TempAt(0));
-  ASSERT(temp.is(edi));
+  ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
   __ mov(InstanceofStub::right(), Immediate(instr->function()));
   static const int kAdditionalDelta = 16;
   int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
@@ -1890,10 +1916,13 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
   __ bind(&before_push_delta);
   __ mov(temp, Immediate(delta));
   __ StoreToSafepointRegisterSlot(temp, temp);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCodeGeneric(stub.GetCode(),
+                  RelocInfo::CODE_TARGET,
+                  instr,
+                  RESTORE_CONTEXT,
+                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   // Put the result value into the eax slot and restore all registers.
   __ StoreToSafepointRegisterSlot(eax, eax);
-  __ PopSafepointRegisters();
 }
 
 
@@ -1921,7 +1950,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
   Handle<Code> ic = CompareIC::GetUninitialized(op);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 
   Condition condition = ComputeCompareCondition(op);
   if (op == Token::GT || op == Token::LTE) {
@@ -1944,7 +1973,7 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
   Handle<Code> ic = CompareIC::GetUninitialized(op);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 
   // The compare stub expects compare condition and the input operands
   // reversed for GT and LTE.
@@ -2039,7 +2068,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
 
   __ mov(ecx, instr->name());
   Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2163,7 +2192,7 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->key()).is(eax));
 
   Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2351,7 +2380,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
   }
 
   // Setup deoptimization.
-  RegisterLazyDeoptimization(instr);
+  RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
 }
 
 
@@ -2373,7 +2402,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
   Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
 
   // Preserve the value of all registers.
-  __ PushSafepointRegisters();
+  PushSafepointRegistersScope scope(this);
 
   Label negative;
   __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
@@ -2394,10 +2423,8 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
   // Slow case: Call the runtime system to do the number allocation.
   __ bind(&slow);
 
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
+
   // Set the pointer to the new heap number in tmp.
   if (!tmp.is(eax)) __ mov(tmp, eax);
 
@@ -2413,7 +2440,6 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
   __ StoreToSafepointRegisterSlot(input_reg, tmp);
 
   __ bind(&done);
-  __ PopSafepointRegisters();
 }
 
 
@@ -2601,7 +2627,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
 
 
@@ -2609,7 +2635,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::COS,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
 
 
@@ -2617,7 +2643,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
 }
 
 
@@ -2661,7 +2687,7 @@ void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
 
   int arity = instr->arity();
   Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2672,7 +2698,7 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
   int arity = instr->arity();
   Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
   __ mov(ecx, instr->name());
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2682,7 +2708,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
 
   int arity = instr->arity();
   CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
   __ Drop(1);
 }
 
@@ -2694,7 +2720,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   int arity = instr->arity();
   Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
   __ mov(ecx, instr->name());
-  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2712,12 +2738,12 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 
   Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
   __ Set(eax, Immediate(instr->arity()));
-  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
+  CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr, CONTEXT_ADJUSTED);
 }
 
 
 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
-  CallRuntime(instr->function(), instr->arity(), instr, false);
+  CallRuntime(instr->function(), instr->arity(), instr, RESTORE_CONTEXT);
 }
 
 
@@ -2760,7 +2786,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
   Handle<Code> ic(Builtins::builtin(
       info_->is_strict() ? Builtins::StoreIC_Initialize_Strict
                          : Builtins::StoreIC_Initialize));
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2830,7 +2856,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   Handle<Code> ic(Builtins::builtin(
       info_->is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
                          : Builtins::KeyedStoreIC_Initialize));
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
 }
 
 
@@ -2948,7 +2974,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
   // contained in the register pointer map.
   __ Set(result, Immediate(0));
 
-  __ PushSafepointRegisters();
+  PushSafepointRegistersScope scope(this);
   __ push(string);
   // Push the index as a smi. This is safe because of the checks in
   // DoStringCharCodeAt above.
@@ -2961,16 +2987,12 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
     __ SmiTag(index);
     __ push(index);
   }
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-  __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
+  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr);
   if (FLAG_debug_code) {
     __ AbortIfNotSmi(eax);
   }
   __ SmiUntag(eax);
   __ StoreToSafepointRegisterSlot(result, eax);
-  __ PopSafepointRegisters();
 }
 
 
@@ -3017,7 +3039,7 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
   Register tmp = reg.is(eax) ? ecx : eax;
 
   // Preserve the value of all registers.
-  __ PushSafepointRegisters();
+  PushSafepointRegistersScope scope(this);
 
   // There was overflow, so bits 30 and 31 of the original integer
   // disagree. Try to allocate a heap number in new space and store
@@ -3039,10 +3061,7 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
   // integer value.
   __ StoreToSafepointRegisterSlot(reg, Immediate(0));
 
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
   if (!reg.is(eax)) __ mov(reg, eax);
 
   // Done. Put the value in xmm0 into the value of the allocated heap
@@ -3050,7 +3069,6 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
   __ bind(&done);
   __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
   __ StoreToSafepointRegisterSlot(reg, reg);
-  __ PopSafepointRegisters();
 }
 
 
@@ -3086,13 +3104,9 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
   Register reg = ToRegister(instr->result());
   __ Set(reg, Immediate(0));
 
-  __ PushSafepointRegisters();
-  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
-  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+  PushSafepointRegistersScope scope(this);
+  CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
   __ StoreToSafepointRegisterSlot(reg, eax);
-  __ PopSafepointRegisters();
 }
 
 
@@ -3503,16 +3517,16 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
     FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   } else if (instr->hydrogen()->depth() > 1) {
-    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
+    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, RESTORE_CONTEXT);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
-    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
+    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, RESTORE_CONTEXT);
   } else {
     FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   }
 }
 
@@ -3528,9 +3542,12 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
 
   // Pick the right runtime function to call.
   if (instr->hydrogen()->depth() > 1) {
-    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
+    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr, CONTEXT_ADJUSTED);
   } else {
-    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+    CallRuntime(Runtime::kCreateObjectLiteralShallow,
+                4,
+                instr,
+                CONTEXT_ADJUSTED);
   }
 }
 
@@ -3556,7 +3573,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
   __ push(Immediate(instr->hydrogen()->pattern()));
   __ push(Immediate(instr->hydrogen()->flags()));
-  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
+  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, RESTORE_CONTEXT);
   __ mov(ebx, eax);
 
   __ bind(&materialized);
@@ -3568,7 +3585,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
   __ bind(&runtime_allocate);
   __ push(ebx);
   __ push(Immediate(Smi::FromInt(size)));
-  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
+  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, RESTORE_CONTEXT);
   __ pop(ebx);
 
   __ bind(&allocated);
@@ -3595,14 +3612,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   if (shared_info->num_literals() == 0 && !pretenure) {
     FastNewClosureStub stub;
     __ push(Immediate(shared_info));
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   } else {
     __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
     __ push(Immediate(shared_info));
     __ push(Immediate(pretenure
                       ? Factory::true_value()
                       : Factory::false_value()));
-    CallRuntime(Runtime::kNewClosure, 3, instr, false);
+    CallRuntime(Runtime::kNewClosure, 3, instr, RESTORE_CONTEXT);
   }
 }
 
@@ -3614,7 +3631,7 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
   } else {
     __ push(ToOperand(input));
   }
-  CallRuntime(Runtime::kTypeof, 1, instr, false);
+  CallRuntime(Runtime::kTypeof, 1, instr, RESTORE_CONTEXT);
 }
 
 
@@ -3825,7 +3842,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
   __ j(above_equal, &done);
 
   StackCheckStub stub;
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
+  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, RESTORE_CONTEXT);
   __ bind(&done);
 }
 