|
|
@ -60,6 +60,16 @@ void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
|
|
|
Isolate* isolate, |
|
|
|
CodeStubInterfaceDescriptor* descriptor) { |
|
|
|
static Register registers[] = { r2 }; |
|
|
|
descriptor->register_param_count_ = 1; |
|
|
|
descriptor->register_params_ = registers; |
|
|
|
descriptor->deoptimization_handler_ = NULL; |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
|
|
|
Isolate* isolate, |
|
|
|
CodeStubInterfaceDescriptor* descriptor) { |
|
|
@ -226,8 +236,42 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::InitializeInterfaceDescriptor( |
|
|
|
Isolate* isolate, |
|
|
|
CodeStubInterfaceDescriptor* descriptor) { |
|
|
|
static Register registers[] = { r0 }; |
|
|
|
descriptor->register_param_count_ = 1; |
|
|
|
descriptor->register_params_ = registers; |
|
|
|
descriptor->deoptimization_handler_ = |
|
|
|
FUNCTION_ADDR(UnaryOpIC_Miss); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void StoreGlobalStub::InitializeInterfaceDescriptor( |
|
|
|
Isolate* isolate, |
|
|
|
CodeStubInterfaceDescriptor* descriptor) { |
|
|
|
static Register registers[] = { r1, r2, r0 }; |
|
|
|
descriptor->register_param_count_ = 3; |
|
|
|
descriptor->register_params_ = registers; |
|
|
|
descriptor->deoptimization_handler_ = |
|
|
|
FUNCTION_ADDR(StoreIC_MissFromStubFailure); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor( |
|
|
|
Isolate* isolate, |
|
|
|
CodeStubInterfaceDescriptor* descriptor) { |
|
|
|
static Register registers[] = { r0, r3, r1, r2 }; |
|
|
|
descriptor->register_param_count_ = 4; |
|
|
|
descriptor->register_params_ = registers; |
|
|
|
descriptor->deoptimization_handler_ = |
|
|
|
FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
#define __ ACCESS_MASM(masm) |
|
|
|
|
|
|
|
|
|
|
|
static void EmitIdenticalObjectComparison(MacroAssembler* masm, |
|
|
|
Label* slow, |
|
|
|
Condition cond); |
|
|
@ -892,17 +936,10 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, |
|
|
|
|
|
|
|
// Now that we have the types we might as well check for
|
|
|
|
// internalized-internalized.
|
|
|
|
Label not_internalized; |
|
|
|
STATIC_ASSERT(kInternalizedTag != 0); |
|
|
|
__ and_(r2, r2, Operand(kIsNotStringMask | kIsInternalizedMask)); |
|
|
|
__ cmp(r2, Operand(kInternalizedTag | kStringTag)); |
|
|
|
__ b(ne, ¬_internalized); // r2 (rhs) is not an internalized string
|
|
|
|
|
|
|
|
__ and_(r3, r3, Operand(kIsNotStringMask | kIsInternalizedMask)); |
|
|
|
__ cmp(r3, Operand(kInternalizedTag | kStringTag)); |
|
|
|
__ b(eq, &return_not_equal); // both rhs and lhs are internalized strings
|
|
|
|
|
|
|
|
__ bind(¬_internalized); |
|
|
|
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
|
|
|
__ orr(r2, r2, Operand(r3)); |
|
|
|
__ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask)); |
|
|
|
__ b(eq, &return_not_equal); |
|
|
|
} |
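
// A minimal, self-contained sketch of the internalized-string test used
// above, assuming this revision's instance-type encoding (kStringTag == 0 and
// kInternalizedTag == 0, with kIsNotStringMask / kIsNotInternalizedMask
// selecting the corresponding bits). The concrete constant values below are
// illustrative, not taken from the patch.
#include <cstdint>

constexpr uint32_t kIsNotStringMaskSketch       = 0x80;  // set => not a string
constexpr uint32_t kIsNotInternalizedMaskSketch = 0x40;  // set => not internalized

// Both operands are internalized strings exactly when neither the "not a
// string" nor the "not internalized" bit is set in either instance type,
// which is why a single orr + tst against the combined mask can replace the
// two separate and_/cmp sequences that were removed.
bool BothInternalizedStrings(uint32_t lhs_type, uint32_t rhs_type) {
  return ((lhs_type | rhs_type) &
          (kIsNotStringMaskSketch | kIsNotInternalizedMaskSketch)) == 0;
}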
|
|
|
|
|
|
|
|
|
|
@ -943,15 +980,15 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm, |
|
|
|
|
|
|
|
// r2 is object type of rhs.
|
|
|
|
Label object_test; |
|
|
|
STATIC_ASSERT(kInternalizedTag != 0); |
|
|
|
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
|
|
|
__ tst(r2, Operand(kIsNotStringMask)); |
|
|
|
__ b(ne, &object_test); |
|
|
|
__ tst(r2, Operand(kIsInternalizedMask)); |
|
|
|
__ b(eq, possible_strings); |
|
|
|
__ tst(r2, Operand(kIsNotInternalizedMask)); |
|
|
|
__ b(ne, possible_strings); |
|
|
|
__ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE); |
|
|
|
__ b(ge, not_both_strings); |
|
|
|
__ tst(r3, Operand(kIsInternalizedMask)); |
|
|
|
__ b(eq, possible_strings); |
|
|
|
__ tst(r3, Operand(kIsNotInternalizedMask)); |
|
|
|
__ b(ne, possible_strings); |
|
|
|
|
|
|
|
// Both are internalized. We already checked they weren't the same pointer
|
|
|
|
// so they are not equal.
|
|
|
@ -982,7 +1019,6 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, |
|
|
|
Register scratch1, |
|
|
|
Register scratch2, |
|
|
|
Register scratch3, |
|
|
|
bool object_is_smi, |
|
|
|
Label* not_found) { |
|
|
|
// Use of registers. Register result is used as a temporary.
|
|
|
|
Register number_string_cache = result; |
|
|
@ -1005,40 +1041,38 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, |
|
|
|
Isolate* isolate = masm->isolate(); |
|
|
|
Label is_smi; |
|
|
|
Label load_result_from_cache; |
|
|
|
if (!object_is_smi) { |
|
|
|
__ JumpIfSmi(object, &is_smi); |
|
|
|
__ CheckMap(object, |
|
|
|
scratch1, |
|
|
|
Heap::kHeapNumberMapRootIndex, |
|
|
|
not_found, |
|
|
|
DONT_DO_SMI_CHECK); |
|
|
|
__ JumpIfSmi(object, &is_smi); |
|
|
|
__ CheckMap(object, |
|
|
|
scratch1, |
|
|
|
Heap::kHeapNumberMapRootIndex, |
|
|
|
not_found, |
|
|
|
DONT_DO_SMI_CHECK); |
|
|
|
|
|
|
|
STATIC_ASSERT(8 == kDoubleSize); |
|
|
|
__ add(scratch1, |
|
|
|
object, |
|
|
|
Operand(HeapNumber::kValueOffset - kHeapObjectTag)); |
|
|
|
__ ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); |
|
|
|
__ eor(scratch1, scratch1, Operand(scratch2)); |
|
|
|
__ and_(scratch1, scratch1, Operand(mask)); |
|
|
|
|
|
|
|
// Calculate address of entry in string cache: each entry consists
|
|
|
|
// of two pointer sized fields.
|
|
|
|
__ add(scratch1, |
|
|
|
number_string_cache, |
|
|
|
Operand(scratch1, LSL, kPointerSizeLog2 + 1)); |
|
|
|
|
|
|
|
Register probe = mask; |
|
|
|
__ ldr(probe, |
|
|
|
FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |
|
|
|
__ JumpIfSmi(probe, not_found); |
|
|
|
__ sub(scratch2, object, Operand(kHeapObjectTag)); |
|
|
|
__ vldr(d0, scratch2, HeapNumber::kValueOffset); |
|
|
|
__ sub(probe, probe, Operand(kHeapObjectTag)); |
|
|
|
__ vldr(d1, probe, HeapNumber::kValueOffset); |
|
|
|
__ VFPCompareAndSetFlags(d0, d1); |
|
|
|
__ b(ne, not_found); // The cache did not contain this value.
|
|
|
|
__ b(&load_result_from_cache); |
|
|
|
} |
|
|
|
STATIC_ASSERT(8 == kDoubleSize); |
|
|
|
__ add(scratch1, |
|
|
|
object, |
|
|
|
Operand(HeapNumber::kValueOffset - kHeapObjectTag)); |
|
|
|
__ ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); |
|
|
|
__ eor(scratch1, scratch1, Operand(scratch2)); |
|
|
|
__ and_(scratch1, scratch1, Operand(mask)); |
|
|
|
|
|
|
|
// Calculate address of entry in string cache: each entry consists
|
|
|
|
// of two pointer sized fields.
|
|
|
|
__ add(scratch1, |
|
|
|
number_string_cache, |
|
|
|
Operand(scratch1, LSL, kPointerSizeLog2 + 1)); |
|
|
|
|
|
|
|
Register probe = mask; |
|
|
|
__ ldr(probe, |
|
|
|
FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |
|
|
|
__ JumpIfSmi(probe, not_found); |
|
|
|
__ sub(scratch2, object, Operand(kHeapObjectTag)); |
|
|
|
__ vldr(d0, scratch2, HeapNumber::kValueOffset); |
|
|
|
__ sub(probe, probe, Operand(kHeapObjectTag)); |
|
|
|
__ vldr(d1, probe, HeapNumber::kValueOffset); |
|
|
|
__ VFPCompareAndSetFlags(d0, d1); |
|
|
|
__ b(ne, not_found); // The cache did not contain this value.
|
|
|
|
__ b(&load_result_from_cache); |
|
|
|
|
|
|
|
__ bind(&is_smi); |
|
|
|
Register scratch = scratch1; |
|
|
@ -1050,7 +1084,6 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, |
|
|
|
Operand(scratch, LSL, kPointerSizeLog2 + 1)); |
|
|
|
|
|
|
|
// Check if the entry is the smi we are looking for.
|
|
|
|
Register probe = mask; |
|
|
|
__ ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize)); |
|
|
|
__ cmp(object, probe); |
|
|
|
__ b(ne, not_found); |
|
|
@ -1072,7 +1105,7 @@ void NumberToStringStub::Generate(MacroAssembler* masm) { |
|
|
|
__ ldr(r1, MemOperand(sp, 0)); |
|
|
|
|
|
|
|
// Generate code to lookup number in the number string cache.
|
|
|
|
GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, false, &runtime); |
|
|
|
GenerateLookupNumberStringCache(masm, r1, r0, r2, r3, r4, &runtime); |
|
|
|
__ add(sp, sp, Operand(1 * kPointerSize)); |
|
|
|
__ Ret(); |
|
|
|
|
|
|
@ -1289,277 +1322,6 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::PrintName(StringStream* stream) { |
|
|
|
const char* op_name = Token::Name(op_); |
|
|
|
const char* overwrite_name = NULL; // Make g++ happy.
|
|
|
|
switch (mode_) { |
|
|
|
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
|
|
|
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
|
|
|
} |
|
|
|
stream->Add("UnaryOpStub_%s_%s_%s", |
|
|
|
op_name, |
|
|
|
overwrite_name, |
|
|
|
UnaryOpIC::GetName(operand_type_)); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
// TODO(svenpanne): Use virtual functions instead of switch.
|
|
|
|
void UnaryOpStub::Generate(MacroAssembler* masm) { |
|
|
|
switch (operand_type_) { |
|
|
|
case UnaryOpIC::UNINITIALIZED: |
|
|
|
GenerateTypeTransition(masm); |
|
|
|
break; |
|
|
|
case UnaryOpIC::SMI: |
|
|
|
GenerateSmiStub(masm); |
|
|
|
break; |
|
|
|
case UnaryOpIC::NUMBER: |
|
|
|
GenerateNumberStub(masm); |
|
|
|
break; |
|
|
|
case UnaryOpIC::GENERIC: |
|
|
|
GenerateGenericStub(masm); |
|
|
|
break; |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
|
|
|
__ mov(r3, Operand(r0)); // the operand
|
|
|
|
__ mov(r2, Operand(Smi::FromInt(op_))); |
|
|
|
__ mov(r1, Operand(Smi::FromInt(mode_))); |
|
|
|
__ mov(r0, Operand(Smi::FromInt(operand_type_))); |
|
|
|
__ Push(r3, r2, r1, r0); |
|
|
|
|
|
|
|
__ TailCallExternalReference( |
|
|
|
ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
// TODO(svenpanne): Use virtual functions instead of switch.
|
|
|
|
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
|
|
|
switch (op_) { |
|
|
|
case Token::SUB: |
|
|
|
GenerateSmiStubSub(masm); |
|
|
|
break; |
|
|
|
case Token::BIT_NOT: |
|
|
|
GenerateSmiStubBitNot(masm); |
|
|
|
break; |
|
|
|
default: |
|
|
|
UNREACHABLE(); |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
|
|
|
Label non_smi, slow; |
|
|
|
GenerateSmiCodeSub(masm, &non_smi, &slow); |
|
|
|
__ bind(&non_smi); |
|
|
|
__ bind(&slow); |
|
|
|
GenerateTypeTransition(masm); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
|
|
|
Label non_smi; |
|
|
|
GenerateSmiCodeBitNot(masm, &non_smi); |
|
|
|
__ bind(&non_smi); |
|
|
|
GenerateTypeTransition(masm); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
|
|
|
Label* non_smi, |
|
|
|
Label* slow) { |
|
|
|
__ JumpIfNotSmi(r0, non_smi); |
|
|
|
|
|
|
|
// The result of negating zero or the smallest negative smi is not a smi.
|
|
|
|
__ bic(ip, r0, Operand(0x80000000), SetCC); |
|
|
|
__ b(eq, slow); |
|
|
|
|
|
|
|
// Return '0 - value'.
|
|
|
|
__ rsb(r0, r0, Operand::Zero()); |
|
|
|
__ Ret(); |
|
|
|
} |
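
// Why the bic/SetCC test above is sufficient, as a standalone sketch assuming
// 32-bit smis with the payload in the upper 31 bits (tag bit 0 == 0): the two
// inputs whose negation cannot stay a smi are 0 (negation would need the heap
// number -0.0) and the minimum smi (negation overflows), and those are exactly
// the tagged words that become zero once bit 31 is cleared.
#include <cstdint>
#include <optional>

std::optional<int32_t> NegateSmiFastPath(int32_t tagged) {
  if ((tagged & 0x7FFFFFFF) == 0) return std::nullopt;  // take the slow path
  // Negating the tagged word negates the untagged payload as well, since the
  // payload is just the value shifted left by one.
  return -tagged;
}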
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, |
|
|
|
Label* non_smi) { |
|
|
|
__ JumpIfNotSmi(r0, non_smi); |
|
|
|
|
|
|
|
// Flip bits and revert inverted smi-tag.
|
|
|
|
__ mvn(r0, Operand(r0)); |
|
|
|
__ bic(r0, r0, Operand(kSmiTagMask)); |
|
|
|
__ Ret(); |
|
|
|
} |
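
// The two-instruction bitwise-NOT above in standalone form, assuming the same
// 32-bit smi layout (payload in bits 1..31, tag bit 0 == 0): flipping every
// bit of the tagged word computes ~payload but also sets the tag bit, so
// clearing that bit again yields the correctly tagged smi of ~payload.
#include <cstdint>

int32_t SmiBitNot(int32_t tagged) {
  int32_t flipped = ~tagged;   // mvn: payload is now ~value, tag bit is 1
  return flipped & ~1;         // bic with kSmiTagMask: restore a valid smi tag
}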
|
|
|
|
|
|
|
|
|
|
|
// TODO(svenpanne): Use virtual functions instead of switch.
|
|
|
|
void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) { |
|
|
|
switch (op_) { |
|
|
|
case Token::SUB: |
|
|
|
GenerateNumberStubSub(masm); |
|
|
|
break; |
|
|
|
case Token::BIT_NOT: |
|
|
|
GenerateNumberStubBitNot(masm); |
|
|
|
break; |
|
|
|
default: |
|
|
|
UNREACHABLE(); |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) { |
|
|
|
Label non_smi, slow, call_builtin; |
|
|
|
GenerateSmiCodeSub(masm, &non_smi, &call_builtin); |
|
|
|
__ bind(&non_smi); |
|
|
|
GenerateHeapNumberCodeSub(masm, &slow); |
|
|
|
__ bind(&slow); |
|
|
|
GenerateTypeTransition(masm); |
|
|
|
__ bind(&call_builtin); |
|
|
|
GenerateGenericCodeFallback(masm); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) { |
|
|
|
Label non_smi, slow; |
|
|
|
GenerateSmiCodeBitNot(masm, &non_smi); |
|
|
|
__ bind(&non_smi); |
|
|
|
GenerateHeapNumberCodeBitNot(masm, &slow); |
|
|
|
__ bind(&slow); |
|
|
|
GenerateTypeTransition(masm); |
|
|
|
} |
|
|
|
|
|
|
|
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
|
|
|
Label* slow) { |
|
|
|
EmitCheckForHeapNumber(masm, r0, r1, r6, slow); |
|
|
|
// r0 is a heap number. Get a new heap number in r1.
|
|
|
|
if (mode_ == UNARY_OVERWRITE) { |
|
|
|
__ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
|
|
|
__ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
|
|
|
|
__ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
|
|
|
} else { |
|
|
|
Label slow_allocate_heapnumber, heapnumber_allocated; |
|
|
|
__ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); |
|
|
|
__ jmp(&heapnumber_allocated); |
|
|
|
|
|
|
|
__ bind(&slow_allocate_heapnumber); |
|
|
|
{ |
|
|
|
FrameScope scope(masm, StackFrame::INTERNAL); |
|
|
|
__ push(r0); |
|
|
|
__ CallRuntime(Runtime::kNumberAlloc, 0); |
|
|
|
__ mov(r1, Operand(r0)); |
|
|
|
__ pop(r0); |
|
|
|
} |
|
|
|
|
|
|
|
__ bind(&heapnumber_allocated); |
|
|
|
__ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); |
|
|
|
__ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); |
|
|
|
__ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); |
|
|
|
__ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
|
|
|
|
__ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); |
|
|
|
__ mov(r0, Operand(r1)); |
|
|
|
} |
|
|
|
__ Ret(); |
|
|
|
} |
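
// The overwrite path above flips only the sign bit in the exponent word and
// never touches the mantissa word. The same idea in portable, self-contained
// form; the 0x8000000000000000 constant plays the role of
// HeapNumber::kSignMask applied to the high half of the double.
#include <cstdint>
#include <cstring>

double NegateBySignFlip(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= 0x8000000000000000ull;  // flip the IEEE-754 sign bit
  std::memcpy(&value, &bits, sizeof(value));
  return value;
}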
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm, |
|
|
|
Label* slow) { |
|
|
|
EmitCheckForHeapNumber(masm, r0, r1, r6, slow); |
|
|
|
|
|
|
|
// Convert the heap number in r0 to an untagged integer in r1.
|
|
|
|
__ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
|
|
|
__ ECMAToInt32(r1, d0, r2, r3, r4, d1); |
|
|
|
|
|
|
|
// Do the bitwise operation and check if the result fits in a smi.
|
|
|
|
Label try_float; |
|
|
|
__ mvn(r1, Operand(r1)); |
|
|
|
__ cmn(r1, Operand(0x40000000)); |
|
|
|
__ b(mi, &try_float); |
|
|
|
|
|
|
|
// Tag the result as a smi and we're done.
|
|
|
|
__ SmiTag(r0, r1); |
|
|
|
__ Ret(); |
|
|
|
|
|
|
|
// Try to store the result in a heap number.
|
|
|
|
__ bind(&try_float); |
|
|
|
if (mode_ == UNARY_NO_OVERWRITE) { |
|
|
|
Label slow_allocate_heapnumber, heapnumber_allocated; |
|
|
|
__ AllocateHeapNumber(r0, r3, r4, r6, &slow_allocate_heapnumber); |
|
|
|
__ jmp(&heapnumber_allocated); |
|
|
|
|
|
|
|
__ bind(&slow_allocate_heapnumber); |
|
|
|
{ |
|
|
|
FrameScope scope(masm, StackFrame::INTERNAL); |
|
|
|
// Push the lower bit of the result (left shifted to look like a smi).
|
|
|
|
__ mov(r2, Operand(r1, LSL, 31)); |
|
|
|
// Push the 31 high bits (bit 0 cleared to look like a smi).
|
|
|
|
__ bic(r1, r1, Operand(1)); |
|
|
|
__ Push(r2, r1); |
|
|
|
__ CallRuntime(Runtime::kNumberAlloc, 0); |
|
|
|
__ Pop(r2, r1); // Restore the result.
|
|
|
|
__ orr(r1, r1, Operand(r2, LSR, 31)); |
|
|
|
} |
|
|
|
__ bind(&heapnumber_allocated); |
|
|
|
} |
|
|
|
|
|
|
|
__ vmov(s0, r1); |
|
|
|
__ vcvt_f64_s32(d0, s0); |
|
|
|
__ vstr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
|
|
|
__ Ret(); |
|
|
|
} |
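
// Two tricks above are worth spelling out; a hedged, self-contained sketch of
// both, assuming 32-bit smis with a one-bit tag:
#include <cstdint>
#include <utility>

// 1. "cmn r1, #0x40000000; b(mi, &try_float)": a 32-bit integer fits in the
//    31-bit smi payload exactly when adding 2^30 does not set the sign bit.
bool FitsInSmi(int32_t value) {
  return ((static_cast<uint32_t>(value) + 0x40000000u) >> 31) == 0;
}

// 2. Keeping the raw integer result alive across a GC-ing runtime call: split
//    it into two words that both look like smis (tag bit clear), push them,
//    then reassemble afterwards -- mirroring the mov/bic before the call and
//    the orr with LSR 31 after it.
std::pair<uint32_t, uint32_t> SplitForRuntimeCall(uint32_t value) {
  uint32_t low_bit   = value << 31;  // bit 0 of the value, parked in bit 31
  uint32_t high_bits = value & ~1u;  // bits 1..31 with the low bit cleared
  return {low_bit, high_bits};       // both words have tag bit 0 == 0
}

uint32_t Reassemble(uint32_t low_bit, uint32_t high_bits) {
  return high_bits | (low_bit >> 31);
}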
|
|
|
|
|
|
|
|
|
|
|
// TODO(svenpanne): Use virtual functions instead of switch.
|
|
|
|
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
|
|
|
switch (op_) { |
|
|
|
case Token::SUB: |
|
|
|
GenerateGenericStubSub(masm); |
|
|
|
break; |
|
|
|
case Token::BIT_NOT: |
|
|
|
GenerateGenericStubBitNot(masm); |
|
|
|
break; |
|
|
|
default: |
|
|
|
UNREACHABLE(); |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
|
|
|
Label non_smi, slow; |
|
|
|
GenerateSmiCodeSub(masm, &non_smi, &slow); |
|
|
|
__ bind(&non_smi); |
|
|
|
GenerateHeapNumberCodeSub(masm, &slow); |
|
|
|
__ bind(&slow); |
|
|
|
GenerateGenericCodeFallback(masm); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
|
|
|
Label non_smi, slow; |
|
|
|
GenerateSmiCodeBitNot(masm, &non_smi); |
|
|
|
__ bind(&non_smi); |
|
|
|
GenerateHeapNumberCodeBitNot(masm, &slow); |
|
|
|
__ bind(&slow); |
|
|
|
GenerateGenericCodeFallback(masm); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) { |
|
|
|
// Handle the slow case by jumping to the JavaScript builtin.
|
|
|
|
__ push(r0); |
|
|
|
switch (op_) { |
|
|
|
case Token::SUB: |
|
|
|
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); |
|
|
|
break; |
|
|
|
case Token::BIT_NOT: |
|
|
|
__ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); |
|
|
|
break; |
|
|
|
default: |
|
|
|
UNREACHABLE(); |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
// Generates code to call a C function to do a double operation.
|
|
|
|
// This code never falls through, but returns with a heap number containing
|
|
|
|
// the result in r0.
|
|
|
@ -2135,8 +1897,8 @@ void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
|
|
|
__ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE); |
|
|
|
__ b(ge, &call_runtime); |
|
|
|
|
|
|
|
StringAddStub string_add_stub((StringAddFlags) |
|
|
|
(ERECT_FRAME | NO_STRING_CHECK_IN_STUB)); |
|
|
|
StringAddStub string_add_stub( |
|
|
|
(StringAddFlags)(STRING_ADD_CHECK_NONE | STRING_ADD_ERECT_FRAME)); |
|
|
|
GenerateRegisterArgsPush(masm); |
|
|
|
__ TailCallStub(&string_add_stub); |
|
|
|
|
|
|
@ -2494,8 +2256,8 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
|
|
|
__ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE); |
|
|
|
__ b(ge, &left_not_string); |
|
|
|
|
|
|
|
StringAddStub string_add_left_stub((StringAddFlags) |
|
|
|
(ERECT_FRAME | NO_STRING_CHECK_LEFT_IN_STUB)); |
|
|
|
StringAddStub string_add_left_stub( |
|
|
|
(StringAddFlags)(STRING_ADD_CHECK_RIGHT | STRING_ADD_ERECT_FRAME)); |
|
|
|
GenerateRegisterArgsPush(masm); |
|
|
|
__ TailCallStub(&string_add_left_stub); |
|
|
|
|
|
|
@ -2505,8 +2267,8 @@ void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
|
|
|
__ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE); |
|
|
|
__ b(ge, &call_runtime); |
|
|
|
|
|
|
|
StringAddStub string_add_right_stub((StringAddFlags) |
|
|
|
(ERECT_FRAME | NO_STRING_CHECK_RIGHT_IN_STUB)); |
|
|
|
StringAddStub string_add_right_stub( |
|
|
|
(StringAddFlags)(STRING_ADD_CHECK_LEFT | STRING_ADD_ERECT_FRAME)); |
|
|
|
GenerateRegisterArgsPush(masm); |
|
|
|
__ TailCallStub(&string_add_right_stub); |
|
|
|
|
|
|
@ -3001,6 +2763,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
|
|
|
StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
|
|
|
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
|
|
|
ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
|
|
|
CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
@ -3640,7 +3403,8 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r3, r4, &miss); |
|
|
|
__ bind(&miss); |
|
|
|
StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind())); |
|
|
|
StubCompiler::TailCallBuiltin( |
|
|
|
masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
@ -3671,7 +3435,8 @@ void StringLengthStub::Generate(MacroAssembler* masm) { |
|
|
|
support_wrapper_); |
|
|
|
|
|
|
|
__ bind(&miss); |
|
|
|
StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind())); |
|
|
|
StubCompiler::TailCallBuiltin( |
|
|
|
masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
@ -3741,7 +3506,8 @@ void StoreArrayLengthStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
__ bind(&miss); |
|
|
|
|
|
|
|
StubCompiler::TailCallBuiltin(masm, StubCompiler::MissBuiltin(kind())); |
|
|
|
StubCompiler::TailCallBuiltin( |
|
|
|
masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
@ -4649,20 +4415,17 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { |
|
|
|
// function without changing the state.
|
|
|
|
__ cmp(r3, r1); |
|
|
|
__ b(eq, &done); |
|
|
|
__ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
|
|
|
__ b(eq, &done); |
|
|
|
|
|
|
|
// Special handling of the Array() function, which caches not only the
|
|
|
|
// monomorphic Array function but the initial ElementsKind with special
|
|
|
|
// sentinels
|
|
|
|
__ JumpIfNotSmi(r3, &miss); |
|
|
|
if (FLAG_debug_code) { |
|
|
|
Handle<Object> terminal_kind_sentinel = |
|
|
|
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(), |
|
|
|
LAST_FAST_ELEMENTS_KIND); |
|
|
|
__ cmp(r3, Operand(terminal_kind_sentinel)); |
|
|
|
__ Assert(le, "Array function sentinel is not an ElementsKind"); |
|
|
|
} |
|
|
|
// If we came here, we need to see if we are the array function.
|
|
|
|
// If we didn't have a matching function, and we didn't find the megamorphic
|
|
|
|
// sentinel, then we have in the cell either some other function or an
|
|
|
|
// AllocationSite. Do a map check on the object in r3.
|
|
|
|
Handle<Map> allocation_site_map( |
|
|
|
masm->isolate()->heap()->allocation_site_map(), |
|
|
|
masm->isolate()); |
|
|
|
__ ldr(r5, FieldMemOperand(r3, 0)); |
|
|
|
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); |
|
|
|
__ b(ne, &miss); |
|
|
|
|
|
|
|
// Make sure the function is the Array() function
|
|
|
|
__ LoadArrayFunction(r3); |
|
|
@ -4691,14 +4454,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { |
|
|
|
__ cmp(r1, r3); |
|
|
|
__ b(ne, ¬_array_function); |
|
|
|
|
|
|
|
// The target function is the Array constructor, install a sentinel value in
|
|
|
|
// the constructor's type info cell that will track the initial ElementsKind
|
|
|
|
// that should be used for the array when it is constructed.
|
|
|
|
Handle<Object> initial_kind_sentinel = |
|
|
|
TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(), |
|
|
|
GetInitialFastElementsKind()); |
|
|
|
__ mov(r3, Operand(initial_kind_sentinel)); |
|
|
|
__ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
|
|
|
// The target function is the Array constructor.
|
|
|
|
// Create an AllocationSite if we don't already have it, store it in the cell.
|
|
|
|
{ |
|
|
|
FrameScope scope(masm, StackFrame::INTERNAL); |
|
|
|
|
|
|
|
__ push(r0); |
|
|
|
__ push(r1); |
|
|
|
__ push(r2); |
|
|
|
|
|
|
|
CreateAllocationSiteStub create_stub; |
|
|
|
__ CallStub(&create_stub); |
|
|
|
|
|
|
|
__ pop(r2); |
|
|
|
__ pop(r1); |
|
|
|
__ pop(r0); |
|
|
|
} |
|
|
|
__ b(&done); |
|
|
|
|
|
|
|
__ bind(¬_array_function); |
|
|
@ -5723,7 +5494,11 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
__ ldr(r1, MemOperand(sp, 0 * kPointerSize)); // Second argument.
|
|
|
|
|
|
|
|
// Make sure that both arguments are strings if not known in advance.
|
|
|
|
if ((flags_ & NO_STRING_ADD_FLAGS) != 0) { |
|
|
|
// Otherwise, at least one of the arguments is definitely a string,
|
|
|
|
// and we convert the one that is not known to be a string.
|
|
|
|
if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { |
|
|
|
ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); |
|
|
|
ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); |
|
|
|
__ JumpIfEitherSmi(r0, r1, &call_runtime); |
|
|
|
// Load instance types.
|
|
|
|
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); |
|
|
@ -5735,20 +5510,16 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
__ tst(r4, Operand(kIsNotStringMask)); |
|
|
|
__ tst(r5, Operand(kIsNotStringMask), eq); |
|
|
|
__ b(ne, &call_runtime); |
|
|
|
} else { |
|
|
|
// Here at least one of the arguments is definitely a string.
|
|
|
|
// We convert the one that is not known to be a string.
|
|
|
|
if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) { |
|
|
|
ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0); |
|
|
|
GenerateConvertArgument( |
|
|
|
masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin); |
|
|
|
builtin_id = Builtins::STRING_ADD_RIGHT; |
|
|
|
} else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) { |
|
|
|
ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0); |
|
|
|
GenerateConvertArgument( |
|
|
|
masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin); |
|
|
|
builtin_id = Builtins::STRING_ADD_LEFT; |
|
|
|
} |
|
|
|
} else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { |
|
|
|
ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0); |
|
|
|
GenerateConvertArgument( |
|
|
|
masm, 1 * kPointerSize, r0, r2, r3, r4, r5, &call_builtin); |
|
|
|
builtin_id = Builtins::STRING_ADD_RIGHT; |
|
|
|
} else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { |
|
|
|
ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0); |
|
|
|
GenerateConvertArgument( |
|
|
|
masm, 0 * kPointerSize, r1, r2, r3, r4, r5, &call_builtin); |
|
|
|
builtin_id = Builtins::STRING_ADD_LEFT; |
|
|
|
} |
|
|
|
|
|
|
|
// Both arguments are strings.
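
// A sketch of how the reworked StringAddFlags read in this hunk, assuming
// CHECK_LEFT and CHECK_RIGHT are independent bits so that CHECK_BOTH is their
// union and CHECK_NONE is zero; the numeric values are not shown in the diff
// and are only illustrative. Note the polarity flip: the old
// NO_STRING_CHECK_*_IN_STUB flags said an argument was already known to be a
// string, while the new STRING_ADD_CHECK_* flags say it still has to be
// checked, which is why the branches above test the opposite flag.
enum StringAddFlagsSketch {
  STRING_ADD_CHECK_NONE  = 0,
  STRING_ADD_CHECK_LEFT  = 1 << 0,
  STRING_ADD_CHECK_RIGHT = 1 << 1,
  STRING_ADD_CHECK_BOTH  = STRING_ADD_CHECK_LEFT | STRING_ADD_CHECK_RIGHT,
  STRING_ADD_ERECT_FRAME = 1 << 2
};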
|
|
|
@ -5796,7 +5567,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
__ b(ne, &longer_than_two); |
|
|
|
|
|
|
|
// Check that both strings are non-external ASCII strings.
|
|
|
|
if (flags_ != NO_STRING_ADD_FLAGS) { |
|
|
|
if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) { |
|
|
|
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); |
|
|
|
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); |
|
|
|
__ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); |
|
|
@ -5844,7 +5615,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
// If result is not supposed to be flat, allocate a cons string object.
|
|
|
|
// If both strings are ASCII the result is an ASCII cons string.
|
|
|
|
if (flags_ != NO_STRING_ADD_FLAGS) { |
|
|
|
if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) { |
|
|
|
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); |
|
|
|
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); |
|
|
|
__ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); |
|
|
@ -5927,7 +5698,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
// r6: sum of lengths.
|
|
|
|
Label first_prepared, second_prepared; |
|
|
|
__ bind(&string_add_flat_result); |
|
|
|
if (flags_ != NO_STRING_ADD_FLAGS) { |
|
|
|
if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) { |
|
|
|
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); |
|
|
|
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); |
|
|
|
__ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); |
|
|
@ -6015,7 +5786,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
// Just jump to runtime to add the two strings.
|
|
|
|
__ bind(&call_runtime); |
|
|
|
if ((flags_ & ERECT_FRAME) != 0) { |
|
|
|
if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) { |
|
|
|
GenerateRegisterArgsPop(masm); |
|
|
|
// Build a frame
|
|
|
|
{ |
|
|
@ -6030,7 +5801,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
if (call_builtin.is_linked()) { |
|
|
|
__ bind(&call_builtin); |
|
|
|
if ((flags_ & ERECT_FRAME) != 0) { |
|
|
|
if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) { |
|
|
|
GenerateRegisterArgsPop(masm); |
|
|
|
// Build a frame
|
|
|
|
{ |
|
|
@ -6082,7 +5853,6 @@ void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, |
|
|
|
scratch2, |
|
|
|
scratch3, |
|
|
|
scratch4, |
|
|
|
false, |
|
|
|
¬_cached); |
|
|
|
__ mov(arg, scratch1); |
|
|
|
__ str(arg, MemOperand(sp, stack_offset)); |
|
|
@ -6223,14 +5993,9 @@ void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) { |
|
|
|
__ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); |
|
|
|
__ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); |
|
|
|
__ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); |
|
|
|
STATIC_ASSERT(kInternalizedTag != 0); |
|
|
|
|
|
|
|
__ and_(tmp1, tmp1, Operand(kIsNotStringMask | kIsInternalizedMask)); |
|
|
|
__ cmp(tmp1, Operand(kInternalizedTag | kStringTag)); |
|
|
|
__ b(ne, &miss); |
|
|
|
|
|
|
|
__ and_(tmp2, tmp2, Operand(kIsNotStringMask | kIsInternalizedMask)); |
|
|
|
__ cmp(tmp2, Operand(kInternalizedTag | kStringTag)); |
|
|
|
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); |
|
|
|
__ orr(tmp1, tmp1, Operand(tmp2)); |
|
|
|
__ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); |
|
|
|
__ b(ne, &miss); |
|
|
|
|
|
|
|
// Internalized strings are compared by identity.
|
|
|
@ -6264,7 +6029,6 @@ void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) { |
|
|
|
|
|
|
|
// Check that both operands are unique names. This leaves the instance
|
|
|
|
// types loaded in tmp1 and tmp2.
|
|
|
|
STATIC_ASSERT(kInternalizedTag != 0); |
|
|
|
__ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); |
|
|
|
__ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); |
|
|
|
__ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); |
|
|
@ -6330,13 +6094,13 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
|
|
|
// strings.
|
|
|
|
if (equality) { |
|
|
|
ASSERT(GetCondition() == eq); |
|
|
|
STATIC_ASSERT(kInternalizedTag != 0); |
|
|
|
__ and_(tmp3, tmp1, Operand(tmp2)); |
|
|
|
__ tst(tmp3, Operand(kIsInternalizedMask)); |
|
|
|
STATIC_ASSERT(kInternalizedTag == 0); |
|
|
|
__ orr(tmp3, tmp1, Operand(tmp2)); |
|
|
|
__ tst(tmp3, Operand(kIsNotInternalizedMask)); |
|
|
|
// Make sure r0 is non-zero. At this point input operands are
|
|
|
|
// guaranteed to be non-zero.
|
|
|
|
ASSERT(right.is(r0)); |
|
|
|
__ Ret(ne); |
|
|
|
__ Ret(eq); |
|
|
|
} |
|
|
|
|
|
|
|
// Check that both strings are sequential ASCII.
|
|
|
@ -6711,6 +6475,7 @@ struct AheadOfTimeWriteBarrierStubList { |
|
|
|
RememberedSetAction action; |
|
|
|
}; |
|
|
|
|
|
|
|
|
|
|
|
#define REG(Name) { kRegister_ ## Name ## _Code } |
|
|
|
|
|
|
|
static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { |
|
|
@ -7181,10 +6946,6 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
|
|
|
ASSERT(FAST_DOUBLE_ELEMENTS == 4); |
|
|
|
ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); |
|
|
|
|
|
|
|
Handle<Object> undefined_sentinel( |
|
|
|
masm->isolate()->heap()->undefined_value(), |
|
|
|
masm->isolate()); |
|
|
|
|
|
|
|
// is the low bit set? If so, we are holey and that is good.
|
|
|
|
__ tst(r3, Operand(1)); |
|
|
|
Label normal_sequence; |
|
|
@ -7196,18 +6957,19 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
|
|
|
__ b(eq, &normal_sequence); |
|
|
|
|
|
|
|
// We are going to create a holey array, but our kind is non-holey.
|
|
|
|
// Fix kind and retry
|
|
|
|
// Fix kind and retry (only if we have an allocation site in the cell).
|
|
|
|
__ add(r3, r3, Operand(1)); |
|
|
|
__ cmp(r2, Operand(undefined_sentinel)); |
|
|
|
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
|
|
|
__ b(eq, &normal_sequence); |
|
|
|
|
|
|
|
// The type cell may have gone megamorphic, don't overwrite if so
|
|
|
|
__ ldr(r5, FieldMemOperand(r2, kPointerSize)); |
|
|
|
__ JumpIfNotSmi(r5, &normal_sequence); |
|
|
|
__ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset)); |
|
|
|
__ ldr(r5, FieldMemOperand(r5, 0)); |
|
|
|
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); |
|
|
|
__ b(ne, &normal_sequence); |
|
|
|
|
|
|
|
// Save the resulting elements kind in type info
|
|
|
|
__ SmiTag(r3); |
|
|
|
__ str(r3, FieldMemOperand(r2, kPointerSize)); |
|
|
|
__ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset)); |
|
|
|
__ str(r3, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); |
|
|
|
__ SmiUntag(r3); |
|
|
|
|
|
|
|
__ bind(&normal_sequence); |
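
// The kind fix-up above in standalone form. It assumes the fast ElementsKind
// sequence asserted in this function, where packed and holey variants
// alternate and the low bit of the kind means "holey"; the enum values below
// mirror the visible ASSERTs and are otherwise illustrative. After this patch
// the bumped kind is written back (smi-tagged) into the AllocationSite's
// transition info, and only when the feedback cell really holds an
// AllocationSite rather than a megamorphic sentinel.
enum SketchElementsKind {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4,
  FAST_HOLEY_DOUBLE_ELEMENTS = 5
};

SketchElementsKind ToHoley(SketchElementsKind kind) {
  // Equivalent to the "__ add(r3, r3, Operand(1))" above when the current
  // kind is packed (low bit clear): it selects the holey twin.
  return static_cast<SketchElementsKind>(kind | 1);
}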
|
|
@ -7236,7 +6998,7 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
|
|
|
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
|
|
|
T stub(kind); |
|
|
|
stub.GetCode(isolate)->set_is_pregenerated(true); |
|
|
|
if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
|
|
|
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
|
|
|
T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
|
|
|
stub1.GetCode(isolate)->set_is_pregenerated(true); |
|
|
|
} |
|
|
@ -7277,10 +7039,6 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
|
|
|
// -- sp[0] : return address
|
|
|
|
// -- sp[4] : last argument
|
|
|
|
// -----------------------------------
|
|
|
|
Handle<Object> undefined_sentinel( |
|
|
|
masm->isolate()->heap()->undefined_value(), |
|
|
|
masm->isolate()); |
|
|
|
|
|
|
|
if (FLAG_debug_code) { |
|
|
|
// The array construct code is only set for the global and natives
|
|
|
|
// builtin Array functions which always have maps.
|
|
|
@ -7296,7 +7054,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
|
|
|
// We should either have undefined in r2 or a valid cell
|
|
|
|
Label okay_here; |
|
|
|
Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
|
|
|
__ cmp(r2, Operand(undefined_sentinel)); |
|
|
|
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
|
|
|
__ b(eq, &okay_here); |
|
|
|
__ ldr(r3, FieldMemOperand(r2, 0)); |
|
|
|
__ cmp(r3, Operand(cell_map)); |
|
|
@ -7306,10 +7064,20 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) { |
|
|
|
|
|
|
|
Label no_info, switch_ready; |
|
|
|
// Get the elements kind and case on that.
|
|
|
|
__ cmp(r2, Operand(undefined_sentinel)); |
|
|
|
__ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
|
|
|
__ b(eq, &no_info); |
|
|
|
__ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
|
|
|
__ JumpIfNotSmi(r3, &no_info); |
|
|
|
|
|
|
|
// The type cell may have undefined in its value.
|
|
|
|
__ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
|
|
|
__ b(eq, &no_info); |
|
|
|
|
|
|
|
// The type cell has either an AllocationSite or a JSFunction
|
|
|
|
__ ldr(r4, FieldMemOperand(r3, 0)); |
|
|
|
__ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); |
|
|
|
__ b(ne, &no_info); |
|
|
|
|
|
|
|
__ ldr(r3, FieldMemOperand(r3, AllocationSite::kTransitionInfoOffset)); |
|
|
|
__ SmiUntag(r3); |
|
|
|
__ jmp(&switch_ready); |
|
|
|
__ bind(&no_info); |
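
// A condensed view of what the dispatch above consumes after this patch,
// using illustrative stand-in types rather than V8's: the constructor's
// feedback cell either gives no information (undefined, or a value that is
// not an AllocationSite, e.g. a JSFunction) or yields the ElementsKind stored
// as a smi in the AllocationSite's transition info.
#include <optional>

struct FeedbackCellView {
  bool cell_is_undefined;         // no type cell was passed at all
  bool value_is_allocation_site;  // map check against the AllocationSite map
  int  transition_info;           // smi-tagged ElementsKind when it is a site
};

std::optional<int> ElementsKindForDispatch(const FeedbackCellView& v) {
  if (v.cell_is_undefined) return std::nullopt;          // the &no_info path
  if (!v.value_is_allocation_site) return std::nullopt;  // undefined/JSFunction
  return v.transition_info >> 1;                         // SmiUntag
}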
|
|
|