@@ -29,7 +29,9 @@

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "debug.h"

namespace v8 {
namespace internal {

@@ -43,8 +45,152 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
}


void MacroAssembler::TailCallRuntime(ExternalReference const& a, int b) {
  UNIMPLEMENTED();
}


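// Assert emits a Check of the condition only when FLAG_debug_code is set;
// Check jumps over the Abort when condition cc holds and aborts with msg
// otherwise.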
void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (FLAG_debug_code) Check(cc, msg);
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


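// Smoke test of the macro assembler: assembles a small function that calls
// Runtime::kStringParseFloat on the string "451", adds the secret constant
// to the smi result, runs the generated code through Execution::Call, and
// CHECKs the returned value.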
void MacroAssembler::ConstructAndTestJSFunction() {
  const int initial_buffer_size = 4 * KB;
  char* buffer = new char[initial_buffer_size];
  MacroAssembler masm(buffer, initial_buffer_size);

  const uint64_t secret = V8_INT64_C(0xdeadbeefcafebabe);
  Handle<String> constant =
      Factory::NewStringFromAscii(Vector<const char>("451", 3), TENURED);
#define __ ACCESS_MASM((&masm))
  // Construct a simple JS function here, using Assembler and MacroAssembler
  // commands.
  __ movq(rax, constant, RelocInfo::EMBEDDED_OBJECT);
  __ push(rax);
  __ CallRuntime(Runtime::kStringParseFloat, 1);
  __ movq(kScratchRegister, secret, RelocInfo::NONE);
  __ addq(rax, kScratchRegister);
  __ ret(0);
#undef __
  CodeDesc desc;
  masm.GetCode(&desc);
  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Object* code = Heap::CreateCode(desc, NULL, flags, Handle<Object>::null());
  if (!code->IsFailure()) {
    Handle<Code> code_handle(Code::cast(code));
    Handle<String> name =
        Factory::NewStringFromAscii(Vector<const char>("foo", 3), NOT_TENURED);
    Handle<JSFunction> function =
        Factory::NewFunction(name,
                             JS_FUNCTION_TYPE,
                             JSObject::kHeaderSize,
                             code_handle,
                             true);
    bool pending_exceptions;
    Handle<Object> result =
        Execution::Call(function,
                        Handle<Object>::cast(function),
                        0,
                        NULL,
                        &pending_exceptions);
    CHECK(result->IsSmi());
    CHECK(secret + (451 << kSmiTagSize) ==
          reinterpret_cast<uint64_t>(*result));
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)),
       RelocInfo::NONE);
  push(kScratchRegister);
  CallRuntime(Runtime::kAbort, 2);
  // will not return here
}


void MacroAssembler::CallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls());  // calls are not allowed in some stubs
  movq(kScratchRegister, stub->GetCode(), RelocInfo::CODE_TARGET);
  call(kScratchRegister);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  Runtime::FunctionId function_id =
      static_cast<Runtime::FunctionId>(f->stub_id);
  RuntimeStub stub(function_id, num_arguments);
  CallStub(&stub);
}


void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
                                     int num_arguments) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  movq(rax, Immediate(num_arguments));
  JumpToBuiltin(ext);
}


void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  movq(rbx, ext);
  CEntryStub ces;
  movq(kScratchRegister, ces.GetCode(), RelocInfo::CODE_TARGET);
  jmp(kScratchRegister);
}


@@ -71,18 +217,43 @@ void MacroAssembler::Set(const Operand& dst, int64_t x) {
}


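// The Jump and Call variants below load the 64-bit destination into
// kScratchRegister and branch through the register, since direct jmp/call
// instructions only take 32-bit immediates.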
void MacroAssembler::Jump(ExternalReference ext) {
  movq(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Call(ExternalReference ext) {
  movq(kScratchRegister, ext);
  call(kScratchRegister);
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
}


void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                    HandlerType type) {
  // The pc (return address) is already on TOS.
  // This code pushes state, code, frame pointer and parameter pointer.
  // Check that they are expected next on the stack, in that order.
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // The pc (return address) is already on TOS. This code pushes state,
  // frame pointer and current handler. Check that they are expected
  // next on the stack, in that order.
  ASSERT_EQ(StackHandlerConstants::kStateOffset,
            StackHandlerConstants::kPCOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kCodeOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kFPOffset,
            StackHandlerConstants::kCodeOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kPPOffset,
            StackHandlerConstants::kStateOffset - kPointerSize);
  ASSERT_EQ(StackHandlerConstants::kNextOffset,
            StackHandlerConstants::kFPOffset - kPointerSize);

  if (try_location == IN_JAVASCRIPT) {
@@ -91,26 +262,414 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
    } else {
      push(Immediate(StackHandler::TRY_FINALLY));
    }
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(rbp);
    push(rdi);
  } else {
    ASSERT(try_location == IN_JS_ENTRY);
    // The parameter pointer is meaningless here and ebp does not
    // point to a JS frame. So we save NULL for both pp and ebp. We
    // expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    // The frame pointer does not point to a JS frame so we save NULL
    // for rbp. We expect the code throwing an exception to check rbp
    // before dereferencing it to restore the context.
    push(Immediate(StackHandler::ENTRY));
    push(Immediate(Smi::FromInt(StackHandler::kCodeNotPresent)));
    push(Immediate(0));  // NULL frame pointer
    push(Immediate(0));  // NULL parameter pointer
    push(Immediate(0));  // NULL frame pointer.
  }
  // Save the current handler.
  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
  // Cached TOS.
  movq(rax, Operand(kScratchRegister, 0));
  push(Operand(kScratchRegister, 0));
  // Link this handler.
  movq(Operand(kScratchRegister, 0), rsp);
}


void MacroAssembler::Ret() {
  ret(0);
}


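// CmpObjectType loads the map of heap_object into the map register and
// compares the map's instance type field against the given type; callers
// test the resulting flags with j(). CmpInstanceType does the comparison
// when the map is already available.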
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}


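// Counter support: when --native-code-counters is enabled, the helpers below
// write, increment or decrement a StatsCounter cell in memory through
// kScratchRegister.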
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    movl(Operand(kScratchRegister, 0), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      incl(operand);
    } else {
      addl(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    movq(kScratchRegister, ExternalReference(counter));
    Operand operand(kScratchRegister, 0);
    if (value == 1) {
      decl(operand);
    } else {
      subl(operand, Immediate(value));
    }
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT

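// Debugger support. The debugger keeps a per-register memory location for
// each JS caller-saved register; the helpers below move register values
// between those memory locations, the stack, and the registers themselves.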
void MacroAssembler::PushRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Push the content of the memory location to the stack.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      push(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::SaveRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of registers to memory location.
  for (int i = 0; i < kNumJSCallerSaved; i++) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), reg);
    }
  }
}


void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of memory location to registers.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      Register reg = { r };
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(reg, Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::PopRegistersToMemory(RegList regs) {
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Pop the content from the stack to the memory location.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      pop(Operand(kScratchRegister, 0));
    }
  }
}


void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
                                                    Register scratch,
                                                    RegList regs) {
  ASSERT(!scratch.is(kScratchRegister));
  ASSERT(!base.is(kScratchRegister));
  ASSERT(!base.is(scratch));
  ASSERT((regs & ~kJSCallerSaved) == 0);
  // Copy the content of the stack to the memory location and adjust base.
  for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
    int r = JSCallerSavedCode(i);
    if ((regs & (1 << r)) != 0) {
      movq(scratch, Operand(base, 0));
      ExternalReference reg_addr =
          ExternalReference(Debug_Address::Register(i));
      movq(kScratchRegister, reg_addr);
      movq(Operand(kScratchRegister, 0), scratch);
      lea(base, Operand(base, kPointerSize));
    }
  }
}

#endif  // ENABLE_DEBUGGER_SUPPORT


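// InvokePrologue compares the expected and actual argument counts. If they
// are known to match, control falls through toward the invoke label;
// otherwise the ArgumentsAdaptorTrampoline builtin is called (or jumped to)
// to adapt the arguments before reaching the target code.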
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      movq(rax, Immediate(actual.immediate()));
      if (expected.immediate() ==
          SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        movq(rbx, Immediate(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmpq(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke);
      ASSERT(expected.reg().is(rbx));
      movq(rax, Immediate(actual.immediate()));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmpq(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(rax));
      ASSERT(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
    if (!code_constant.is_null()) {
      movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movq(rdx, code_register);
    }

    movq(kScratchRegister, adaptor, RelocInfo::CODE_TARGET);
    if (flag == CALL_FUNCTION) {
      call(kScratchRegister);
      jmp(done);
    } else {
      jmp(kScratchRegister);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag) {
  Label done;
  InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
  if (flag == CALL_FUNCTION) {
    call(code);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(code);
  }
  bind(&done);
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag) {
  Label done;
  Register dummy = rax;
  InvokePrologue(expected, actual, code, dummy, &done, flag);
  movq(kScratchRegister, code, rmode);
  if (flag == CALL_FUNCTION) {
    call(kScratchRegister);
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    jmp(kScratchRegister);
  }
  bind(&done);
}


void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  ASSERT(function.is(rdi));
  movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movl(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
  // Advances rdx to the end of the Code object header, to the start of
  // the executable code.
  lea(rdx, FieldOperand(rdx, Code::kHeaderSize));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag);
}


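// EnterFrame and LeaveFrame build and tear down a standard internal frame:
// the caller's rbp, the context register rsi, a smi frame-type marker, and
// the code object are pushed on entry, and rbp/rsp are restored on exit.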
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(rbp);
  movq(rbp, rsp);
  push(rsi);  // Context.
  push(Immediate(Smi::FromInt(type)));
  movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  push(kScratchRegister);
  if (FLAG_debug_code) {
    movq(kScratchRegister,
         Factory::undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpq(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (FLAG_debug_code) {
    movq(kScratchRegister, Immediate(Smi::FromInt(type)));
    cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, "stack frame types must match");
  }
  movq(rsp, rbp);
  pop(rbp);
}


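// EnterExitFrame sets up a frame for calling out to C code: it saves rbp,
// reserves the exit-frame slots, records the frame pointer and context in
// Top, computes argv into r15, optionally spills the JS caller-saved
// registers for the debugger, and aligns rsp for the C call.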
void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
  ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG);

  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(rbp);
  movq(rbp, rsp);

  // Reserve room for entry stack pointer and push the debug marker.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // saved entry sp, patched before call
  push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0));

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  ExternalReference context_address(Top::k_context_address);
  movq(rdi, rax);  // Back up rax before we use it.

  movq(rax, rbp);
  store_rax(c_entry_fp_address);
  movq(rax, rsi);
  store_rax(context_address);

  // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r15, Operand(rbp, rdi, kTimesPointerSize, offset));

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Save the state of all registers to the stack from the memory
  // location. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // TODO(1243899): This should be symmetric to
    // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
    // correct here, but computed for the other call. Very error
    // prone! FIX THIS. Actually there are deeper problems with
    // register saving than this asymmetry (see the bug report
    // associated with this issue).
    PushRegistersFromMemory(kJSCallerSaved);
  }
#endif

  // Reserve space for two arguments: argc and argv.
  subq(rsp, Immediate(2 * kPointerSize));

  // Get the required frame alignment for the OS.
  static const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}


void MacroAssembler::LeaveExitFrame(StackFrame::Type type) {
  // Registers:
  // r15 : argv
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Restore the memory copy of the registers by digging them out from
  // the stack. This is needed to allow nested break points.
  if (type == StackFrame::EXIT_DEBUG) {
    // It's okay to clobber register rbx below because we don't need
    // the function pointer after this.
    const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
    int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize;
    lea(rbx, Operand(rbp, kOffset));
    CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
  }
#endif

  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(rsp, Operand(r15, 1 * kPointerSize));

  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Top::k_context_address);
  movq(kScratchRegister, context_address);
  movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
  movq(Operand(kScratchRegister, 0), Immediate(0));
#endif

  // Push the return address to get ready to return.
  push(rcx);

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
  movq(kScratchRegister, c_entry_fp_address);
  movq(Operand(kScratchRegister, 0), Immediate(0));
}


} }  // namespace v8::internal