[Liftoff] Prepare for arbitrarily big stack frames

Liftoff currently allocates a stack frame of fixed size for each
function, and bails out if more stack space is needed during code
generation for the function.
This CL prepares the interface and the assemblers on ia32 and x64 to
allow patching the stack frame size after the whole function body
has been generated.

R=mstarzinger@chromium.org, titzer@chromium.org
CC=sreten.kovacevic@mips.com

Bug: v8:6600
Change-Id: Iff54ff65f3e6e13d53ff90ec34b2c5cf7d276d5e
Reviewed-on: https://chromium-review.googlesource.com/925463
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Ben Titzer <titzer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51406}
commit ba4ab32162 (parent f6b6f71ba2)
Author: Clemens Hammacher <clemensh@chromium.org>
Date:   2018-02-20 20:47:30 +01:00 (committed by Commit Bot)

14 changed files with 117 additions and 24 deletions
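The mechanism, distilled: the prologue's stack allocation is emitted as a fixed-length placeholder whose pc offset is recorded, and once the body is fully generated the placeholder is overwritten with the real frame size. A minimal standalone sketch of that pattern — illustrative names and a hand-rolled byte buffer, not the V8 API:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct CodeBuffer {
      std::vector<uint8_t> bytes;

      // Phase 1: emit `sub esp, imm32` with a dummy immediate. The imm32 form
      // is chosen deliberately: its length is independent of the value, so the
      // later patch fits exactly over the placeholder.
      uint32_t PrepareStackFrame() {
        uint32_t offset = static_cast<uint32_t>(bytes.size());
        EmitSubSp32(0);  // placeholder
        return offset;
      }

      // Phase 2: once the frame size is known, rewrite the immediate in place
      // (little-endian, as on x86). Nothing after the instruction moves.
      void PatchPrepareStackFrame(uint32_t offset, uint32_t frame_size) {
        std::memcpy(&bytes[offset + 2], &frame_size, 4);  // skip opcode+ModRM
      }

      void EmitSubSp32(uint32_t imm) {
        bytes.push_back(0x81);  // ALU r/m32, imm32 group
        bytes.push_back(0xEC);  // ModRM: /5 selects SUB, rm = esp
        for (int i = 0; i < 4; ++i) bytes.push_back((imm >> (8 * i)) & 0xFF);
      }
    };

The CL implements exactly this shape on ia32 and x64 (via the new sub_sp_32) and keeps the other platforms on bailouts or a single patched instruction.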

src/ia32/assembler-ia32.cc

@@ -1311,6 +1311,13 @@ void Assembler::sub(const Operand& dst, Register src) {
   emit_operand(src, dst);
 }
 
+void Assembler::sub_sp_32(uint32_t imm) {
+  EnsureSpace ensure_space(this);
+  EMIT(0x81);  // using a literal 32-bit immediate.
+  static constexpr Register ireg = Register::from_code<5>();
+  emit_operand(ireg, Operand(esp));
+  emit(imm);
+}
+
 void Assembler::test(Register reg, const Immediate& imm) {
   if (imm.is_uint8()) {
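A note on the encoding: this always assembles to the six-byte form 81 /5 imm32, i.e. `sub esp, imm32`. 0x81 is the ALU-group opcode, and {Register::from_code<5>()} routes the /5 opcode extension (which selects SUB within that group) through {emit_operand}'s reg field. Emitting the imm32 form even for small values is what makes the instruction patchable in place. A standalone sanity check of the ModRM byte — our arithmetic, not V8 code:

    #include <cassert>
    #include <cstdint>

    int main() {
      // emit_operand(ireg, Operand(esp)) yields ModRM with mod=11 (register
      // direct), reg=5 (the SUB opcode extension), rm=4 (esp).
      uint8_t modrm = static_cast<uint8_t>((0b11 << 6) | (5 << 3) | 4);
      assert(modrm == 0xEC);  // so the bytes are: 81 EC xx xx xx xx
    }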

src/ia32/assembler-ia32.h

@@ -783,6 +783,7 @@ class Assembler : public AssemblerBase {
   void sub(Register dst, Register src) { sub(dst, Operand(src)); }
   void sub(Register dst, const Operand& src);
   void sub(const Operand& dst, Register src);
+  void sub_sp_32(uint32_t imm);
 
   void test(Register reg, const Immediate& imm);
   void test(Register reg0, Register reg1) { test(reg0, Operand(reg1)); }

src/wasm/baseline/arm/liftoff-assembler-arm.h

@@ -13,8 +13,14 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
-  BAILOUT("ReserveStackSpace");
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  BAILOUT("PrepareStackFrame");
+  return 0;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
+  BAILOUT("PatchPrepareStackFrame");
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

src/wasm/baseline/arm64/liftoff-assembler-arm64.h

@@ -13,8 +13,14 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
-  BAILOUT("ReserveStackSpace");
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  BAILOUT("PrepareStackFrame");
+  return 0;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
+  BAILOUT("PatchPrepareStackFrame");
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
@@ -100,7 +106,7 @@ void LiftoffAssembler::FillI64Half(Register, uint32_t half_index) {
 #define UNIMPLEMENTED_GP_UNOP(name)                                \
   bool LiftoffAssembler::emit_##name(Register dst, Register src) { \
     BAILOUT("gp unop");                                            \
-    return false;                                                  \
+    return true;                                                   \
   }
 #define UNIMPLEMENTED_FP_BINOP(name)                                         \
   void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
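The return-value flip in these stubs supports the contract spelled out in liftoff-compiler.cc below: after a successful {PrepareStackFrame}, compilation must run to completion even if later methods are unimplemented. Our reading — an assumption, not stated in the CL — is that the bool reports whether the operation was handled, so returning true after recording the bailout keeps the caller from attempting a further code path; the generated code is discarded anyway once a bailout reason is set. Expanded for one representative operation (assuming i32_clz is among the GP unops), the macro now produces roughly:

    bool LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
      BAILOUT("gp unop");  // records the reason; the code will be discarded
      return true;         // claim success so code generation can finish
    }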

src/wasm/baseline/ia32/liftoff-assembler-ia32.h

@@ -49,10 +49,21 @@ static constexpr Register kCCallLastArgAddrReg = eax;
 
 static constexpr DoubleRegister kScratchDoubleReg = xmm7;
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  uint32_t offset = static_cast<uint32_t>(pc_offset());
+  sub_sp_32(0);
+  return offset;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
   uint32_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
   DCHECK_LE(bytes, kMaxInt);
-  sub(esp, Immediate(bytes));
+  // We can't run out of space, just pass anything big enough to not cause the
+  // assembler to try to grow the buffer.
+  constexpr int kAvailableSpace = 64;
+  Assembler patching_assembler(isolate(), buffer_ + offset, kAvailableSpace);
+  patching_assembler.sub_sp_32(bytes);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
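The patch is applied by pointing a second Assembler at the placeholder bytes inside the finished buffer; {kAvailableSpace} merely has to be large enough that this assembler never tries to grow its buffer, since the patch is exactly as long as the placeholder. A rough standalone model of the trick, with invented names and plain pointers:

    #include <cstddef>
    #include <cstdint>

    class MiniAssembler {
     public:
      MiniAssembler(uint8_t* buf, size_t size) : pc_(buf), limit_(buf + size) {}
      void SubSp32(uint32_t imm) {  // same fixed six-byte shape as sub_sp_32
        if (pc_ + 6 > limit_) return;  // a real assembler would grow instead
        *pc_++ = 0x81;
        *pc_++ = 0xEC;
        for (int i = 0; i < 4; ++i) *pc_++ = (imm >> (8 * i)) & 0xFF;
      }

     private:
      uint8_t* pc_;
      uint8_t* limit_;
    };

    void PatchFrameSize(uint8_t* code_buffer, uint32_t offset, uint32_t bytes) {
      // Point a fresh assembler at the placeholder and re-emit the same-length
      // instruction with the real frame size; nothing else in the buffer moves.
      MiniAssembler patcher(code_buffer + offset, 64);
      patcher.SubSp32(bytes);
    }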

src/wasm/baseline/liftoff-assembler.h

@@ -320,7 +320,12 @@ class LiftoffAssembler : public TurboAssembler {
   // Platform-specific part.        //
   ////////////////////////////////////
 
-  inline void ReserveStackSpace(uint32_t bytes);
+  // This function emits machine code to prepare the stack frame, before the
+  // size of the stack frame is known. It returns an offset in the machine code
+  // which can later be patched (via {PatchPrepareStackFrame}) when the size of
+  // the frame is known.
+  inline uint32_t PrepareStackFrame();
+  inline void PatchPrepareStackFrame(uint32_t offset, uint32_t stack_slots);
 
   inline void LoadConstant(LiftoffRegister, WasmValue,
                            RelocInfo::Mode rmode = RelocInfo::NONE);
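The intended calling sequence, as the compiler-side hunks below wire it up; the stub bodies here are invented, only the two-method contract matches the header:

    #include <cstdint>
    #include <iostream>

    struct StubAssembler {
      uint32_t pc = 0;
      uint32_t PrepareStackFrame() {
        uint32_t offset = pc;
        pc += 6;  // fixed-size placeholder, e.g. `sub esp, imm32`
        return offset;
      }
      void PatchPrepareStackFrame(uint32_t offset, uint32_t stack_slots) {
        std::cout << "patch at " << offset << ": " << stack_slots << " slots\n";
      }
    };

    int main() {
      StubAssembler asm_;
      uint32_t frame_offset = asm_.PrepareStackFrame();  // size still unknown
      // ... generate the whole function body; slot count grows on demand ...
      uint32_t total_slots = 17;  // final only after the body is done
      asm_.PatchPrepareStackFrame(frame_offset, total_slots);
    }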

src/wasm/baseline/liftoff-compiler.cc

@@ -272,11 +272,11 @@ class LiftoffCompiler {
   void StartFunctionBody(Decoder* decoder, Control* block) {
     __ EnterFrame(StackFrame::WASM_COMPILED);
     __ set_has_frame(true);
-    __ ReserveStackSpace(__ GetTotalFrameSlotCount());
-    // {ReserveStackSpace} is the first platform-specific assembler method.
+    pc_offset_stack_frame_construction_ = __ PrepareStackFrame();
+    // {PrepareStackFrame} is the first platform-specific assembler method.
     // If this failed, we can bail out immediately, avoiding runtime overhead
     // and potential failures because of other unimplemented methods.
-    // A platform implementing {ReserveStackSpace} must ensure that we can
+    // A platform implementing {PrepareStackFrame} must ensure that we can
     // finish compilation without errors even if we hit unimplemented
     // LiftoffAssembler methods.
     if (DidAssemblerBailout(decoder)) return;
@@ -382,6 +382,8 @@ class LiftoffCompiler {
       GenerateOutOfLineCode(ool);
     }
     safepoint_table_builder_.Emit(asm_, __ GetTotalFrameSlotCount());
+    __ PatchPrepareStackFrame(pc_offset_stack_frame_construction_,
+                              __ GetTotalFrameSlotCount());
   }
 
   void OnFirstError(Decoder* decoder) {
@@ -1304,6 +1306,9 @@ class LiftoffCompiler {
   // code generation (in FinishCompilation).
   std::unique_ptr<Zone>* codegen_zone_;
   SafepointTableBuilder safepoint_table_builder_;
+  // The pc offset of the instructions to reserve the stack frame. Needed to
+  // patch the actually needed stack size in the end.
+  uint32_t pc_offset_stack_frame_construction_ = 0;
 
   void TraceCacheState(Decoder* decoder) const {
 #ifdef DEBUG

src/wasm/baseline/mips/liftoff-assembler-mips.h

@@ -23,10 +23,21 @@ inline MemOperand GetContextOperand() { return MemOperand(sp, -16); }
 
 }  // namespace liftoff
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  uint32_t offset = static_cast<uint32_t>(pc_offset());
+  addiu(sp, sp, 0);
+  return offset;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
   uint32_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
   DCHECK_LE(bytes, kMaxInt);
-  addiu(sp, sp, -bytes);
+  // We can't run out of space, just pass anything big enough to not cause the
+  // assembler to try to grow the buffer.
+  constexpr int kAvailableSpace = 64;
+  Assembler patching_assembler(isolate(), buffer_ + offset, kAvailableSpace);
+  patching_assembler.addiu(sp, sp, -bytes);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
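One caveat on MIPS — our observation, not part of the CL: addiu carries a 16-bit signed immediate, so a single patched `addiu sp, sp, -bytes` only reaches frames below 32 KiB; truly arbitrary frame sizes would need a multi-instruction patch sequence here. A standalone encoding check of the I-type instruction:

    #include <cassert>
    #include <cstdint>

    constexpr bool FitsAddiuImmediate(int32_t v) {
      return v >= -32768 && v <= 32767;
    }

    uint32_t EncodeAddiuSpSp(int16_t imm) {
      constexpr uint32_t kAddiu = 9;  // opcode field of addiu
      constexpr uint32_t kSp = 29;    // $sp
      return (kAddiu << 26) | (kSp << 21) | (kSp << 16) |
             static_cast<uint16_t>(imm);
    }

    int main() {
      assert(FitsAddiuImmediate(-4096) && !FitsAddiuImmediate(-40960));
      assert(EncodeAddiuSpSp(-4096) == 0x27BDF000);  // addiu $sp, $sp, -4096
    }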

src/wasm/baseline/mips64/liftoff-assembler-mips64.h

@@ -23,10 +23,21 @@ inline MemOperand GetContextOperand() { return MemOperand(sp, -16); }
 
 }  // namespace liftoff
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  uint32_t offset = static_cast<uint32_t>(pc_offset());
+  daddiu(sp, sp, 0);
+  return offset;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
   uint32_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
   DCHECK_LE(bytes, kMaxInt);
-  daddiu(sp, sp, -bytes);
+  // We can't run out of space, just pass anything big enough to not cause the
+  // assembler to try to grow the buffer.
+  constexpr int kAvailableSpace = 64;
+  Assembler patching_assembler(isolate(), buffer_ + offset, kAvailableSpace);
+  patching_assembler.daddiu(sp, sp, -bytes);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

src/wasm/baseline/ppc/liftoff-assembler-ppc.h

@@ -13,8 +13,14 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
-  BAILOUT("ReserveStackSpace");
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  BAILOUT("PrepareStackFrame");
+  return 0;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
+  BAILOUT("PatchPrepareStackFrame");
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
@@ -100,7 +106,7 @@ void LiftoffAssembler::FillI64Half(Register, uint32_t half_index) {
 #define UNIMPLEMENTED_GP_UNOP(name)                                \
   bool LiftoffAssembler::emit_##name(Register dst, Register src) { \
     BAILOUT("gp unop");                                            \
-    return false;                                                  \
+    return true;                                                   \
   }
 #define UNIMPLEMENTED_FP_BINOP(name)                                         \
   void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
@@ -138,7 +144,6 @@ UNIMPLEMENTED_FP_BINOP(f64_mul)
 
 #undef UNIMPLEMENTED_FP_BINOP
 #undef UNIMPLEMENTED_SHIFTOP
 
-void LiftoffAssembler::emit_jump(Label* label) { BAILOUT("emit_jump"); }
 void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,

src/wasm/baseline/s390/liftoff-assembler-s390.h

@@ -13,8 +13,14 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
-  BAILOUT("ReserveStackSpace");
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  BAILOUT("PrepareStackFrame");
+  return 0;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
+  BAILOUT("PatchPrepareStackFrame");
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
@@ -100,7 +106,7 @@ void LiftoffAssembler::FillI64Half(Register, uint32_t half_index) {
 #define UNIMPLEMENTED_GP_UNOP(name)                                \
   bool LiftoffAssembler::emit_##name(Register dst, Register src) { \
     BAILOUT("gp unop");                                            \
-    return false;                                                  \
+    return true;                                                   \
   }
 #define UNIMPLEMENTED_FP_BINOP(name)                                         \
   void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \

src/wasm/baseline/x64/liftoff-assembler-x64.h

@@ -41,10 +41,21 @@ static constexpr Register kCCallLastArgAddrReg = rax;
 
 }  // namespace liftoff
 
-void LiftoffAssembler::ReserveStackSpace(uint32_t stack_slots) {
+uint32_t LiftoffAssembler::PrepareStackFrame() {
+  uint32_t offset = static_cast<uint32_t>(pc_offset());
+  sub_sp_32(0);
+  return offset;
+}
+
+void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset,
+                                              uint32_t stack_slots) {
   uint32_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
   DCHECK_LE(bytes, kMaxInt);
-  subp(rsp, Immediate(bytes));
+  // We can't run out of space, just pass anything big enough to not cause the
+  // assembler to try to grow the buffer.
+  constexpr int kAvailableSpace = 64;
+  Assembler patching_assembler(isolate(), buffer_ + offset, kAvailableSpace);
+  patching_assembler.sub_sp_32(bytes);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

src/x64/assembler-x64.cc

@@ -2198,6 +2198,12 @@ void Assembler::store_rax(ExternalReference ref) {
   store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
 }
 
+void Assembler::sub_sp_32(uint32_t imm) {
+  emit_rex_64();
+  emit(0x81);  // using a literal 32-bit immediate.
+  emit_modrm(0x5, rsp);
+  emitl(imm);
+}
+
 void Assembler::testb(Register dst, Register src) {
   EnsureSpace ensure_space(this);
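The x64 variant differs from ia32 only by the REX.W prefix ({emit_rex_64}), giving a fixed seven-byte `sub rsp, imm32`. A standalone byte-level check — our arithmetic, with the little-endian immediate layout of x86:

    #include <cassert>
    #include <cstdint>

    int main() {
      // REX.W (0x48) + opcode 0x81 + ModRM 0xEC (/5 = SUB, rm = rsp) + imm32.
      const uint8_t expected[7] = {0x48, 0x81, 0xEC, 0x00, 0x10, 0x00, 0x00};
      uint32_t imm = 0x1000;
      uint8_t code[7] = {0x48, 0x81, 0xEC};
      for (int i = 0; i < 4; ++i)
        code[3 + i] = static_cast<uint8_t>(imm >> (8 * i));
      for (int i = 0; i < 7; ++i) assert(code[i] == expected[i]);
    }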

src/x64/assembler-x64.h

@@ -814,6 +814,8 @@ class Assembler : public AssemblerBase {
     immediate_arithmetic_op_8(0x5, dst, src);
   }
 
+  void sub_sp_32(uint32_t imm);
+
   void testb(Register dst, Register src);
   void testb(Register reg, Immediate mask);
   void testb(const Operand& op, Immediate mask);