[Liftoff] Fix moving stack values

On x64 the {kScratchRegister} cannot be held in a {LiftoffRegister},
since it is not a valid cache register. Also, the code unnecessarily
checked whether there is an unused cache register, but then didn't use
it. Simplify the logic to always use the scratch register, just
distinguish between 4-byte and 8-byte moves.
On ia32 we did not move 64-bit values correctly if we didn't have
unused registers and needed to move via the stack.

R=titzer@chromium.org

Bug: v8:6600, chromium:917588, chromium:917450
Change-Id: I0bbe946c6ac8fca62f85711ae47afdac9c02ae6b
Reviewed-on: https://chromium-review.googlesource.com/c/1391755
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58521}
This commit is contained in:
Clemens Hammacher 2019-01-03 12:44:16 +01:00 committed by Commit Bot
parent 42a42a68ba
commit 14faced4c0
3 changed files with 56 additions and 12 deletions

View File

@ -134,6 +134,19 @@ inline Register GetTmpByteRegister(LiftoffAssembler* assm, Register candidate) {
return assm->GetUnusedRegister(liftoff::kByteRegs).gp();
}
// Copies one stack slot to another (ia32: a single 4-byte move; callers
// invoke this once per 32-bit half for 64-bit values). Uses a free gp cache
// register as a temporary when one is available; otherwise moves via
// push/pop so that no register needs to be clobbered.
inline void MoveStackValue(LiftoffAssembler* assm, const Operand& src,
                           const Operand& dst) {
  if (assm->cache_state()->has_unused_register(kGpReg)) {
    // A gp cache register is free; copy through it with two movs.
    Register tmp = assm->cache_state()->unused_register(kGpReg).gp();
    assm->mov(tmp, src);
    assm->mov(dst, tmp);
  } else {
    // No free register, move via the stack.
    assm->push(src);
    assm->pop(dst);
  }
}
constexpr DoubleRegister kScratchDoubleReg = xmm7;
constexpr int kSubSpSize = 6; // 6 bytes for "sub esp, <imm32>"
@ -401,14 +414,16 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
// Moves the value in stack slot {src_index} into stack slot {dst_index}.
// 64-bit values occupy two 32-bit halves on ia32 and are moved half by
// half, so the move is correct even when no cache register is free (the
// helper then falls back to push/pop per half).
void LiftoffAssembler::MoveStackValue(uint32_t dst_index, uint32_t src_index,
                                      ValueType type) {
  DCHECK_NE(dst_index, src_index);
  if (needs_reg_pair(type)) {
    // Move the low and high 4-byte halves separately.
    liftoff::MoveStackValue(this,
                            liftoff::GetHalfStackSlot(src_index, kLowWord),
                            liftoff::GetHalfStackSlot(dst_index, kLowWord));
    liftoff::MoveStackValue(this,
                            liftoff::GetHalfStackSlot(src_index, kHighWord),
                            liftoff::GetHalfStackSlot(dst_index, kHighWord));
  } else {
    // Single-slot value: one 4-byte move suffices.
    liftoff::MoveStackValue(this, liftoff::GetStackSlot(src_index),
                            liftoff::GetStackSlot(dst_index));
  }
}

View File

@ -318,12 +318,15 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
// Moves the value in stack slot {src_index} into stack slot {dst_index}.
// Always copies through {kScratchRegister}: it is not a Liftoff cache
// register (so it cannot be wrapped in a LiftoffRegister), and it is always
// available. Only the operand width differs between 4- and 8-byte types.
void LiftoffAssembler::MoveStackValue(uint32_t dst_index, uint32_t src_index,
                                      ValueType type) {
  DCHECK_NE(dst_index, src_index);
  Operand src = liftoff::GetStackSlot(src_index);
  Operand dst = liftoff::GetStackSlot(dst_index);
  if (ValueTypes::ElementSizeLog2Of(type) == 2) {
    // 4-byte value (e.g. i32/f32): 32-bit moves.
    movl(kScratchRegister, src);
    movl(dst, kScratchRegister);
  } else {
    // Only 4- and 8-byte values are spilled to the stack.
    DCHECK_EQ(3, ValueTypes::ElementSizeLog2Of(type));
    movq(kScratchRegister, src);
    movq(dst, kScratchRegister);
  }
}

View File

@ -0,0 +1,26 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Regression test for Liftoff's MoveStackValue (v8:6600, chromium:917588,
// chromium:917450): 64-bit stack values must be moved correctly even when
// no cache register is unused.

load('test/mjsunit/wasm/wasm-constants.js');
load('test/mjsunit/wasm/wasm-module-builder.js');

const builder = new WasmModuleBuilder();
// Signature: no parameters, returns one f64.
const sig = builder.addType(makeSig([], [kWasmF64]));
// NOTE(review): the extra f32/f64 locals presumably raise register/stack
// pressure so that the compiler must move values between stack slots —
// confirm against the original bug reports.
builder.addFunction(undefined, sig)
    .addLocals({f32_count: 5}).addLocals({f64_count: 3})
    .addBody([
      kExprBlock, kWasmF64,  // block yielding an f64
      // Two f64 constants (IEEE-754 little-endian bytes for 1.0 and 0.0)
      // kept on the value stack across the br_if below.
      kExprF64Const, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xf0, 0x3f,
      kExprF64Const, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
      kExprI32Const, 0,
      kExprIf, kWasmI32,     // if/else producing an i32 condition value
      kExprI32Const, 0,
      kExprElse,
      kExprI32Const, 1,
      kExprEnd,
      kExprBrIf, 0,          // conditional branch out of the block
      kExprUnreachable,
      kExprEnd
    ]);
// Instantiation triggers compilation of the function; the bug manifested
// during code generation, so the function never needs to be called.
builder.instantiate();