[wasm-simd][liftoff][ia32][x64] Implement v128.load_zero

Implement v128.load32_zero and v128.load64_zero on Liftoff, only for
ia32 and x64. ARM will follow.

Bug: v8:11038
Change-Id: I0fad054f462e27eb60825258dad385244b5e5a95
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2486236
Commit-Queue: Zhi An Ng <zhin@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70782}
This commit is contained in:
Zhi An Ng 2020-10-27 05:28:07 +00:00 committed by Commit Bot
parent f13641d3e3
commit f89869a213
5 changed files with 21 additions and 7 deletions

View File

@@ -2257,6 +2257,8 @@ void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
NeonMemOperand(actual_src_addr));
vmovl(NeonU32, liftoff::GetSimd128Register(dst), dst.low_fp());
}
} else if (transform == LoadTransformationKind::kZeroExtend) {
bailout(kSimd, "v128.load_zero unimplemented");
} else {
DCHECK_EQ(LoadTransformationKind::kSplat, transform);
if (memtype == MachineType::Int8()) {

View File

@@ -1505,6 +1505,8 @@ void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
Ldr(dst.fp().D(), src_op);
Uxtl(dst.fp().V2D(), dst.fp().V2S());
}
} else if (transform == LoadTransformationKind::kZeroExtend) {
bailout(kSimd, "v128.load_zero unimplemented");
} else {
// ld1r only allows no offset or post-index, so emit an add.
DCHECK_EQ(LoadTransformationKind::kSplat, transform);

View File

@@ -2663,6 +2663,13 @@ void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
} else if (memtype == MachineType::Uint32()) {
Pmovzxdq(dst.fp(), src_op);
}
} else if (transform == LoadTransformationKind::kZeroExtend) {
if (memtype == MachineType::Int32()) {
movss(dst.fp(), src_op);
} else {
DCHECK_EQ(MachineType::Int64(), memtype);
movsd(dst.fp(), src_op);
}
} else {
DCHECK_EQ(LoadTransformationKind::kSplat, transform);
if (memtype == MachineType::Int8()) {

View File

@@ -2291,15 +2291,11 @@ class LiftoffCompiler {
return;
}
if (transform == LoadTransformationKind::kZeroExtend) {
unsupported(decoder, kSimd, "prototyping s128 load zero extend");
return;
}
LiftoffRegList pinned;
Register index = pinned.set(__ PopToRegister()).gp();
// For load splats, LoadType is the size of the load, and for load
// extends, LoadType is the size of the lane, and it always loads 8 bytes.
// For load splats and load zero, LoadType is the size of the load, and for
// load extends, LoadType is the size of the lane, and it always loads 8
// bytes.
uint32_t access_size =
transform == LoadTransformationKind::kExtend ? 8 : type.size();
if (BoundsCheckMem(decoder, access_size, imm.offset, index, pinned,

View File

@@ -2287,6 +2287,13 @@ void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
} else if (memtype == MachineType::Uint32()) {
Pmovzxdq(dst.fp(), src_op);
}
} else if (transform == LoadTransformationKind::kZeroExtend) {
if (memtype == MachineType::Int32()) {
Movss(dst.fp(), src_op);
} else {
DCHECK_EQ(MachineType::Int64(), memtype);
Movsd(dst.fp(), src_op);
}
} else {
DCHECK_EQ(LoadTransformationKind::kSplat, transform);
if (memtype == MachineType::Int8()) {