diff --git a/src/wasm/baseline/arm/liftoff-assembler-arm.h b/src/wasm/baseline/arm/liftoff-assembler-arm.h
index 66d5f8009b..36be120bcf 100644
--- a/src/wasm/baseline/arm/liftoff-assembler-arm.h
+++ b/src/wasm/baseline/arm/liftoff-assembler-arm.h
@@ -71,11 +71,18 @@ inline MemOperand GetMemOp(LiftoffAssembler* assm,
 inline Register CalculateActualAddress(LiftoffAssembler* assm,
                                        UseScratchRegisterScope* temps,
                                        Register addr_reg, Register offset_reg,
-                                       int32_t offset_imm) {
+                                       int32_t offset_imm,
+                                       Register result_reg = no_reg) {
   if (offset_reg == no_reg && offset_imm == 0) {
-    return addr_reg;
+    if (result_reg == no_reg) {
+      return addr_reg;
+    } else {
+      assm->mov(result_reg, addr_reg);
+      return result_reg;
+    }
   }
-  Register actual_addr_reg = temps->Acquire();
+  Register actual_addr_reg =
+      result_reg != no_reg ? result_reg : temps->Acquire();
   if (offset_reg == no_reg) {
     assm->add(actual_addr_reg, addr_reg, Operand(offset_imm));
   } else {
@@ -901,8 +908,9 @@ void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
 namespace liftoff {
 #define __ lasm->
 
-inline void AtomicI64CompareExchange(LiftoffAssembler* lasm, Register dst_addr,
-                                     Register offset_reg, uint32_t offset_imm,
+inline void AtomicI64CompareExchange(LiftoffAssembler* lasm,
+                                     Register dst_addr_reg, Register offset_reg,
+                                     uint32_t offset_imm,
                                      LiftoffRegister expected,
                                      LiftoffRegister new_value,
                                      LiftoffRegister result) {
@@ -912,9 +920,8 @@ inline void AtomicI64CompareExchange(LiftoffAssembler* lasm, Register dst_addr,
   // high-word has to be in the next higher register. To avoid complicated
   // register allocation code here, we just assign fixed registers to all
   // values here, and then move all values into the correct register.
-  // {ip} is the scratch register of {UseScratchRegisterScope}. Either it will
-  // be used by the actual address, or we force it to.
-  Register actual_addr = ip;
+  Register dst_addr = r0;
+  Register offset = r1;
   Register result_low = r4;
   Register result_high = r5;
   Register new_value_low = r2;
@@ -924,36 +931,39 @@ inline void AtomicI64CompareExchange(LiftoffAssembler* lasm, Register dst_addr,
   Register expected_high = r9;
 
   // We spill all registers, so that we can re-assign them afterwards.
-  __ SpillRegisters(result_low, result_high, new_value_low, new_value_high,
-                    store_result, expected_low, expected_high);
-
-  // We calculate the address after spilling. Spilling may require the scratch
-  // register.
-  UseScratchRegisterScope temps(lasm);
-  Register initial_addr = liftoff::CalculateActualAddress(
-      lasm, &temps, dst_addr, offset_reg, offset_imm);
-  // Make sure the actual address is stored in the right register.
-  if (initial_addr != actual_addr) {
-    __ mov(actual_addr, initial_addr);
-  }
+  __ SpillRegisters(dst_addr, offset, result_low, result_high, new_value_low,
+                    new_value_high, store_result, expected_low, expected_high);
 
   LiftoffAssembler::ParallelRegisterMoveTuple reg_moves[]{
       {LiftoffRegister::ForPair(new_value_low, new_value_high), new_value,
       kWasmI64},
      {LiftoffRegister::ForPair(expected_low, expected_high), expected,
-       kWasmI64}};
+       kWasmI64},
+      {LiftoffRegister(dst_addr), LiftoffRegister(dst_addr_reg), kWasmI32},
+      {LiftoffRegister(offset),
+       LiftoffRegister(offset_reg != no_reg ? offset_reg : offset), kWasmI32}};
   __ ParallelRegisterMove(ArrayVector(reg_moves));
 
+  {
+    UseScratchRegisterScope temps(lasm);
+    Register temp = liftoff::CalculateActualAddress(
+        lasm, &temps, dst_addr, offset_reg == no_reg ? no_reg : offset,
+        offset_imm, dst_addr);
+    // Make sure the actual address is stored in the right register.
+    DCHECK_EQ(dst_addr, temp);
+    USE(temp);
+  }
+
   Label retry;
   Label done;
   __ dmb(ISH);
   __ bind(&retry);
-  __ ldrexd(result_low, result_high, actual_addr);
+  __ ldrexd(result_low, result_high, dst_addr);
   __ cmp(result_low, expected_low);
   __ b(ne, &done);
   __ cmp(result_high, expected_high);
   __ b(ne, &done);
-  __ strexd(store_result, new_value_low, new_value_high, actual_addr);
+  __ strexd(store_result, new_value_low, new_value_high, dst_addr);
   __ cmp(store_result, Operand(0));
   __ b(ne, &retry);
   __ dmb(ISH);
diff --git a/test/mjsunit/regress/wasm/regress-1079449.js b/test/mjsunit/regress/wasm/regress-1079449.js
new file mode 100644
index 0000000000..65e964552d
--- /dev/null
+++ b/test/mjsunit/regress/wasm/regress-1079449.js
@@ -0,0 +1,37 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --wasm-staging
+
+load('test/mjsunit/wasm/wasm-module-builder.js');
+
+const builder = new WasmModuleBuilder();
+builder.addMemory(16, 32, false, true);
+const sig = builder.addType(makeSig(
+    [
+      kWasmI64, kWasmI32, kWasmI64, kWasmI32, kWasmI32, kWasmI32, kWasmI32,
+      kWasmI32, kWasmI32, kWasmI64, kWasmI64, kWasmI64
+    ],
+    [kWasmI64]));
+// Generate function 2 (out of 3).
+builder.addFunction(undefined, sig)
+  .addLocals({f32_count: 10})
+  .addLocals({i32_count: 4})
+  .addLocals({f64_count: 1})
+  .addLocals({i32_count: 15})
+  .addBodyWithEnd([
+    // signature: v_liliiiiiilll
+    // body:
+    kExprI32Const, 0x00,  // i32.const
+    kExprI64Const, 0x00,  // i64.const
+    kExprI64Const, 0x00,  // i64.const
+    kAtomicPrefix, kExprI64AtomicCompareExchange, 0x00,
+    0x8,  // i64.atomic.cmpxchng64
+    kExprEnd,  // end @124
+  ]);
+
+builder.addExport('main', 0);
+const instance = builder.instantiate();
+assertEquals(
+    0n, instance.exports.main(1n, 2, 3n, 4, 5, 6, 7, 8, 9, 10n, 11n, 12n, 13n));