ppc: [liftoff] Fix jump table atomic patching

Change-Id: I64f1d8868b3bc732d0014baaf4c87a7b6a0c68db
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3212814
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Junliang Yan <junyan@redhat.com>
Cr-Commit-Position: refs/heads/main@{#77300}
Author: Junliang Yan <junyan@redhat.com>
Date: 2021-10-07 14:44:16 -04:00 (committed by V8 LUCI CQ)
Parent: d87e5f42f3
Commit: 1628c91b51

3 changed files with 29 additions and 8 deletions


@@ -314,19 +314,36 @@ void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
 }
 
 bool JumpTableAssembler::EmitJumpSlot(Address target) {
-  mov(r0, Operand(target));
-  mtctr(r0);
-  bctr();
+  intptr_t relative_target = reinterpret_cast<byte*>(target) - pc_;
+
+  if (!is_int26(relative_target)) {
+    return false;
+  }
+
+  b(relative_target, LeaveLK);
   return true;
 }
 
 void JumpTableAssembler::EmitFarJumpSlot(Address target) {
-  JumpToInstructionStream(target);
+  byte* start = pc_;
+  mov(ip, Operand(reinterpret_cast<Address>(start + kFarJumpTableSlotSize -
+                                            8)));  // 5 instr
+  LoadU64(ip, MemOperand(ip));
+  mtctr(ip);
+  bctr();
+  byte* end = pc_;
+  int used = end - start;
+  CHECK(used < kFarJumpTableSlotSize - 8);
+  NopBytes(kFarJumpTableSlotSize - 8 - used);
+  CHECK_EQ(reinterpret_cast<Address>(pc_) & 0x7, 0);  // Alignment
+  dp(target);
 }
 
 // static
 void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
-  UNREACHABLE();
+  Address target_addr = slot + kFarJumpTableSlotSize - 8;
+  reinterpret_cast<std::atomic<Address>*>(target_addr)
+      ->store(target, std::memory_order_relaxed);
 }
 
 void JumpTableAssembler::NopBytes(int bytes) {
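
Note: for context, here is a minimal host-side sketch of the patching scheme this hunk adopts on PPC64: the far jump slot ends in an 8-byte-aligned data word holding the target address, the stub loads that word indirectly and branches through CTR, and patching is a relaxed atomic store into the data word rather than rewriting instructions. The struct layout and names below are illustrative stand-ins, not V8 API.

#include <atomic>
#include <cstdint>
#include <cstdio>

// Illustrative stand-in for a far jump table slot: machine code followed by
// padding, with the last 8 (8-byte-aligned) bytes holding the jump target.
// The size mirrors the PPC64 constant introduced in this CL (12 * 4 bytes).
struct alignas(8) FakeFarJumpSlot {
  unsigned char code_and_padding[12 * 4 - 8];
  uint64_t target;  // Read by the stub via an indirect load.
};

// Patching only touches the data word, so it can happen while another thread
// is executing the stub, as long as the store is atomic.
void PatchFakeFarJumpSlot(FakeFarJumpSlot* slot, uint64_t new_target) {
  reinterpret_cast<std::atomic<uint64_t>*>(&slot->target)
      ->store(new_target, std::memory_order_relaxed);
}

int main() {
  FakeFarJumpSlot slot{};
  PatchFakeFarJumpSlot(&slot, 0x1234);
  std::printf("patched target: 0x%llx\n",
              static_cast<unsigned long long>(slot.target));
}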


@@ -206,8 +206,8 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
   static constexpr int kLazyCompileTableSlotSize = 20;
 #elif V8_TARGET_ARCH_PPC64
   static constexpr int kJumpTableLineSize = 64;
-  static constexpr int kJumpTableSlotSize = 7 * kInstrSize;
-  static constexpr int kFarJumpTableSlotSize = 7 * kInstrSize;
+  static constexpr int kJumpTableSlotSize = 1 * kInstrSize;
+  static constexpr int kFarJumpTableSlotSize = 12 * kInstrSize;
   static constexpr int kLazyCompileTableSlotSize = 12 * kInstrSize;
 #elif V8_TARGET_ARCH_MIPS
   static constexpr int kJumpTableLineSize = 8 * kInstrSize;
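
Note: to see why the new constants fit, here is the slot-size arithmetic implied by the .cc hunk above, written out as compile-time checks. This assumes kInstrSize is 4 bytes on PPC64; the instruction counts (up to 5 for the mov materializing a 64-bit immediate, plus LoadU64/mtctr/bctr at one each) come from the diff's comments, and the names here are a hedged restatement rather than code from the CL. The remainder is filled by NopBytes, which is why EmitFarJumpSlot measures the bytes actually used.

#include <cstdint>

// Assumed PPC64 instruction size.
constexpr int kInstrSize = 4;

// New constants from this CL.
constexpr int kJumpTableSlotSize = 1 * kInstrSize;     // single relative branch
constexpr int kFarJumpTableSlotSize = 12 * kInstrSize; // 48 bytes

// The far slot reserves its last 8 bytes for the target address word.
constexpr int kFarSlotCodeBytes = kFarJumpTableSlotSize - 8;  // 40 bytes

// Worst-case code emitted by EmitFarJumpSlot: 5 instructions for the mov of
// a 64-bit immediate, then LoadU64 + mtctr + bctr.
constexpr int kFarSlotCodeUsedMax = (5 + 1 + 1 + 1) * kInstrSize;  // 32 bytes

static_assert(kFarSlotCodeUsedMax < kFarSlotCodeBytes,
              "room left for NopBytes padding before the data word");
static_assert(kFarJumpTableSlotSize % 8 == 0,
              "slot size keeps the trailing target word 8-byte aligned");
static_assert(kJumpTableSlotSize == 4, "near slot is one branch instruction");

int main() { return 0; }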


@@ -503,6 +503,9 @@ class WasmCodeAllocator {
 #if V8_TARGET_ARCH_ARM64
   // ARM64 only supports direct calls within a 128 MB range.
   static constexpr size_t kMaxCodeSpaceSize = 128 * MB;
+#elif V8_TARGET_ARCH_PPC64
+  // Branches only take 26 bits.
+  static constexpr size_t kMaxCodeSpaceSize = 32 * MB;
 #else
   // Use 1024 MB limit for code spaces on other platforms. This is smaller than
   // the total allowed code space (kMaxWasmCodeMemory) to avoid unnecessarily
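
Note: the 32 MB figure follows from the branch encoding checked by is_int26 in EmitJumpSlot: a signed 26-bit byte offset reaches at most ±2^25 bytes = ±32 MB. A small sketch of that check and the resulting range, using an assumed is_int26 equivalent rather than V8's helper:

#include <cstdint>
#include <cstdio>

// Assumed equivalent of V8's is_int26(): does the value fit in a signed
// 26-bit field?
constexpr bool is_int26(int64_t value) {
  return value >= -(int64_t{1} << 25) && value < (int64_t{1} << 25);
}

int main() {
  constexpr int64_t MB = 1024 * 1024;
  // Largest forward/backward byte offsets a PPC64 relative branch can encode.
  static_assert(is_int26(32 * MB - 4), "just inside the +32 MB range");
  static_assert(!is_int26(32 * MB), "+32 MB itself is out of range");
  static_assert(is_int26(-32 * MB), "-32 MB is reachable");
  std::printf("relative branch range: +/- %lld MB\n",
              static_cast<long long>((int64_t{1} << 25) / MB));
}
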
@@ -602,7 +605,8 @@ class WasmCodeAllocator {
 class V8_EXPORT_PRIVATE NativeModule final {
  public:
-#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
+#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64 || \
+    V8_TARGET_ARCH_PPC64
   static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = true;
 #else
   static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = false;
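
Note: taken together, these changes imply the following patching flow, sketched under the assumption that a caller falls back to the far slot whenever EmitJumpSlot reports the target as out of relative-branch range; the helper below is an illustrative stand-in, not a V8 function.

#include <cstdint>
#include <cstdio>

constexpr int64_t MB = 1024 * 1024;

// Assumed stand-in: a "near" (relative-branch) slot only works if the target
// lies within the +/-32 MB range of the slot.
bool TryEmitNearJump(uint64_t slot, uint64_t target) {
  int64_t distance = static_cast<int64_t>(target) - static_cast<int64_t>(slot);
  return distance >= -32 * MB && distance < 32 * MB;
}

int main() {
  uint64_t near_slot = 0x10000000;
  uint64_t far_slot  = near_slot + 4096;      // same code space as the slot
  uint64_t target    = near_slot + 100 * MB;  // another code space

  if (TryEmitNearJump(near_slot, target)) {
    std::printf("near branch patched directly\n");
  } else {
    // Out of range: store the target into the far slot's data word (the
    // atomic store shown in the .cc hunk) and branch near to the far slot,
    // which always lies within the same <= 32 MB code space.
    std::printf("route via far slot: near branch ok = %d\n",
                TryEmitNearJump(near_slot, far_slot));
  }
}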