[mips][Turboprop] Move deoptimizations for dynamic map checks into builtin.

Port: b6643320b9

Bug: v8:10582

Change-Id: I3efdd840a4f3f2eeb6156f8b446478311a2ccd26
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2568569
Reviewed-by: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Commit-Queue: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Auto-Submit: Liu yu <liuyu@loongson.cn>
Cr-Commit-Position: refs/heads/master@{#71560}
Liu Yu 2020-12-02 15:39:12 +08:00 committed by Commit Bot
parent 7730fd94a2
commit 059c5fa45a
12 changed files with 202 additions and 12 deletions
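Note for orientation: the first two hunks below add a Generate_DynamicMapChecksTrampoline builtin for mips and mips64. It saves the registers the DynamicMapChecks builtin may clobber, calls that builtin, and then dispatches on the status returned in v0: success returns to the caller, while the bailout and deopt statuses tail-call the bailout and eager deoptimization entries. A minimal stand-alone C++ sketch of that dispatch follows; the type and helper names are illustrative stand-ins, not the V8 declarations.

// Illustrative sketch only; the real builtin emits this control flow through
// the MacroAssembler and restores the saved registers on every path.
#include <cstdio>

enum class Status { kSuccess, kBailout, kDeopt };  // mirrors DynamicMapChecksStatus

// Stand-ins for the tail-calls to the deoptimization entry builtins.
void JumpToEagerDeoptEntry() { std::printf("jump to eager deopt entry\n"); }
void JumpToBailoutDeoptEntry() { std::printf("jump to bailout deopt entry\n"); }

void DynamicMapChecksDispatchSketch(Status status) {
  if (status == Status::kSuccess) {
    return;  // map checks passed: Ret() back to the caller.
  }
  if (status == Status::kBailout) {
    JumpToBailoutDeoptEntry();  // __ Jump(deopt_bailout, ...)
  } else {
    JumpToEagerDeoptEntry();  // any other status is kDeopt: __ Jump(deopt_eager, ...)
  }
}

int main() {
  DynamicMapChecksDispatchSketch(Status::kBailout);
  return 0;
}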


@@ -3847,6 +3847,50 @@ void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}

void Builtins::Generate_DynamicMapChecksTrampoline(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterFrame(StackFrame::INTERNAL);

  // Only save the registers that the DynamicMapChecks builtin can clobber.
  DynamicMapChecksDescriptor descriptor;
  RegList registers = descriptor.allocatable_registers();
  // If FLAG_debug_code is enabled, CSA checks will call a C function, so we
  // need to save all CallerSaved registers too.
  if (FLAG_debug_code) registers |= kJSCallerSaved;
  __ SaveRegisters(registers);

  __ Call(BUILTIN_CODE(masm->isolate(), DynamicMapChecks),
          RelocInfo::CODE_TARGET);
  Label deopt, bailout;
  __ Branch(&deopt, ne, v0,
            Operand(static_cast<int>(DynamicMapChecksStatus::kSuccess)));

  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  __ Ret();

  __ bind(&deopt);
  __ Branch(&bailout, eq, v0,
            Operand(static_cast<int>(DynamicMapChecksStatus::kBailout)));
  if (FLAG_debug_code) {
    __ Assert(eq, AbortReason::kUnexpectedDynamicMapChecksStatus, v0,
              Operand(static_cast<int>(DynamicMapChecksStatus::kDeopt)));
  }
  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  Handle<Code> deopt_eager = masm->isolate()->builtins()->builtin_handle(
      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
  __ Jump(deopt_eager, RelocInfo::CODE_TARGET);

  __ bind(&bailout);
  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  Handle<Code> deopt_bailout = masm->isolate()->builtins()->builtin_handle(
      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
  __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
#undef __
} // namespace internal


@@ -3409,6 +3409,50 @@ void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}

void Builtins::Generate_DynamicMapChecksTrampoline(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterFrame(StackFrame::INTERNAL);

  // Only save the registers that the DynamicMapChecks builtin can clobber.
  DynamicMapChecksDescriptor descriptor;
  RegList registers = descriptor.allocatable_registers();
  // If FLAG_debug_code is enabled, CSA checks will call a C function, so we
  // need to save all CallerSaved registers too.
  if (FLAG_debug_code) registers |= kJSCallerSaved;
  __ SaveRegisters(registers);

  __ Call(BUILTIN_CODE(masm->isolate(), DynamicMapChecks),
          RelocInfo::CODE_TARGET);
  Label deopt, bailout;
  __ Branch(&deopt, ne, v0,
            Operand(static_cast<int>(DynamicMapChecksStatus::kSuccess)));

  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  __ Ret();

  __ bind(&deopt);
  __ Branch(&bailout, eq, v0,
            Operand(static_cast<int>(DynamicMapChecksStatus::kBailout)));
  if (FLAG_debug_code) {
    __ Assert(eq, AbortReason::kUnexpectedDynamicMapChecksStatus, v0,
              Operand(static_cast<int>(DynamicMapChecksStatus::kDeopt)));
  }
  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  Handle<Code> deopt_eager = masm->isolate()->builtins()->builtin_handle(
      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
  __ Jump(deopt_eager, RelocInfo::CODE_TARGET);

  __ bind(&bailout);
  __ RestoreRegisters(registers);
  __ LeaveFrame(StackFrame::INTERNAL);
  Handle<Code> deopt_bailout = masm->isolate()->builtins()->builtin_handle(
      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
  __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
#undef __
} // namespace internal


@@ -60,6 +60,18 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
}

void DynamicMapChecksDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register default_stub_registers[] = {kReturnRegister0, a0, a1, a2, cp};

  data->RestrictAllocatableRegisters(default_stub_registers,
                                     arraysize(default_stub_registers));

  CHECK_LE(static_cast<size_t>(kParameterCount),
           arraysize(default_stub_registers));
  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
}

void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  const Register default_stub_registers[] = {a0, a1, a2, a3, kReturnRegister0};


@@ -5547,7 +5547,7 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
                                           Label* exit, DeoptimizeKind kind,
-                                          Label*) {
+                                          Label* ret, Label*) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Lw(t9,
     MemOperand(kRootRegister, IsolateData::builtin_entry_slot_offset(target)));
@@ -5556,7 +5556,11 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
            (kind == DeoptimizeKind::kLazy)
                ? Deoptimizer::kLazyDeoptExitSize
                : Deoptimizer::kNonLazyDeoptExitSize);
-  USE(exit, kind);
+  if (kind == DeoptimizeKind::kEagerWithResume) {
+    Branch(ret);
+    DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
+              Deoptimizer::kEagerWithResumeDeoptExitSize);
+  }
}
} // namespace internal


@@ -238,7 +238,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
  void StoreReturnAddressAndCall(Register target);
  void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
-                            DeoptimizeKind kind,
+                            DeoptimizeKind kind, Label* ret,
                             Label* jump_deoptimization_entry_label);
  void Ret(COND_ARGS);


@@ -60,6 +60,18 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
}

void DynamicMapChecksDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  Register default_stub_registers[] = {kReturnRegister0, a0, a1, a2, cp};

  data->RestrictAllocatableRegisters(default_stub_registers,
                                     arraysize(default_stub_registers));

  CHECK_LE(static_cast<size_t>(kParameterCount),
           arraysize(default_stub_registers));
  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
}

void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
    CallInterfaceDescriptorData* data) {
  const Register default_stub_registers[] = {a0, a1, a2, a3, kReturnRegister0};


@@ -5908,7 +5908,7 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
                                           Label* exit, DeoptimizeKind kind,
-                                          Label*) {
+                                          Label* ret, Label*) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  Ld(t9,
     MemOperand(kRootRegister, IsolateData::builtin_entry_slot_offset(target)));
@@ -5917,7 +5917,12 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
            (kind == DeoptimizeKind::kLazy)
                ? Deoptimizer::kLazyDeoptExitSize
                : Deoptimizer::kNonLazyDeoptExitSize);
-  USE(exit, kind);
+  if (kind == DeoptimizeKind::kEagerWithResume) {
+    Branch(ret);
+    DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
+              Deoptimizer::kEagerWithResumeDeoptExitSize);
+  }
}
} // namespace internal


@@ -261,7 +261,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
  void StoreReturnAddressAndCall(Register target);
  void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
-                            DeoptimizeKind kind,
+                            DeoptimizeKind kind, Label* ret,
                             Label* jump_deoptimization_entry_label);
  void Ret(COND_ARGS);


@@ -10,6 +10,7 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kEagerWithResumeDeoptExitSize = 5 * kInstrSize;
// Maximum size of a table entry generated below.
#ifdef _MIPS_ARCH_MIPS32R6


@@ -10,6 +10,7 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
const int Deoptimizer::kEagerWithResumeDeoptExitSize = 5 * kInstrSize;
// Maximum size of a table entry generated below.
#ifdef _MIPS_ARCH_MIPS64R6
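The two deoptimizer hunks above fix the size of the new eager-with-resume deopt exit at 5 * kInstrSize. That lines up with the CallForDeoptimization changes: the existing three-instruction call sequence (the builtin entry load plus the call and its branch-delay slot) is followed, for kEagerWithResume, by a Branch to the resume label, i.e. two more instructions. A small self-contained sketch of that accounting, with the constants redeclared locally for illustration rather than taken from the V8 headers:

// Illustration of the exit-size accounting; values mirror the constants in
// the hunks above but are local stand-ins, not the V8 declarations.
constexpr int kInstrSize = 4;  // MIPS instructions are 4 bytes wide.
constexpr int kNonLazyDeoptExitSize = 3 * kInstrSize;          // entry load + call + delay slot
constexpr int kEagerWithResumeDeoptExitSize = 5 * kInstrSize;  // + branch + delay slot

static_assert(kEagerWithResumeDeoptExitSize ==
                  kNonLazyDeoptExitSize + 2 * kInstrSize,
              "the resume branch accounts for the two extra instructions");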


@@ -30,7 +30,9 @@
#include "src/api/api-inl.h"
#include "src/base/utils/random-number-generator.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/macro-assembler.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/simulator.h"
#include "src/init/v8.h"
#include "src/objects/heap-number.h"
@@ -38,6 +40,7 @@
#include "src/objects/objects-inl.h"
#include "src/utils/ostreams.h"
#include "test/cctest/cctest.h"
#include "test/common/assembler-tester.h"
namespace v8 {
namespace internal {
@@ -1332,6 +1335,38 @@ TEST(macro_float_minmax_f64) {
#undef CHECK_MINMAX
}

TEST(DeoptExitSizeIsFixed) {
  CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);

  Isolate* isolate = CcTest::i_isolate();
  HandleScope handles(isolate);
  auto buffer = AllocateAssemblerBuffer();
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      buffer->CreateView());

  STATIC_ASSERT(static_cast<int>(kFirstDeoptimizeKind) == 0);
  for (int i = 0; i < kDeoptimizeKindCount; i++) {
    DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
    Label before_exit;
    masm.bind(&before_exit);
    if (kind == DeoptimizeKind::kEagerWithResume) {
      Builtins::Name target = Deoptimizer::GetDeoptWithResumeBuiltin(
          DeoptimizeReason::kDynamicMapCheck);
      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                 nullptr);
      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
               Deoptimizer::kEagerWithResumeDeoptExitSize);
    } else {
      Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                 nullptr);
      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
               kind == DeoptimizeKind::kLazy
                   ? Deoptimizer::kLazyDeoptExitSize
                   : Deoptimizer::kNonLazyDeoptExitSize);
    }
  }
}
#undef __
} // namespace internal


@@ -28,15 +28,15 @@
#include <stdlib.h>
#include <iostream>  // NOLINT(readability/streams)

-#include "src/init/v8.h"
-#include "test/cctest/cctest.h"
#include "src/base/utils/random-number-generator.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/macro-assembler.h"
+#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/simulator.h"
#include "src/objects/heap-number.h"
-#include "src/utils/ostreams.h"
+#include "src/init/v8.h"
#include "src/objects/objects-inl.h"
+#include "src/utils/ostreams.h"
+#include "test/cctest/cctest.h"
+#include "test/common/assembler-tester.h"
namespace v8 {
namespace internal {
@@ -1687,6 +1687,38 @@ TEST(macro_float_minmax_f64) {
#undef CHECK_MINMAX
}

TEST(DeoptExitSizeIsFixed) {
  CHECK(Deoptimizer::kSupportsFixedDeoptExitSizes);

  Isolate* isolate = CcTest::i_isolate();
  HandleScope handles(isolate);
  auto buffer = AllocateAssemblerBuffer();
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      buffer->CreateView());

  STATIC_ASSERT(static_cast<int>(kFirstDeoptimizeKind) == 0);
  for (int i = 0; i < kDeoptimizeKindCount; i++) {
    DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
    Label before_exit;
    masm.bind(&before_exit);
    if (kind == DeoptimizeKind::kEagerWithResume) {
      Builtins::Name target = Deoptimizer::GetDeoptWithResumeBuiltin(
          DeoptimizeReason::kDynamicMapCheck);
      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                 nullptr);
      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
               Deoptimizer::kEagerWithResumeDeoptExitSize);
    } else {
      Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                 nullptr);
      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
               kind == DeoptimizeKind::kLazy
                   ? Deoptimizer::kLazyDeoptExitSize
                   : Deoptimizer::kNonLazyDeoptExitSize);
    }
  }
}
#undef __
} // namespace internal