[loong64][mips] Remove dynamic map checks and custom deoptimization kinds
Port commit b2978927d8
Fixed: v8:12552
Change-Id: Ic2fbded9a662ed840a0350e3ce049e147fbf03a0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3541527
Auto-Submit: Yu Liu <liuyu@loongson.cn>
Reviewed-by: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Commit-Queue: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Cr-Commit-Position: refs/heads/main@{#79557}
This commit is contained in: parent faaf6818d2, commit 38940b7098
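For context: after this port, only the three standard deoptimization kinds remain on loong64 and mips, and the custom kinds that backed dynamic map checks are gone. A minimal sketch of the resulting kind set (illustrative only; the real enum lives in V8's src/common/globals.h and this is not its verbatim definition):

    #include <cstdint>

    // Illustrative sketch, not the verbatim V8 enum. kEager/kSoft/kLazy are
    // the kinds the retained Generate_DeoptimizationEntry_* builtins below
    // dispatch on; kBailout and kEagerWithResume are the custom kinds this
    // commit removes.
    enum class DeoptimizeKind : uint8_t {
      kEager,
      kSoft,
      kLazy,
      // Removed: kBailout (dynamic-map-check bailout),
      //          kEagerWithResume (deopt exit that first calls a builtin).
    };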
@@ -3564,10 +3564,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
 }
 
-void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
-  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
-}
-
 void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
 }
@@ -3748,74 +3744,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
   Generate_BaselineOrInterpreterEntry(masm, false, true);
 }
 
-void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
-}
-
-void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
-    MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<
-      DynamicCheckMapsWithFeedbackVectorDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
-}
-
-template <class Descriptor>
-void Builtins::Generate_DynamicCheckMapsTrampoline(
-    MacroAssembler* masm, Handle<Code> builtin_target) {
-  FrameScope scope(masm, StackFrame::MANUAL);
-  __ EnterFrame(StackFrame::INTERNAL);
-
-  // Only save the registers that the DynamicCheckMaps builtin can clobber.
-  Descriptor descriptor;
-  RegList registers = descriptor.allocatable_registers();
-  // FLAG_debug_code is enabled CSA checks will call C function and so we need
-  // to save all CallerSaved registers too.
-  if (FLAG_debug_code) registers |= kJSCallerSaved;
-  __ MaybeSaveRegisters(registers);
-
-  // Load the immediate arguments from the deopt exit to pass to the builtin.
-  Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
-  Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
-  __ Ld_d(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
-  __ Ld_d(
-      slot_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
-  __ Ld_d(
-      handler_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
-  __ Call(builtin_target, RelocInfo::CODE_TARGET);
-
-  Label deopt, bailout;
-  __ Branch(&deopt, ne, a0,
-            Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kSuccess)));
-
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  __ Ret();
-
-  __ bind(&deopt);
-  __ Branch(&bailout, eq, a0,
-            Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kBailout)));
-
-  if (FLAG_debug_code) {
-    __ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus, a0,
-              Operand(static_cast<int64_t>(DynamicCheckMapsStatus::kDeopt)));
-  }
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
-  __ Jump(deopt_eager, RelocInfo::CODE_TARGET);
-
-  __ bind(&bailout);
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
-  __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
-}
-
 #undef __
 
 }  // namespace internal
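The deleted Generate_DynamicCheckMapsTrampoline above (this hunk uses Ld_d and a0, so it is the loong64 side) reloads the two immediates embedded after the deopt exit's call (a feedback slot and a handler), calls the DynamicCheckMaps builtin, and then dispatches on the status the builtin returns in the return register. A runnable model of just that dispatch; the returned strings stand in for the register restore and the tail calls the assembly performs:

    #include <cstdio>

    // Model of the Branch/bind dispatch in the deleted trampoline. The real
    // code tests the status in a0 (v0 on mips/mips64) and tail-calls a deopt
    // entry builtin; the enum mirrors V8's DynamicCheckMapsStatus.
    enum class DynamicCheckMapsStatus { kSuccess, kBailout, kDeopt };

    const char* DispatchAfterDynamicCheckMaps(DynamicCheckMapsStatus status) {
      if (status == DynamicCheckMapsStatus::kSuccess)
        return "restore registers and resume";
      if (status == DynamicCheckMapsStatus::kBailout)
        return "jump to DeoptimizeKind::kBailout entry";
      // kDeopt: the FLAG_debug_code path asserts this is the only value left.
      return "jump to DeoptimizeKind::kEager entry";
    }

    int main() {
      std::printf("%s\n",
                  DispatchAfterDynamicCheckMaps(DynamicCheckMapsStatus::kDeopt));
      return 0;
    }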
@@ -4013,10 +4013,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
 }
 
-void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
-  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
-}
-
 void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
 }
@@ -4196,74 +4192,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
   Generate_BaselineOrInterpreterEntry(masm, false, true);
 }
 
-void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
-}
-
-void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
-    MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<
-      DynamicCheckMapsWithFeedbackVectorDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
-}
-
-template <class Descriptor>
-void Builtins::Generate_DynamicCheckMapsTrampoline(
-    MacroAssembler* masm, Handle<Code> builtin_target) {
-  FrameScope scope(masm, StackFrame::MANUAL);
-  __ EnterFrame(StackFrame::INTERNAL);
-
-  // Only save the registers that the DynamicCheckMaps builtin can clobber.
-  Descriptor descriptor;
-  RegList registers = descriptor.allocatable_registers();
-  // FLAG_debug_code is enabled CSA checks will call C function and so we need
-  // to save all CallerSaved registers too.
-  if (FLAG_debug_code) registers |= kJSCallerSaved;
-  __ MaybeSaveRegisters(registers);
-
-  // Load the immediate arguments from the deopt exit to pass to the builtin.
-  Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
-  Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
-  __ Lw(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
-  __ Lw(slot_arg, MemOperand(handler_arg,
-                             Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
-  __ Lw(
-      handler_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
-
-  __ Call(builtin_target, RelocInfo::CODE_TARGET);
-
-  Label deopt, bailout;
-  __ Branch(&deopt, ne, v0,
-            Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
-
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  __ Ret();
-
-  __ bind(&deopt);
-  __ Branch(&bailout, eq, v0,
-            Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
-
-  if (FLAG_debug_code) {
-    __ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus, v0,
-              Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
-  }
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
-  __ Jump(deopt_eager, RelocInfo::CODE_TARGET);
-
-  __ bind(&bailout);
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
-  __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
-}
-
 #undef __
 
 }  // namespace internal
@@ -3591,10 +3591,6 @@ void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
 }
 
-void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
-  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
-}
-
 void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
   Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
 }
@@ -3774,73 +3770,6 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
   Generate_BaselineOrInterpreterEntry(masm, false, true);
 }
 
-void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
-}
-
-void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
-    MacroAssembler* masm) {
-  Generate_DynamicCheckMapsTrampoline<
-      DynamicCheckMapsWithFeedbackVectorDescriptor>(
-      masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
-}
-
-template <class Descriptor>
-void Builtins::Generate_DynamicCheckMapsTrampoline(
-    MacroAssembler* masm, Handle<Code> builtin_target) {
-  FrameScope scope(masm, StackFrame::MANUAL);
-  __ EnterFrame(StackFrame::INTERNAL);
-
-  // Only save the registers that the DynamicCheckMaps builtin can clobber.
-  Descriptor descriptor;
-  RegList registers = descriptor.allocatable_registers();
-  // FLAG_debug_code is enabled CSA checks will call C function and so we need
-  // to save all CallerSaved registers too.
-  if (FLAG_debug_code) registers |= kJSCallerSaved;
-  __ MaybeSaveRegisters(registers);
-
-  // Load the immediate arguments from the deopt exit to pass to the builtin.
-  Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
-  Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
-  __ Ld(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
-  __ Uld(slot_arg, MemOperand(handler_arg,
-                              Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
-  __ Uld(
-      handler_arg,
-      MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
-  __ Call(builtin_target, RelocInfo::CODE_TARGET);
-
-  Label deopt, bailout;
-  __ Branch(&deopt, ne, v0,
-            Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
-
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  __ Ret();
-
-  __ bind(&deopt);
-  __ Branch(&bailout, eq, v0,
-            Operand(static_cast<int>(DynamicCheckMapsStatus::kBailout)));
-
-  if (FLAG_debug_code) {
-    __ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus, v0,
-              Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
-  }
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
-  __ Jump(deopt_eager, RelocInfo::CODE_TARGET);
-
-  __ bind(&bailout);
-  __ MaybeRestoreRegisters(registers);
-  __ LeaveFrame(StackFrame::INTERNAL);
-  Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
-      Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
-  __ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
-}
-
 #undef __
 
 }  // namespace internal
@@ -41,18 +41,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
   return RegisterArray(a1, a5, a4, a2, a0, a3, kContextRegister);
 }
 
-// static
-constexpr auto DynamicCheckMapsDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == a0);
-  return RegisterArray(a0, a1, a2, a3, cp);
-}
-
-// static
-constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == a0);
-  return RegisterArray(a0, a1, a2, a3, cp);
-}
-
 // static
 constexpr Register LoadDescriptor::ReceiverRegister() { return a1; }
 // static
@@ -4090,12 +4090,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
             (kind == DeoptimizeKind::kLazy)
                 ? Deoptimizer::kLazyDeoptExitSize
                 : Deoptimizer::kNonLazyDeoptExitSize);
-
-  if (kind == DeoptimizeKind::kEagerWithResume) {
-    Branch(ret);
-    DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
-              Deoptimizer::kEagerWithResumeBeforeArgsSize);
-  }
 }
 
 void TurboAssembler::LoadCodeObjectEntry(Register destination,
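With the kEagerWithResume tail removed, CallForDeoptimization on these ports always emits a fixed-size exit, and the surviving DCHECK pins that size to one of two constants. A small runnable model of that invariant, using the loong64 values kept by this commit (kInstrSize == 4 is assumed here; it is not restated in the diff):

    #include <cassert>

    // Models the surviving DCHECK: the exit size depends only on whether the
    // kind is lazy. Constants are the loong64 values (2 * kInstrSize); the
    // mips and mips64 ports keep 3 * kInstrSize instead.
    enum class DeoptimizeKind { kEager, kSoft, kLazy };

    constexpr int kInstrSize = 4;
    constexpr int kNonLazyDeoptExitSize = 2 * kInstrSize;
    constexpr int kLazyDeoptExitSize = 2 * kInstrSize;

    constexpr int ExpectedExitSize(DeoptimizeKind kind) {
      return kind == DeoptimizeKind::kLazy ? kLazyDeoptExitSize
                                           : kNonLazyDeoptExitSize;
    }

    int main() {
      // No kind needs the larger variable-size kEagerWithResume exit anymore.
      assert(ExpectedExitSize(DeoptimizeKind::kEager) == 8);
      assert(ExpectedExitSize(DeoptimizeKind::kLazy) == 8);
      return 0;
    }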
@@ -37,18 +37,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
   return RegisterArray(a1, t1, t0, a0, a2, v0, a3, kContextRegister);
 }
 
-// static
-constexpr auto DynamicCheckMapsDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == v0);
-  return RegisterArray(kReturnRegister0, a0, a1, a2, cp);
-}
-
-// static
-constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == v0);
-  return RegisterArray(kReturnRegister0, a0, a1, a2, cp);
-}
-
 // static
 constexpr Register LoadDescriptor::ReceiverRegister() { return a1; }
 // static
@@ -5574,11 +5574,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
             (kind == DeoptimizeKind::kLazy)
                 ? Deoptimizer::kLazyDeoptExitSize
                 : Deoptimizer::kNonLazyDeoptExitSize);
-  if (kind == DeoptimizeKind::kEagerWithResume) {
-    Branch(ret);
-    DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
-              Deoptimizer::kEagerWithResumeBeforeArgsSize);
-  }
 }
 
 void TurboAssembler::LoadCodeObjectEntry(Register destination,
@@ -41,18 +41,6 @@ constexpr auto WriteBarrierDescriptor::registers() {
   return RegisterArray(a1, a5, a4, a0, a2, v0, a3, kContextRegister);
 }
 
-// static
-constexpr auto DynamicCheckMapsDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == v0);
-  return RegisterArray(kReturnRegister0, a0, a1, a2, cp);
-}
-
-// static
-constexpr auto DynamicCheckMapsWithFeedbackVectorDescriptor::registers() {
-  STATIC_ASSERT(kReturnRegister0 == v0);
-  return RegisterArray(kReturnRegister0, a0, a1, a2, cp);
-}
-
 // static
 constexpr Register LoadDescriptor::ReceiverRegister() { return a1; }
 // static
@@ -6117,12 +6117,6 @@ void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
             (kind == DeoptimizeKind::kLazy)
                 ? Deoptimizer::kLazyDeoptExitSize
                 : Deoptimizer::kNonLazyDeoptExitSize);
-
-  if (kind == DeoptimizeKind::kEagerWithResume) {
-    Branch(ret);
-    DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
-              Deoptimizer::kEagerWithResumeBeforeArgsSize);
-  }
 }
 
 void TurboAssembler::LoadCodeObjectEntry(Register destination,
@@ -10,13 +10,6 @@ namespace internal {
 const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
 const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
 const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 3 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeDeoptExitSize =
-    kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
-// TODO(LOONG_dev): LOONG64 Is the PcOffset right?
-const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = kInstrSize;
-const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
-    kInstrSize + kSystemPointerSize;
 
 Float32 RegisterValues::GetFloatRegister(unsigned n) const {
   return Float32::FromBits(
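The deleted loong64 constants encode the byte layout of a kEagerWithResume exit: a 3-instruction call sequence followed by two pointer-size immediates. (The deleted TODO shows the loong64 offsets were never fully verified.) Worked out with the usual LoongArch64 sizes, which are assumed here rather than stated in the diff:

    #include <cassert>

    // Worked arithmetic for the deleted loong64 constants; kInstrSize and
    // kSystemPointerSize are assumed to be 4 and 8 on LoongArch64.
    constexpr int kInstrSize = 4;
    constexpr int kSystemPointerSize = 8;

    constexpr int kEagerWithResumeBeforeArgsSize = 3 * kInstrSize;  // 12 bytes
    constexpr int kEagerWithResumeDeoptExitSize =
        kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;    // 28 bytes
    constexpr int kEagerWithResumeImmedArgs1PcOffset = kInstrSize;  // pc + 4
    constexpr int kEagerWithResumeImmedArgs2PcOffset =
        kInstrSize + kSystemPointerSize;                            // pc + 12

    int main() {
      static_assert(kEagerWithResumeDeoptExitSize == 28, "3 instrs + 2 args");
      static_assert(kEagerWithResumeImmedArgs2PcOffset ==
                        kEagerWithResumeImmedArgs1PcOffset + kSystemPointerSize,
                    "the two immediates are back to back");
      return 0;
    }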
@@ -10,12 +10,6 @@ namespace internal {
 const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
 const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
 const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 5 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeDeoptExitSize =
-    kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
-const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 2 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
-    2 * kInstrSize + kSystemPointerSize;
 
 Float32 RegisterValues::GetFloatRegister(unsigned n) const {
   return Float32::FromBits(
@@ -10,12 +10,6 @@ namespace internal {
 const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
 const int Deoptimizer::kNonLazyDeoptExitSize = 3 * kInstrSize;
 const int Deoptimizer::kLazyDeoptExitSize = 3 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 5 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeDeoptExitSize =
-    kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
-const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 2 * kInstrSize;
-const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
-    2 * kInstrSize + kSystemPointerSize;
 
 Float32 RegisterValues::GetFloatRegister(unsigned n) const {
   return Float32::FromBits(
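The two hunks above delete textually identical constants from the mips and mips64 deoptimizers, but the byte values differ because the pointer size differs between the two ports (4 bytes on mips32, 8 on mips64; kInstrSize is 4 on both — assumed values, not part of the diff):

    #include <cassert>

    // Byte values of the deleted constants on each port, parameterized by
    // pointer size. kInstrSize == 4 is assumed for both mips32 and mips64.
    constexpr int kInstrSize = 4;

    // kEagerWithResumeDeoptExitSize: 5-instruction call sequence + 2 args.
    constexpr int ExitSize(int pointer_size) {
      return 5 * kInstrSize + 2 * pointer_size;
    }
    // kEagerWithResumeImmedArgs2PcOffset: first arg at pc + 8, second after it.
    constexpr int Args2PcOffset(int pointer_size) {
      return 2 * kInstrSize + pointer_size;
    }

    int main() {
      static_assert(ExitSize(4) == 28 && Args2PcOffset(4) == 12, "mips32");
      static_assert(ExitSize(8) == 36 && Args2PcOffset(8) == 16, "mips64");
      return 0;
    }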
@@ -2891,14 +2891,6 @@ TEST(DeoptExitSizeIsFixed) {
     DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
     Label before_exit;
     masm.bind(&before_exit);
-    if (kind == DeoptimizeKind::kEagerWithResume) {
-      Builtin target = Deoptimizer::GetDeoptWithResumeBuiltin(
-          DeoptimizeReason::kDynamicCheckMaps);
-      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
-                                 nullptr);
-      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
-               Deoptimizer::kEagerWithResumeBeforeArgsSize);
-    } else {
     Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
     masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                nullptr);
@@ -2908,7 +2900,6 @@ TEST(DeoptExitSizeIsFixed) {
                  : Deoptimizer::kNonLazyDeoptExitSize);
-    }
   }
 }
 
 #undef __
 
@@ -1349,14 +1349,6 @@ TEST(DeoptExitSizeIsFixed) {
     DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
     Label before_exit;
    masm.bind(&before_exit);
-    if (kind == DeoptimizeKind::kEagerWithResume) {
-      Builtin target = Deoptimizer::GetDeoptWithResumeBuiltin(
-          DeoptimizeReason::kDynamicCheckMaps);
-      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
-                                 nullptr);
-      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
-               Deoptimizer::kEagerWithResumeBeforeArgsSize);
-    } else {
     Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
     masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                nullptr);
@@ -1366,7 +1358,6 @@ TEST(DeoptExitSizeIsFixed) {
                  : Deoptimizer::kNonLazyDeoptExitSize);
-    }
   }
 }
 
 #undef __
 
@@ -1702,14 +1702,6 @@ TEST(DeoptExitSizeIsFixed) {
     DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
     Label before_exit;
     masm.bind(&before_exit);
-    if (kind == DeoptimizeKind::kEagerWithResume) {
-      Builtin target = Deoptimizer::GetDeoptWithResumeBuiltin(
-          DeoptimizeReason::kDynamicCheckMaps);
-      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
-                                 nullptr);
-      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
-               Deoptimizer::kEagerWithResumeBeforeArgsSize);
-    } else {
     Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
     masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                nullptr);
@@ -1719,7 +1711,6 @@ TEST(DeoptExitSizeIsFixed) {
                  : Deoptimizer::kNonLazyDeoptExitSize);
-    }
   }
 }
 
 #undef __
 
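After these three hunks, DeoptExitSizeIsFixed reduces to its former else arm on every port: each deopt exit must be exactly kLazyDeoptExitSize or kNonLazyDeoptExitSize bytes. A sketch of the resulting loop body, reconstructed from the context lines above (simplified, not the verbatim post-commit source; kLastDeoptimizeKind and the MacroAssembler setup come from the surrounding V8 test, which is not shown here):

    // Reconstructed post-commit shape of the test loop (sketch).
    for (int i = 0; i <= static_cast<int>(kLastDeoptimizeKind); i++) {
      DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
      Label before_exit;
      masm.bind(&before_exit);
      Builtin target = Deoptimizer::GetDeoptimizationEntry(kind);
      masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
                                 nullptr);
      CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
               kind == DeoptimizeKind::kLazy
                   ? Deoptimizer::kLazyDeoptExitSize
                   : Deoptimizer::kNonLazyDeoptExitSize);
    }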