[Turboprop] Move dynamic check maps immediate args to deopt exit.

Rather than loading the immediate arguments required by the
DynamicCheckMaps builtin into registers on the fast path, insert
them into the instruction stream at the deopt exit and have the
builtin load them into registers itself.

BUG=v8:10582

Change-Id: I66716570b408501374eed8f5e6432df64c6deb7c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2589736
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Sathya Gunasekaran  <gsathya@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71790}
commit b2a611d815 (parent 30eef5475a)
Ross McIlroy <rmcilroy@chromium.org> authored 2020-12-16 14:01:05 +00:00; committed by Commit Bot
30 changed files with 191 additions and 37 deletions
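
For orientation, here is a compile-time sketch of the layout this change gives an eager-with-resume deopt exit. Illustration only, not part of the CL: it uses the x64 constants from the deoptimizer hunks below, and the namespace name and comments are invented.

// Illustration only: shape of an eager-with-resume deopt exit after this
// change, modelled with the x64 constants defined further down in this CL.
#include <cstdio>

namespace deopt_exit_layout_sketch {  // name invented for this sketch

constexpr int kSystemPointerSize = 8;               // x64
constexpr int kNonLazyDeoptExitSize = 7;            // call <trampoline>
constexpr int kEagerWithResumeBeforeArgsSize = 12;  // call + jmp <resume>
constexpr int kEagerWithResumeDeoptExitSize =
    kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
constexpr int kEagerWithResumeImmedArgs1PcOffset = 5;
constexpr int kEagerWithResumeImmedArgs2PcOffset = 13;

// Byte offsets from the deopt exit label:
//   [ 0, 7)   call DynamicCheckMapsTrampoline  (return PC = exit + 7)
//   [ 7, 12)  jmp <resume>  (taken when the builtin returns without
//                            deoptimizing)
//   [12, 20)  immediate arg 1: feedback slot index
//   [20, 28)  immediate arg 2: handler (Smi-tagged)
// The trampoline locates the immediates relative to the return PC it finds
// on the stack, which is why the ImmedArgs offsets are PC-relative.
static_assert(kNonLazyDeoptExitSize + kEagerWithResumeImmedArgs1PcOffset ==
                  kEagerWithResumeBeforeArgsSize,
              "arg 1 starts right after the before-args code");
static_assert(kEagerWithResumeImmedArgs2PcOffset ==
                  kEagerWithResumeImmedArgs1PcOffset + kSystemPointerSize,
              "arg 2 immediately follows arg 1");
static_assert(kNonLazyDeoptExitSize + kEagerWithResumeImmedArgs2PcOffset +
                      kSystemPointerSize ==
                  kEagerWithResumeDeoptExitSize,
              "the two args account for the extra exit size");

}  // namespace deopt_exit_layout_sketch

int main() {
  std::printf("x64 eager-with-resume deopt exit: %d bytes\n",
              deopt_exit_layout_sketch::kEagerWithResumeDeoptExitSize);
}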


@ -3397,6 +3397,18 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
if (FLAG_debug_code) registers |= kCallerSaved;
__ SaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
Register slot_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
Register handler_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
__ ldr(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ ldr(slot_arg, MemOperand(handler_arg,
Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ ldr(
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(BUILTIN_CODE(masm->isolate(), DynamicCheckMaps),
RelocInfo::CODE_TARGET);


@ -3936,6 +3936,18 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
if (FLAG_debug_code) registers |= kCallerSaved.list();
__ SaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
Register slot_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
Register handler_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
__ Ldr(handler_arg, MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ Ldr(slot_arg, MemOperand(handler_arg,
Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ Ldr(
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(BUILTIN_CODE(masm->isolate(), DynamicCheckMaps),
RelocInfo::CODE_TARGET);


@ -56,10 +56,10 @@ IC_BUILTIN_PARAM(LoadGlobalICInsideTypeofTrampoline, LoadGlobalICTrampoline,
INSIDE_TYPEOF)
TF_BUILTIN(DynamicCheckMaps, CodeStubAssembler) {
- auto slot = UncheckedParameter<IntPtrT>(Descriptor::kSlot);
auto map = Parameter<Map>(Descriptor::kMap);
+ auto slot = UncheckedParameter<IntPtrT>(Descriptor::kSlot);
auto handler = Parameter<Object>(Descriptor::kHandler);
- TNode<Int32T> status = DynamicCheckMaps(slot, map, handler);
+ TNode<Int32T> status = DynamicCheckMaps(map, slot, handler);
Return(status);
}


@ -3999,6 +3999,17 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
if (FLAG_debug_code) registers |= kJSCallerSaved;
__ SaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
Register slot_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
Register handler_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
__ mov(handler_arg, Operand(ebp, CommonFrameConstants::kCallerPCOffset));
__ mov(slot_arg,
Operand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ mov(handler_arg,
Operand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(BUILTIN_CODE(masm->isolate(), DynamicCheckMaps),
RelocInfo::CODE_TARGET);


@ -82,7 +82,7 @@ macro PerformMonomorphicCheck(
// the map, but the associated handler check fails then we return kDeopt status.
@export
macro DynamicCheckMaps(
- slotIndex: intptr, actualMap: Map, actualHandler: Smi|DataHandler): int32 {
+ actualMap: Map, slotIndex: intptr, actualHandler: Smi|DataHandler): int32 {
const feedbackVector = LoadFeedbackVectorForStubWithTrampoline();
const feedback = feedbackVector[slotIndex];
try {


@ -4275,6 +4275,18 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
if (FLAG_debug_code) registers |= kCallerSaved;
__ SaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
Register slot_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
Register handler_arg =
descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
__ movq(handler_arg, Operand(rbp, CommonFrameConstants::kCallerPCOffset));
__ movq(slot_arg, Operand(handler_arg,
Deoptimizer::kEagerWithResumeImmedArgs1PcOffset));
__ movq(
handler_arg,
Operand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
__ Call(BUILTIN_CODE(masm->isolate(), DynamicCheckMaps),
RelocInfo::CODE_TARGET);


@ -2623,7 +2623,7 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
if (kind == DeoptimizeKind::kEagerWithResume) {
b(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
}


@ -2026,7 +2026,7 @@ void TurboAssembler::CallForDeoptimization(
if (kind == DeoptimizeKind::kEagerWithResume) {
b(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
}
}


@ -2316,7 +2316,7 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
jmp(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
set_predictable_code_size(old_predictable_code_size);
}
}


@ -878,10 +878,10 @@ class LoadGlobalWithVectorDescriptor : public LoadGlobalDescriptor {
class DynamicCheckMapsDescriptor final : public CallInterfaceDescriptor {
public:
- DEFINE_PARAMETERS(kSlot, kMap, kHandler)
+ DEFINE_PARAMETERS(kMap, kSlot, kHandler)
DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::Int32(), // return val
- MachineType::IntPtr(), // kSlot
MachineType::TaggedPointer(), // kMap
+ MachineType::IntPtr(), // kSlot
MachineType::TaggedSigned()) // kHandler
DECLARE_DESCRIPTOR(DynamicCheckMapsDescriptor, CallInterfaceDescriptor)


@ -3013,8 +3013,9 @@ void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
bool old_predictable_code_size = predictable_code_size();
set_predictable_code_size(true);
jmp(ret);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
set_predictable_code_size(old_predictable_code_size);
}
}


@ -962,7 +962,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
case kArchDeoptimize: {
DeoptimizationExit* exit =
- BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ BuildTranslation(instr, -1, 0, 0, OutputFrameStateCombine::Ignore());
__ b(exit->label());
break;
}


@ -902,7 +902,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
case kArchDeoptimize: {
DeoptimizationExit* exit =
- BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ BuildTranslation(instr, -1, 0, 0, OutputFrameStateCombine::Ignore());
__ B(exit->label());
break;
}


@ -197,6 +197,7 @@ class DeoptimizationExit : public ZoneObject {
pc_offset_(pc_offset),
kind_(kind),
reason_(reason),
immediate_args_(nullptr),
emitted_(false) {}
bool has_deoptimization_id() const {
@ -219,6 +220,12 @@ class DeoptimizationExit : public ZoneObject {
int pc_offset() const { return pc_offset_; }
DeoptimizeKind kind() const { return kind_; }
DeoptimizeReason reason() const { return reason_; }
const ZoneVector<ImmediateOperand*>* immediate_args() const {
return immediate_args_;
}
void set_immediate_args(ZoneVector<ImmediateOperand*>* immediate_args) {
immediate_args_ = immediate_args;
}
// Returns whether the deopt exit has already been emitted. Most deopt exits
// are emitted contiguously at the end of the code, but unconditional deopt
// exits (kArchDeoptimize) may be inlined where they are encountered.
@ -236,6 +243,7 @@ class DeoptimizationExit : public ZoneObject {
const int pc_offset_;
const DeoptimizeKind kind_;
const DeoptimizeReason reason_;
ZoneVector<ImmediateOperand*>* immediate_args_;
bool emitted_;
};


@ -152,6 +152,52 @@ uint32_t CodeGenerator::GetStackCheckOffset() {
return std::max(frame_height_delta, max_pushed_argument_bytes);
}
void CodeGenerator::AssembleDeoptImmediateArgs(
const ZoneVector<ImmediateOperand*>* immediate_args, Label* deopt_exit) {
// EagerWithResume deopts should have immediate args and, to keep deopt exit
// sizes fixed, currently always have exactly two immediate arguments in the
// deopt exit.
constexpr int kImmediateArgCount = 2;
DCHECK_NOT_NULL(immediate_args);
DCHECK_EQ(kImmediateArgCount, immediate_args->size());
const int expected_offsets[] = {
Deoptimizer::kEagerWithResumeImmedArgs1PcOffset,
Deoptimizer::kEagerWithResumeImmedArgs2PcOffset};
for (int i = 0; i < kImmediateArgCount; i++) {
ImmediateOperand* op = immediate_args->at(i);
Constant constant = instructions()->GetImmediate(op);
uintptr_t value;
switch (constant.type()) {
case Constant::kInt32:
value = constant.ToInt32();
break;
#ifdef V8_TARGET_ARCH_64_BIT
case Constant::kInt64:
value = constant.ToInt64();
break;
#endif
case Constant::kFloat64: {
int smi;
CHECK(DoubleToSmiInteger(constant.ToFloat64().value(), &smi));
value = Smi::FromInt(smi).ptr();
break;
}
default:
// Currently only Smis and Ints are supported, but other immediate
// constants can be added when required.
UNREACHABLE();
}
DCHECK_EQ(tasm()->SizeOfCodeGeneratedSince(deopt_exit),
expected_offsets[i] + Deoptimizer::kNonLazyDeoptExitSize);
USE(expected_offsets);
tasm()->dp(value);
}
DCHECK_EQ(tasm()->SizeOfCodeGeneratedSince(deopt_exit),
Deoptimizer::kEagerWithResumeDeoptExitSize);
}
CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
DeoptimizationExit* exit) {
int deoptimization_id = exit->deoptimization_id();
@ -184,6 +230,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
tasm()->CallForDeoptimization(target, deoptimization_id, exit->label(),
deopt_kind, exit->continue_label(),
jump_deoptimization_entry_label);
if (deopt_kind == DeoptimizeKind::kEagerWithResume) {
AssembleDeoptImmediateArgs(exit->immediate_args(), exit->label());
}
exit->set_emitted();
return kSuccess;
@ -794,9 +844,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
case kFlags_deoptimize_and_poison: {
// Assemble a conditional eager deoptimization after this instruction.
InstructionOperandConverter i(this, instr);
- size_t frame_state_offset = MiscField::decode(instr->opcode());
- DeoptimizationExit* const exit =
- AddDeoptimizationExit(instr, frame_state_offset);
+ size_t frame_state_offset =
+ DeoptFrameStateOffsetField::decode(instr->opcode());
+ size_t immediate_args_count =
+ DeoptImmedArgsCountField::decode(instr->opcode());
+ DeoptimizationExit* const exit = AddDeoptimizationExit(
+ instr, frame_state_offset, immediate_args_count);
BranchInfo branch;
branch.condition = condition;
branch.true_label = exit->label();
@ -1010,7 +1063,7 @@ void CodeGenerator::RecordCallPosition(Instruction* instr) {
FrameStateDescriptor* descriptor =
GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
int pc_offset = tasm()->pc_offset_for_safepoint();
- BuildTranslation(instr, pc_offset, frame_state_offset,
+ BuildTranslation(instr, pc_offset, frame_state_offset, 0,
descriptor->state_combine());
}
}
@ -1152,7 +1205,7 @@ void CodeGenerator::BuildTranslationForFrameStateDescriptor(
DeoptimizationExit* CodeGenerator::BuildTranslation(
Instruction* instr, int pc_offset, size_t frame_state_offset,
- OutputFrameStateCombine state_combine) {
+ size_t immediate_args_count, OutputFrameStateCombine state_combine) {
DeoptimizationEntry const& entry =
GetDeoptimizationEntry(instr, frame_state_offset);
FrameStateDescriptor* const descriptor = entry.descriptor();
@ -1180,6 +1233,15 @@ DeoptimizationExit* CodeGenerator::BuildTranslation(
if (!Deoptimizer::kSupportsFixedDeoptExitSizes) {
exit->set_deoptimization_id(next_deoptimization_id_++);
}
if (immediate_args_count != 0) {
auto immediate_args = zone()->New<ZoneVector<ImmediateOperand*>>(zone());
InstructionOperandIterator iter(
instr, frame_state_offset - immediate_args_count - 1);
for (size_t i = 0; i < immediate_args_count; i++) {
immediate_args->emplace_back(ImmediateOperand::cast(iter.Advance()));
}
exit->set_immediate_args(immediate_args);
}
deoptimization_exits_.push_back(exit);
return exit;
@ -1335,8 +1397,9 @@ void CodeGenerator::MarkLazyDeoptSite() {
}
DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
- Instruction* instr, size_t frame_state_offset) {
- return BuildTranslation(instr, -1, frame_state_offset,
+ Instruction* instr, size_t frame_state_offset,
+ size_t immediate_args_count) {
+ return BuildTranslation(instr, -1, frame_state_offset, immediate_args_count,
OutputFrameStateCombine::Ignore());
}
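
To make the hand-off concrete, here is a host-side simulation (illustration only, not V8 code) of the round trip: AssembleDeoptImmediateArgs above emits two data words directly after the exit's code, and the DynamicCheckMapsTrampoline reads them back at fixed offsets from the return PC saved by the call. It uses the ia32 constants from the deoptimizer hunks below; the buffer and the slot/handler values are invented.

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // ia32 constants from this CL (kSystemPointerSize == 4 on ia32).
  constexpr int kSystemPointerSize = 4;
  constexpr int kNonLazyDeoptExitSize = 5;            // call <trampoline>
  constexpr int kEagerWithResumeBeforeArgsSize = 10;  // call + jmp <resume>
  constexpr int kEagerWithResumeImmedArgs1PcOffset = 5;
  constexpr int kEagerWithResumeImmedArgs2PcOffset = 5 + kSystemPointerSize;

  // "Generated code" for one deopt exit: 10 bytes of instructions followed by
  // two pointer-sized immediates, as written by tasm()->dp() in
  // AssembleDeoptImmediateArgs.
  uint8_t exit[kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize] = {};
  const uint32_t slot = 7;             // feedback vector slot (invented)
  const uint32_t handler = 0x42 << 1;  // Smi-tagged handler (invented)
  std::memcpy(exit + kEagerWithResumeBeforeArgsSize, &slot, sizeof(slot));
  std::memcpy(exit + kEagerWithResumeBeforeArgsSize + kSystemPointerSize,
              &handler, sizeof(handler));

  // What the DynamicCheckMapsTrampoline does: the return PC saved by the call
  // points just past the call instruction (exit + kNonLazyDeoptExitSize), and
  // the immediates sit at fixed offsets from that PC.
  const uint8_t* return_pc = exit + kNonLazyDeoptExitSize;
  uint32_t slot_arg = 0, handler_arg = 0;
  std::memcpy(&slot_arg, return_pc + kEagerWithResumeImmedArgs1PcOffset,
              sizeof(slot_arg));
  std::memcpy(&handler_arg, return_pc + kEagerWithResumeImmedArgs2PcOffset,
              sizeof(handler_arg));
  std::printf("slot=%u handler=0x%x\n", static_cast<unsigned>(slot_arg),
              static_cast<unsigned>(handler_arg));  // slot=7 handler=0x84
  return 0;
}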


@ -247,6 +247,9 @@ class V8_EXPORT_PRIVATE CodeGenerator final : public GapResolver::Assembler {
CodeGenResult AssembleDeoptimizerCall(DeoptimizationExit* exit);
void AssembleDeoptImmediateArgs(
const ZoneVector<ImmediateOperand*>* immediate_args, Label* deopt_exit);
// ===========================================================================
// ============= Architecture-specific code generation methods. ==============
// ===========================================================================
@ -391,6 +394,7 @@ class V8_EXPORT_PRIVATE CodeGenerator final : public GapResolver::Assembler {
size_t frame_state_offset);
DeoptimizationExit* BuildTranslation(Instruction* instr, int pc_offset,
size_t frame_state_offset,
size_t immediate_args_count,
OutputFrameStateCombine state_combine);
void BuildTranslationForFrameStateDescriptor(
FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
@ -408,7 +412,8 @@ class V8_EXPORT_PRIVATE CodeGenerator final : public GapResolver::Assembler {
void PrepareForDeoptimizationExits(ZoneDeque<DeoptimizationExit*>* exits);
DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
- size_t frame_state_offset);
+ size_t frame_state_offset,
+ size_t immediate_args_count);
// ===========================================================================


@ -1046,7 +1046,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
case kArchDeoptimize: {
DeoptimizationExit* exit =
- BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ BuildTranslation(instr, -1, 0, 0, OutputFrameStateCombine::Ignore());
__ jmp(exit->label());
break;
}


@ -279,6 +279,8 @@ static_assert(ArchOpcodeField::is_valid(kLastArchOpcode),
using AddressingModeField = base::BitField<AddressingMode, 9, 5>;
using FlagsModeField = base::BitField<FlagsMode, 14, 3>;
using FlagsConditionField = base::BitField<FlagsCondition, 17, 5>;
using DeoptImmedArgsCountField = base::BitField<int, 22, 2>;
using DeoptFrameStateOffsetField = base::BitField<int, 24, 8>;
using MiscField = base::BitField<int, 22, 10>;
} // namespace compiler
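
As an aside on the opcode encoding, here is a minimal stand-alone sketch of how the two new fields are packed and unpacked. BitField below is a simplified stand-in for v8::base::BitField (not V8's actual implementation), and the values in main() are invented, but the shift/size pairs match the declarations above.

#include <cassert>
#include <cstdint>

// Simplified stand-in for v8::base::BitField<T, shift, size>.
template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((uint32_t{1} << kSize) - 1) << kShift;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

// Field layout used by deoptimizing instructions after this change.  A 2-bit
// count is enough for the two DynamicCheckMaps immediates (slot and handler);
// the 8-bit field carries what MiscField encoded before, i.e. the input index
// at which the deoptimization's frame-state arguments begin.
using DeoptImmedArgsCountField = BitField<int, 22, 2>;
using DeoptFrameStateOffsetField = BitField<int, 24, 8>;

int main() {
  uint32_t opcode = 0;  // arch/addressing/flags bits omitted in this sketch
  opcode |= DeoptImmedArgsCountField::encode(2) |
            DeoptFrameStateOffsetField::encode(4);  // example values
  assert(DeoptImmedArgsCountField::decode(opcode) == 2);
  assert(DeoptFrameStateOffsetField::decode(opcode) == 4);
  return 0;
}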


@ -785,13 +785,22 @@ Instruction* InstructionSelector::EmitWithContinuation(
continuation_inputs_.push_back(g.Label(cont->true_block()));
continuation_inputs_.push_back(g.Label(cont->false_block()));
} else if (cont->IsDeoptimize()) {
+ int immediate_args_count = 0;
if (cont->has_extra_args()) {
for (int i = 0; i < cont->extra_args_count(); i++) {
- continuation_inputs_.push_back(cont->extra_args()[i]);
+ InstructionOperand op = cont->extra_args()[i];
+ continuation_inputs_.push_back(op);
input_count++;
+ if (op.IsImmediate()) {
+ immediate_args_count++;
+ } else {
+ // All immediate args should be added last.
+ DCHECK_EQ(immediate_args_count, 0);
+ }
}
}
- opcode |= MiscField::encode(static_cast<int>(input_count));
+ opcode |= DeoptImmedArgsCountField::encode(immediate_args_count) |
+ DeoptFrameStateOffsetField::encode(static_cast<int>(input_count));
AppendDeoptimizeArguments(&continuation_inputs_, cont->kind(),
cont->reason(), cont->feedback(),
cont->frame_state());
@ -3205,13 +3214,9 @@ void InstructionSelector::VisitDynamicCheckMapsWithDeoptUnless(Node* node) {
CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kNoFlags, Operator::kNoDeopt | Operator::kNoThrow);
- // TODO(rmcilroy): Pass the constant values as immediates and move them into
- // the correct location out of the fast-path (e.g., at deopt or in trampoline)
InstructionOperand dynamic_check_args[] = {
- g.UseLocation(n.slot(), call_descriptor->GetInputLocation(1)),
- g.UseLocation(n.map(), call_descriptor->GetInputLocation(2)),
- g.UseLocation(n.handler(), call_descriptor->GetInputLocation(3)),
- };
+ g.UseLocation(n.map(), call_descriptor->GetInputLocation(1)),
+ g.UseImmediate(n.slot()), g.UseImmediate(n.handler())};
if (NeedsPoisoning(IsSafetyCheck::kCriticalSafetyCheck)) {
FlagsContinuation cont = FlagsContinuation::ForDeoptimizeAndPoison(


@ -1166,7 +1166,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
case kArchDeoptimize: {
DeoptimizationExit* exit =
- BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
+ BuildTranslation(instr, -1, 0, 0, OutputFrameStateCombine::Ignore());
__ jmp(exit->label());
break;
}


@ -10,7 +10,12 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 2 * kInstrSize;
const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
- const int Deoptimizer::kEagerWithResumeDeoptExitSize = 3 * kInstrSize;
+ const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 3 * kInstrSize;
+ const int Deoptimizer::kEagerWithResumeDeoptExitSize =
+ kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = kInstrSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
+ kInstrSize + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
const int kShift = n % 2 == 0 ? 0 : 32;


@ -16,7 +16,12 @@ const int Deoptimizer::kLazyDeoptExitSize = 2 * kInstrSize;
#else
const int Deoptimizer::kLazyDeoptExitSize = 1 * kInstrSize;
#endif
- const int Deoptimizer::kEagerWithResumeDeoptExitSize = 2 * kInstrSize;
+ const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 2 * kInstrSize;
+ const int Deoptimizer::kEagerWithResumeDeoptExitSize =
+ kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = kInstrSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
+ kInstrSize + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
return Float32::FromBits(


@ -547,7 +547,10 @@ class Deoptimizer : public Malloced {
// kSupportsFixedDeoptExitSizes is true.
V8_EXPORT_PRIVATE static const int kNonLazyDeoptExitSize;
V8_EXPORT_PRIVATE static const int kLazyDeoptExitSize;
V8_EXPORT_PRIVATE static const int kEagerWithResumeBeforeArgsSize;
V8_EXPORT_PRIVATE static const int kEagerWithResumeDeoptExitSize;
V8_EXPORT_PRIVATE static const int kEagerWithResumeImmedArgs1PcOffset;
V8_EXPORT_PRIVATE static const int kEagerWithResumeImmedArgs2PcOffset;
// Tracing.
static void TraceMarkForDeoptimization(Code code, const char* reason);


@ -12,7 +12,12 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 5;
const int Deoptimizer::kLazyDeoptExitSize = 5;
- const int Deoptimizer::kEagerWithResumeDeoptExitSize = 10;
+ const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 10;
+ const int Deoptimizer::kEagerWithResumeDeoptExitSize =
+ kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 5;
+ const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset =
+ 5 + kSystemPointerSize;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
return Float32::FromBits(


@ -12,7 +12,11 @@ namespace internal {
const bool Deoptimizer::kSupportsFixedDeoptExitSizes = true;
const int Deoptimizer::kNonLazyDeoptExitSize = 7;
const int Deoptimizer::kLazyDeoptExitSize = 7;
- const int Deoptimizer::kEagerWithResumeDeoptExitSize = 12;
+ const int Deoptimizer::kEagerWithResumeBeforeArgsSize = 12;
+ const int Deoptimizer::kEagerWithResumeDeoptExitSize =
+ kEagerWithResumeBeforeArgsSize + 2 * kSystemPointerSize;
+ const int Deoptimizer::kEagerWithResumeImmedArgs1PcOffset = 5;
+ const int Deoptimizer::kEagerWithResumeImmedArgs2PcOffset = 13;
Float32 RegisterValues::GetFloatRegister(unsigned n) const {
return Float32::FromBits(


@ -2794,6 +2794,7 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
default:
UnimplementedInstruction();
data++;
}
}


@ -1537,7 +1537,7 @@ TEST(DeoptExitSizeIsFixed) {
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
nullptr);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
} else {
Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,


@ -330,7 +330,7 @@ TEST(DeoptExitSizeIsFixed) {
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
nullptr);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
} else {
Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,


@ -111,7 +111,7 @@ TEST(DeoptExitSizeIsFixed) {
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
&before_exit);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
} else {
Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
// Mirroring logic in code-generator.cc.


@ -1053,7 +1053,7 @@ TEST(DeoptExitSizeIsFixed) {
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,
nullptr);
CHECK_EQ(masm.SizeOfCodeGeneratedSince(&before_exit),
- Deoptimizer::kEagerWithResumeDeoptExitSize);
+ Deoptimizer::kEagerWithResumeBeforeArgsSize);
} else {
Builtins::Name target = Deoptimizer::GetDeoptimizationEntry(kind);
masm.CallForDeoptimization(target, 42, &before_exit, kind, &before_exit,