Refactor of AssembleDeoptimizerCall.

There's no need for this code to be completely architecture specific.

Bug: v8:6563
Change-Id: I90aa1aa76fa266a247d8f374459a6eb6469c8c75
Reviewed-on: https://chromium-review.googlesource.com/612340
Commit-Queue: Juliana Patricia Vicente Franco <jupvfranco@google.com>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47493}
Author: Juliana Franco
Date: 2017-08-21 20:19:52 +02:00 (committed by Commit Bot)
Parent: 47bd0ef77c
Commit: 2d858519f7
14 changed files with 52 additions and 111 deletions
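At a glance, the per-file diffs below all apply one pattern: the platform-independent CodeGenerator now owns AssembleDeoptimizerCall, and each architecture's TurboAssembler only contributes a small CallForDeoptimization hook (plus any arch-specific fixup, such as ARM's constant-pool check). A minimal, self-contained C++ sketch of that shape, using stand-in types and names rather than the real V8 classes:

    #include <cstdio>

    // Stand-in for an architecture's TurboAssembler: it only supplies the call
    // instruction, plus any architecture-specific fixup (ARM also checks the
    // constant pool here).
    struct ArmAssemblerSketch {
      void CallForDeoptimization(const char* target) {
        std::printf("arm: call %s, then CheckConstPool\n", target);
      }
    };

    struct X64AssemblerSketch {
      void CallForDeoptimization(const char* target) {
        std::printf("x64: call %s\n", target);
      }
    };

    // Stand-in for the shared CodeGenerator::AssembleDeoptimizerCall: the bailout
    // lookup, deopt-reason recording and error handling live here once, instead
    // of being repeated in every architecture's code generator.
    template <typename Assembler>
    bool AssembleDeoptimizerCallSketch(Assembler* tasm, const char* deopt_entry) {
      if (deopt_entry == nullptr) return false;  // kTooManyDeoptimizationBailouts
      // ... map the DeoptimizeKind to a bailout type, record the deopt reason ...
      tasm->CallForDeoptimization(deopt_entry);
      return true;  // kSuccess
    }

    int main() {
      ArmAssemblerSketch arm;
      X64AssemblerSketch x64;
      AssembleDeoptimizerCallSketch(&arm, "deopt_entry_0");
      AssembleDeoptimizerCallSketch(&x64, "deopt_entry_0");
      return 0;
    }

In the actual change the dispatch is not a template — each build compiles exactly one architecture's TurboAssembler, reached through tasm() — but the division of responsibility is the same.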

@@ -352,6 +352,13 @@ class TurboAssembler : public Assembler {
             bool check_constant_pool = true);
   void Call(Label* target);
+  // This should only be used when assembling a deoptimizer call because of
+  // the CheckConstPool invocation, which is only needed for deoptimization.
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    Call(target, rmode);
+    CheckConstPool(false, false);
+  }
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count, Condition cond = al);

@@ -871,6 +871,10 @@ class TurboAssembler : public Assembler {
   void Call(Address target, RelocInfo::Mode rmode);
   void Call(Handle<Code> code, RelocInfo::Mode rmode = RelocInfo::CODE_TARGET);
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    Call(target, rmode);
+  }
   // For every Call variant, there is a matching CallSize function that returns
   // the size (in bytes) of the call sequence.
   static int CallSize(Register target);

@@ -2696,26 +2696,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   }
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      __ isolate(), deoptimization_id, bailout_type);
-  // TODO(turbofan): We should be able to generate better code by sharing the
-  // actual final call site and just bl'ing to it here, similar to what we do
-  // in the lithium backend.
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  __ CheckConstPool(false, false);
-  return kSuccess;
-}
 void CodeGenerator::FinishFrame(Frame* frame) {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

@@ -2373,22 +2373,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   __ EndBlockPools();
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      __ isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  return kSuccess;
-}
 void CodeGenerator::FinishFrame(Frame* frame) {
   frame->AlignFrame(16);
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

@@ -81,21 +81,35 @@ void CodeGenerator::CreateFrameAccessState(Frame* frame) {
   frame_access_state_ = new (zone()) FrameAccessState(frame);
 }
-Deoptimizer::BailoutType CodeGenerator::DeoptimizerCallBailout(
+CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
   DeoptimizeKind deopt_kind = GetDeoptimizationKind(deoptimization_id);
+  Deoptimizer::BailoutType bailout_type;
   switch (deopt_kind) {
     case DeoptimizeKind::kSoft: {
-      return Deoptimizer::SOFT;
+      bailout_type = Deoptimizer::SOFT;
+      break;
     }
     case DeoptimizeKind::kEager: {
-      return Deoptimizer::EAGER;
+      bailout_type = Deoptimizer::EAGER;
+      break;
     }
     case DeoptimizeKind::kLazy: {
-      return Deoptimizer::LAZY;
+      bailout_type = Deoptimizer::LAZY;
+      break;
     }
     default: { UNREACHABLE(); }
   }
-  UNREACHABLE();
+  DeoptimizeReason deoptimization_reason =
+      GetDeoptimizationReason(deoptimization_id);
+  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
+      tasm()->isolate(), deoptimization_id, bailout_type);
+  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
+  if (info()->is_source_positions_enabled()) {
+    tasm()->RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
+  }
+  tasm()->CallForDeoptimization(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+  return kSuccess;
 }
 void CodeGenerator::AssembleCode() {

@@ -149,13 +149,15 @@ class CodeGenerator final : public GapResolver::Assembler {
   // adjusted stack pointer is returned in |slot|.
   bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);
-  Deoptimizer::BailoutType DeoptimizerCallBailout(int deoptimization_id,
-                                                  SourcePosition pos);
+  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
+                                        SourcePosition pos);
   // ===========================================================================
   // ============= Architecture-specific code generation methods. ==============
   // ===========================================================================
+  CodeGenResult FinalizeAssembleDeoptimizerCall(Address deoptimization_entry);
   CodeGenResult AssembleArchInstruction(Instruction* instr);
   void AssembleArchJump(RpoNumber target);
   void AssembleArchBranch(Instruction* instr, BranchInfo* branch);
@@ -164,9 +166,6 @@ class CodeGenerator final : public GapResolver::Assembler {
   void AssembleArchLookupSwitch(Instruction* instr);
   void AssembleArchTableSwitch(Instruction* instr);
-  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
-                                        SourcePosition pos);
   // Generates an architecture-specific, descriptor-specific prologue
   // to set up a stack frame.
   void AssembleConstructFrame();

@@ -2510,22 +2510,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   __ jmp(Operand::JumpTable(input, times_4, table));
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      __ isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  return kSuccess;
-}
 // The calling convention for JSFunctions on IA32 passes arguments on the
 // stack and the JSFunction and context in EDI and ESI, respectively, thus

@@ -3159,22 +3159,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   });
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  return kSuccess;
-}
 void CodeGenerator::FinishFrame(Frame* frame) {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

@@ -3458,22 +3458,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   });
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      tasm()->isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  return kSuccess;
-}
 void CodeGenerator::FinishFrame(Frame* frame) {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

@@ -2941,23 +2941,6 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   __ jmp(Operand(kScratchRegister, input, times_8, 0));
 }
-CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeReason deoptimization_reason =
-      GetDeoptimizationReason(deoptimization_id);
-  Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
-  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
-      __ isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
-  if (info()->is_source_positions_enabled()) {
-    __ RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
-  }
-  __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
-  return kSuccess;
-}
 namespace {
 static const int kQuadWordSize = 16;

@@ -124,6 +124,10 @@ class TurboAssembler : public Assembler {
   void Call(Handle<Code> target, RelocInfo::Mode rmode) { call(target, rmode); }
   void Call(Label* target) { call(target); }
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    call(target, rmode);
+  }
   inline bool AllowThisStubCall(CodeStub* stub);
   void CallStubDelayed(CodeStub* stub);

@@ -278,6 +278,11 @@ class TurboAssembler : public Assembler {
             RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
             COND_ARGS);
   void Call(Label* target);
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    Call(target, rmode);
+  }
   void Ret(COND_ARGS);
   inline void Ret(BranchDelaySlot bd, Condition cond = al,
                   Register rs = zero_reg, const Operand& rt = Operand(zero_reg)) {

@@ -318,6 +318,11 @@ class TurboAssembler : public Assembler {
             RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
             COND_ARGS);
   void Call(Label* target);
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    Call(target, rmode);
+  }
   void Ret(COND_ARGS);
   inline void Ret(BranchDelaySlot bd, Condition cond = al,
                   Register rs = zero_reg, const Operand& rt = Operand(zero_reg)) {

@@ -316,6 +316,10 @@ class TurboAssembler : public Assembler {
   void Call(ExternalReference ext);
   void Call(Label* target) { call(target); }
+  void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
+    call(target, rmode);
+  }
   // The size of the code generated for different call instructions.
   int CallSize(ExternalReference ext);
   int CallSize(Address destination) { return kCallSequenceLength; }