Revert "Replacing pc with trampoline on stack"
This reverts commit a01ac7cbd9.
Reason for revert: Causes flakes on gc stress:
https://build.chromium.org/p/client.v8/builders/V8%20Linux64%20GC%20Stress%20-%20custom%20snapshot/builds/14218
Original change's description:
> Replacing pc with trampoline on stack
>
> This CL is the follow up of https://chromium-review.googlesource.com/c/586707/
> which used to crash when running the gc-stress bots.
> It seems to be working now. We now keep the trampoline PC in the Safepoint
> table and use that information to find SafepointEntries.
>
> There's some refactoring that can be done, such as changing the code for
> exceptions in a similar way and removing the trampoline from the
> DeoptimizationInputData. Will take care of this in the next CL.
>
> Bug: v8:6563
> Change-Id: I02565297093620023a1155b55d76a4dafcb54794
> Reviewed-on: https://chromium-review.googlesource.com/593622
> Commit-Queue: Juliana Patricia Vicente Franco <jupvfranco@google.com>
> Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
> Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#47030}
TBR=jarin@chromium.org,bmeurer@chromium.org,jupvfranco@google.com
Change-Id: Ie9929c9acae321a91014b76b9008f8835313e67d
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:6563
Reviewed-on: https://chromium-review.googlesource.com/595927
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#47038}
parent fcaa2c2e57
commit 3138850ed6
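The approach described in the reverted change — recording, for each lazy deoptimization point, the pc offset of its deopt trampoline in the safepoint table, and letting safepoint lookup match a frame's pc against either the original return PC or the trampoline that replaced it on the stack — can be illustrated with a small standalone sketch. The types and names below are hypothetical stand-ins for illustration only, not V8's actual classes:

// Minimal standalone sketch of the reverted mechanism (hypothetical types,
// not V8's actual SafepointTable/SafepointEntry API).
#include <cstdint>
#include <iostream>
#include <vector>

struct SafepointEntrySketch {
  uint32_t pc_offset;    // return-address offset recorded at the call site
  uint32_t deopt_index;  // index into the deoptimization data
  int trampoline_pc;     // offset of the deopt trampoline, -1 for eager deopts
};

class SafepointTableSketch {
 public:
  void Define(uint32_t pc_offset, uint32_t deopt_index) {
    entries_.push_back({pc_offset, deopt_index, -1});
  }

  // Analogous to SafepointTableBuilder::UpdateDeoptimizationInfo in the diff
  // below: while assembling the deoptimization exits, record which trampoline
  // belongs to the safepoint at {pc_offset}.
  void UpdateTrampoline(uint32_t pc_offset, int trampoline_pc) {
    for (SafepointEntrySketch& e : entries_) {
      if (e.pc_offset == pc_offset) e.trampoline_pc = trampoline_pc;
    }
  }

  // Analogous to SafepointTable::FindEntry in the diff below: the lookup
  // succeeds both for the original return pc and for the trampoline pc that
  // the deoptimizer may have written onto the stack in its place.
  const SafepointEntrySketch* FindEntry(uint32_t pc_offset) const {
    for (const SafepointEntrySketch& e : entries_) {
      if (e.pc_offset == pc_offset ||
          (e.trampoline_pc >= 0 &&
           static_cast<uint32_t>(e.trampoline_pc) == pc_offset)) {
        return &e;
      }
    }
    return nullptr;
  }

 private:
  std::vector<SafepointEntrySketch> entries_;
};

int main() {
  SafepointTableSketch table;
  table.Define(/*pc_offset=*/48, /*deopt_index=*/0);
  table.UpdateTrampoline(/*pc_offset=*/48, /*trampoline_pc=*/112);

  // After the return pc 48 has been replaced on the stack by the trampoline
  // at offset 112, a stack walk still finds the same safepoint entry.
  const SafepointEntrySketch* entry = table.FindEntry(112);
  std::cout << (entry ? static_cast<int>(entry->deopt_index) : -1) << "\n";
  return 0;
}

The diff below removes the real counterparts of these pieces: the extra trampoline column in the safepoint table, SafepointTableBuilder::UpdateDeoptimizationInfo, and the trampoline-aware match in SafepointTable::FindEntry.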
@@ -2688,10 +2688,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       __ isolate(), deoptimization_id, bailout_type);
   // TODO(turbofan): We should be able to generate better code by sharing the

@@ -2365,10 +2365,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       __ isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;

@@ -81,23 +81,6 @@ void CodeGenerator::CreateFrameAccessState(Frame* frame) {
   frame_access_state_ = new (zone()) FrameAccessState(frame);
 }
 
-Deoptimizer::BailoutType CodeGenerator::DeoptimizerCallBailout(
-    int deoptimization_id, SourcePosition pos) {
-  DeoptimizeKind deopt_kind = GetDeoptimizationKind(deoptimization_id);
-  switch (deopt_kind) {
-    case DeoptimizeKind::kSoft: {
-      return Deoptimizer::SOFT;
-    }
-    case DeoptimizeKind::kEager: {
-      return Deoptimizer::EAGER;
-    }
-    case DeoptimizeKind::kLazy: {
-      return Deoptimizer::LAZY;
-    }
-  }
-  UNREACHABLE();
-}
-
 void CodeGenerator::AssembleCode() {
   CompilationInfo* info = this->info();
 
@@ -208,27 +191,16 @@ void CodeGenerator::AssembleCode() {
     }
   }
 
-  // This nop operation is needed to ensure that the trampoline is not
-  // confused with the pc of the call before deoptimization.
-  // The test regress/regress-259 is an example of where we need it.
-  tasm()->nop();
-
-  // Assemble deoptimization exits.
+  // Assemble all eager deoptimization exits.
   for (DeoptimizationExit* exit : deoptimization_exits_) {
     tasm()->bind(exit->label());
-    int trampoline_pc = tasm()->pc_offset();
-    int deoptimization_id = exit->deoptimization_id();
-    DeoptimizationState* ds = deoptimization_states_[deoptimization_id];
-
-    // TODO(juliana) maybe we can optimize this if we store the last index
-    if (ds->kind() == DeoptimizeKind::kLazy) {
-      safepoints()->UpdateDeoptimizationInfo(ds->pc_offset(), trampoline_pc);
-    }
-    ds->set_trampoline_pc(trampoline_pc);
-    AssembleDeoptimizerCall(deoptimization_id, exit->pos());
+    AssembleDeoptimizerCall(exit->deoptimization_id(), exit->pos());
   }
 
-  // TODO(juliana): check if we still need this.
   // Ensure there is space for lazy deoptimization in the code.
   if (info->ShouldEnsureSpaceForLazyDeopt()) {
     int target_offset = tasm()->pc_offset() + Deoptimizer::patch_size();

@@ -149,9 +149,6 @@ class CodeGenerator final : public GapResolver::Assembler {
   // adjusted stack pointer is returned in |slot|.
   bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);
 
-  Deoptimizer::BailoutType DeoptimizerCallBailout(int deoptimization_id,
-                                                  SourcePosition pos);
-
   // ===========================================================================
   // ============= Architecture-specific code generation methods. ==============
   // ===========================================================================

@@ -2401,10 +2401,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       __ isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;

@@ -794,7 +794,7 @@ void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
   }
 
   int const state_id = sequence()->AddDeoptimizationEntry(
-      buffer->frame_state_descriptor, DeoptimizeKind::kLazy,
+      buffer->frame_state_descriptor, DeoptimizeKind::kEager,
       DeoptimizeReason::kNoReason);
   buffer->instruction_args.push_back(g.TempImmediate(state_id));
 
@@ -2723,7 +2723,6 @@ Instruction* InstructionSelector::EmitDeoptimize(
     args.push_back(inputs[i]);
   }
   opcode |= MiscField::encode(static_cast<int>(input_count));
-  DCHECK_NE(DeoptimizeKind::kLazy, kind);
   int const state_id =
       sequence()->AddDeoptimizationEntry(descriptor, kind, reason);
   args.push_back(g.TempImmediate(state_id));

@@ -3119,10 +3119,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;

@@ -3417,10 +3417,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       tasm()->isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;

@@ -2904,10 +2904,12 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, SourcePosition pos) {
+  DeoptimizeKind deoptimization_kind = GetDeoptimizationKind(deoptimization_id);
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
   Deoptimizer::BailoutType bailout_type =
-      DeoptimizerCallBailout(deoptimization_id, pos);
+      deoptimization_kind == DeoptimizeKind::kSoft ? Deoptimizer::SOFT
+                                                   : Deoptimizer::EAGER;
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       __ isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;

@@ -222,9 +222,10 @@ void Deoptimizer::VisitAllOptimizedFunctions(
   }
 }
 
+
 // Unlink functions referring to code marked for deoptimization, then move
 // marked code from the optimized code list to the deoptimized code list,
-// and replace pc on the stack for codes marked for deoptimization.
+// and patch code for lazy deopt.
 void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
   DisallowHeapAllocation no_allocation;
 
@@ -305,7 +306,10 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
 #endif
 
   // Move marked code from the optimized code list to the deoptimized
-  // code list.
+  // code list, collecting them into a ZoneList.
+  Zone zone(isolate->allocator(), ZONE_NAME);
+  ZoneList<Code*> codes(10, &zone);
+
   // Walk over all optimized code objects in this native context.
   Code* prev = NULL;
   Object* element = context->OptimizedCodeListHead();

@@ -315,6 +319,9 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
     Object* next = code->next_code_link();
 
     if (code->marked_for_deoptimization()) {
+      // Put the code into the list for later patching.
+      codes.Add(code, &zone);
+
       if (prev != NULL) {
         // Skip this code in the optimized code list.
         prev->set_next_code_link(next);

@@ -333,24 +340,26 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
     element = next;
   }
 
-  // Finds the with activations of codes marked for deoptimization, search for
-  // the trampoline to the deoptimizer call respective to each code, and use it
-  // to replace the current pc on the stack.
-  for (StackFrameIterator it(isolate, isolate->thread_local_top()); !it.done();
-       it.Advance()) {
-    if (it.frame()->type() == StackFrame::OPTIMIZED) {
-      Code* code = it.frame()->LookupCode();
-      if (code->kind() == Code::OPTIMIZED_FUNCTION &&
-          code->marked_for_deoptimization()) {
-        // Obtain the trampoline to the deoptimizer call.
-        SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
-        int trampoline_pc = safepoint.trampoline_pc();
-        DCHECK_IMPLIES(code == topmost_optimized_code,
-                       safe_to_deopt_topmost_optimized_code);
-        // Replace the current pc on the stack with the trampoline.
-        it.frame()->set_pc(code->instruction_start() + trampoline_pc);
-      }
+  // We need a handle scope only because of the macro assembler,
+  // which is used in code patching in EnsureCodeForDeoptimizationEntry.
+  HandleScope scope(isolate);
+
+  // Now patch all the codes for deoptimization.
+  for (int i = 0; i < codes.length(); i++) {
+#ifdef DEBUG
+    if (codes[i] == topmost_optimized_code) {
+      DCHECK(safe_to_deopt_topmost_optimized_code);
+    }
+#endif
+    // It is finally time to die, code object.
+
+    // Do platform-specific patching to force any activations to lazy deopt.
+    PatchCodeForDeoptimization(isolate, codes[i]);
+
+    // We might be in the middle of incremental marking with compaction.
+    // Tell collector to treat this code object in a special way and
+    // ignore all slots that might have been recorded on it.
+    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
   }
 }
 
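The hunk above removes the stack-walking pass that the reverted change used in Deoptimizer::DeoptimizeMarkedCodeForContext and restores the collect-and-patch loop. A rough standalone sketch of the removed pass's idea, again with hypothetical types rather than V8's frame and code objects:

// Hypothetical model of the removed pass: for every optimized frame whose code
// is marked for deoptimization, overwrite the saved return pc with the code's
// deopt trampoline so that, when control returns to that activation, it enters
// the deoptimizer instead of the invalidated optimized code.
#include <cstdint>
#include <vector>

struct CodeSketch {
  uintptr_t instruction_start;
  bool marked_for_deoptimization;
  int trampoline_pc;  // pc offset of the lazy-deopt trampoline, -1 if absent
};

struct FrameSketch {
  CodeSketch* code;  // code object this activation is executing
  uintptr_t pc;      // saved return address of the activation
};

void ReplaceReturnPcsWithTrampolines(std::vector<FrameSketch>& stack) {
  for (FrameSketch& frame : stack) {
    CodeSketch* code = frame.code;
    if (code->marked_for_deoptimization && code->trampoline_pc >= 0) {
      frame.pc = code->instruction_start + code->trampoline_pc;
    }
  }
}

int main() {
  CodeSketch code{0x1000, true, 112};
  std::vector<FrameSketch> frames = {{&code, 0x1000 + 48}};
  ReplaceReturnPcsWithTrampolines(frames);
  return frames[0].pc == 0x1000 + 112 ? 0 : 1;  // pc now points at the trampoline
}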
@@ -986,7 +995,6 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
     DebugPrintOutputSlot(reinterpret_cast<intptr_t>(smi_bytecode_offset),
                          frame_index, output_offset, "bytecode offset @ ");
     PrintF(trace_scope_->file(), "%d\n", bytecode_offset);
-    PrintF(trace_scope_->file(), "  (input #0)\n");
     PrintF(trace_scope_->file(), "  -------------------------\n");
   }
 
@@ -1478,19 +1478,6 @@ int OptimizedFrame::LookupExceptionHandlerInTable(
   HandlerTable* table = HandlerTable::cast(code->handler_table());
   int pc_offset = static_cast<int>(pc() - code->entry());
   if (stack_slots) *stack_slots = code->stack_slots();
-
-  // When the return pc has been replaced by a trampoline there won't be
-  // a handler for this trampoline. Thus we need to use the return pc that
-  // _used to be_ on the stack to get the right ExceptionHandler.
-  if (code->kind() == Code::OPTIMIZED_FUNCTION &&
-      code->marked_for_deoptimization()) {
-    DeoptimizationInputData* deopt_table =
-        DeoptimizationInputData::cast(code->deoptimization_data());
-    int ret_pc = deopt_table->TrampolinePcToReturnPc(pc_offset);
-    if (ret_pc != -1) {
-      return table->LookupReturn(ret_pc);
-    }
-  }
   return table->LookupReturn(pc_offset);
 }
 
@@ -361,7 +361,7 @@ const int kNoSourcePosition = -1;
 const int kNoDeoptimizationId = -1;
 
 // Deoptimize bailout kind.
-enum class DeoptimizeKind : uint8_t { kEager, kSoft, kLazy };
+enum class DeoptimizeKind : uint8_t { kEager, kSoft };
 inline size_t hash_value(DeoptimizeKind kind) {
   return static_cast<size_t>(kind);
 }

@@ -371,8 +371,6 @@ inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
       return os << "Eager";
     case DeoptimizeKind::kSoft:
       return os << "Soft";
-    case DeoptimizeKind::kLazy:
-      return os << "Lazy";
   }
   UNREACHABLE();
 }

@@ -14423,16 +14423,6 @@ void Code::PrintExtraICState(std::ostream& os, // NOLINT
 
 #endif  // defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
 
-int DeoptimizationInputData::TrampolinePcToReturnPc(int pc_offset) {
-  int deopt_total = DeoptCount();
-  for (int i = 0; i < deopt_total; i++) {
-    if (TrampolinePc(i)->value() == pc_offset) {
-      return Pc(i)->value();
-    }
-  }
-  return -1;
-}
-
 #ifdef ENABLE_DISASSEMBLER
 
 namespace {

@@ -3590,8 +3590,6 @@ class DeoptimizationInputData: public FixedArray {
 
   inline int DeoptCount();
 
-  int TrampolinePcToReturnPc(int pc_offset);
-
   static const int kNotInlinedIndex = -1;
 
   // Returns the inlined function at the given position in LiteralArray, or the

@@ -42,7 +42,8 @@ SafepointTable::SafepointTable(Code* code) {
   length_ = Memory::uint32_at(header + kLengthOffset);
   entry_size_ = Memory::uint32_at(header + kEntrySizeOffset);
   pc_and_deoptimization_indexes_ = header + kHeaderSize;
-  entries_ = pc_and_deoptimization_indexes_ + (length_ * kFixedEntrySize);
+  entries_ = pc_and_deoptimization_indexes_ +
+             (length_ * kPcAndDeoptimizationIndexSize);
   DCHECK(entry_size_ > 0);
   STATIC_ASSERT(SafepointEntry::DeoptimizationIndexField::kMax ==
                 Safepoint::kNoDeoptimizationIndex);

@@ -58,11 +59,8 @@ SafepointEntry SafepointTable::FindEntry(Address pc) const {
   if (len == 1 && GetPcOffset(0) == kMaxUInt32) return GetEntry(0);
   for (unsigned i = 0; i < len; i++) {
     // TODO(kasperl): Replace the linear search with binary search.
-    if (GetPcOffset(i) == pc_offset || GetTrampolinePcOffset(i) == pc_offset) {
-      return GetEntry(i);
-    }
+    if (GetPcOffset(i) == pc_offset) return GetEntry(i);
   }
   UNREACHABLE();
   return SafepointEntry();
 }

@@ -117,7 +115,6 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
   info.pc = assembler->pc_offset();
   info.arguments = arguments;
   info.has_doubles = (kind & Safepoint::kWithDoubles);
-  info.trampoline = -1;
   deoptimization_info_.Add(info, zone_);
   deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex, zone_);
   if (deopt_mode == Safepoint::kNoLazyDeopt) {

@@ -143,20 +140,6 @@ unsigned SafepointTableBuilder::GetCodeOffset() const {
   return offset_;
 }
 
-void SafepointTableBuilder::UpdateDeoptimizationInfo(int pc, int trampoline) {
-  int index = -1;
-  for (int i = 0; i < deoptimization_info_.length(); i++) {
-    if (static_cast<int>(deoptimization_info_[i].pc) == pc) {
-      index = i;
-      break;
-    }
-  }
-  DCHECK(index >= 0);
-  DCHECK(index < deoptimization_info_.length());
-  if (index >= 0) {
-    deoptimization_info_[index].trampoline = trampoline;
-  }
-}
-
 void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
   RemoveDuplicates();

@@ -183,7 +166,6 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
     assembler->dd(deoptimization_info_[i].pc);
     assembler->dd(EncodeExceptPC(deoptimization_info_[i],
                                  deopt_index_list_[i]));
-    assembler->dd(deoptimization_info_[i].trampoline);
   }
 
   // Emit table of bitmaps.

@@ -18,10 +18,9 @@ struct Register;
 
 class SafepointEntry BASE_EMBEDDED {
  public:
-  SafepointEntry() : info_(0), bits_(NULL), trampoline_pc_(-1) {}
+  SafepointEntry() : info_(0), bits_(NULL) {}
 
-  SafepointEntry(unsigned info, uint8_t* bits, int trampoline_pc)
-      : info_(info), bits_(bits), trampoline_pc_(trampoline_pc) {
+  SafepointEntry(unsigned info, uint8_t* bits) : info_(info), bits_(bits) {
     DCHECK(is_valid());
   }
 
@@ -41,15 +40,10 @@ class SafepointEntry BASE_EMBEDDED {
     return DeoptimizationIndexField::decode(info_);
   }
 
-  int trampoline_pc() { return trampoline_pc_; }
-
-  void set_trampoline_pc(int trampoline_pc) { trampoline_pc_ = trampoline_pc; }
-
   static const int kArgumentsFieldBits = 3;
   static const int kSaveDoublesFieldBits = 1;
   static const int kDeoptIndexBits =
       32 - kArgumentsFieldBits - kSaveDoublesFieldBits;
 
   class DeoptimizationIndexField:
       public BitField<int, 0, kDeoptIndexBits> {};  // NOLINT
   class ArgumentsField:

@@ -82,8 +76,6 @@ class SafepointEntry BASE_EMBEDDED {
  private:
   unsigned info_;
   uint8_t* bits_;
-  // It needs to be an integer as it is -1 for eager deoptimizations.
-  int trampoline_pc_;
 };
 
 
@@ -92,7 +84,8 @@ class SafepointTable BASE_EMBEDDED {
   explicit SafepointTable(Code* code);
 
   int size() const {
-    return kHeaderSize + (length_ * (kFixedEntrySize + entry_size_));
+    return kHeaderSize +
+           (length_ * (kPcAndDeoptimizationIndexSize + entry_size_));
   }
   unsigned length() const { return length_; }
   unsigned entry_size() const { return entry_size_; }

@@ -102,17 +95,11 @@ class SafepointTable BASE_EMBEDDED {
     return Memory::uint32_at(GetPcOffsetLocation(index));
   }
 
-  unsigned GetTrampolinePcOffset(unsigned index) const {
-    DCHECK(index < length_);
-    return Memory::int_at(GetTrampolineLocation(index));
-  }
-
   SafepointEntry GetEntry(unsigned index) const {
     DCHECK(index < length_);
     unsigned info = Memory::uint32_at(GetInfoLocation(index));
     uint8_t* bits = &Memory::uint8_at(entries_ + (index * entry_size_));
-    int trampoline_pc = Memory::int_at(GetTrampolineLocation(index));
-    return SafepointEntry(info, bits, trampoline_pc);
+    return SafepointEntry(info, bits);
   }
 
   // Returns the entry for the given pc.

@@ -123,26 +110,22 @@ class SafepointTable BASE_EMBEDDED {
  private:
   static const uint8_t kNoRegisters = 0xFF;
 
   // Layout information
   static const int kLengthOffset = 0;
   static const int kEntrySizeOffset = kLengthOffset + kIntSize;
   static const int kHeaderSize = kEntrySizeOffset + kIntSize;
-  static const int kPcOffset = 0;
-  static const int kDeoptimizationIndexOffset = kPcOffset + kIntSize;
-  static const int kTrampolinePcOffset = kDeoptimizationIndexOffset + kIntSize;
-  static const int kFixedEntrySize = kTrampolinePcOffset + kIntSize;
 
+  static const int kPcSize = kIntSize;
+  static const int kDeoptimizationIndexSize = kIntSize;
+  static const int kPcAndDeoptimizationIndexSize =
+      kPcSize + kDeoptimizationIndexSize;
 
   Address GetPcOffsetLocation(unsigned index) const {
-    return pc_and_deoptimization_indexes_ + (index * kFixedEntrySize);
+    return pc_and_deoptimization_indexes_ +
+           (index * kPcAndDeoptimizationIndexSize);
   }
 
-  // TODO(juliana): rename this to GetDeoptimizationIndexLocation
   Address GetInfoLocation(unsigned index) const {
-    return GetPcOffsetLocation(index) + kDeoptimizationIndexOffset;
-  }
-
-  Address GetTrampolineLocation(unsigned index) const {
-    return GetPcOffsetLocation(index) + kTrampolinePcOffset;
+    return GetPcOffsetLocation(index) + kPcSize;
   }
 
   static void PrintBits(std::ostream& os,  // NOLINT

@@ -224,18 +207,12 @@ class SafepointTableBuilder BASE_EMBEDDED {
   // entry must be enough to hold all the pointer indexes.
   void Emit(Assembler* assembler, int bits_per_entry);
 
-  // Find the Deoptimization Info with pc offset {pc} and update its
-  // trampoline field. Calling this function ensures that the safepoint
-  // table contains the trampoline PC {trampoline} that replaced the
-  // return PC {pc} on the stack.
-  void UpdateDeoptimizationInfo(int pc, int trampoline);
-
  private:
   struct DeoptimizationInfo {
     unsigned pc;
     unsigned arguments;
     bool has_doubles;
-    int trampoline;
   };
 
   uint32_t EncodeExceptPC(const DeoptimizationInfo& info, unsigned index);