[cleanup] Remove safepoint table register support.

This removes dead and obsolete support for batch-saved registers from the
safepoint table. We no longer spill the entire register window (either
double or general-purpose) from optimized code. All spills now happen as
part of the normal spill slots on the stack.

R=clemensh@chromium.org,jarin@chromium.org
BUG=v8:9183

Change-Id: I5a2be7a543fa3e44d71ab1a35c722da0d458765c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1627531
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61815}
Michael Starzinger 2019-05-23 17:34:51 +02:00 committed by Commit Bot
parent 70eeb22d1c
commit 8e71afad97
15 changed files with 48 additions and 209 deletions
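
Editor's note, an illustrative sketch that is not part of the original commit: the change is
visible at call sites roughly as below. The new signatures match the diff that follows; the
old ones are the removed overloads, and the surrounding variable names (tasm(), index, reg)
are taken from the call sites shown further down.

// Before this change: a Safepoint::Kind selected whether registers and/or
// doubles were batch-saved, and individual registers could be recorded.
Safepoint safepoint = safepoints()->DefineSafepoint(
    tasm(), Safepoint::kSimple, Safepoint::kNoLazyDeopt);
safepoint.DefinePointerSlot(index);      // tagged value in a stack spill slot
safepoint.DefinePointerRegister(reg);    // tagged value in a register (removed)

// After this change: only stack spill slots are recorded; the Kind parameter
// and DefinePointerRegister are gone.
Safepoint safepoint = safepoints()->DefineSafepoint(tasm(), Safepoint::kNoLazyDeopt);
safepoint.DefinePointerSlot(index);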

View File

@ -14,24 +14,6 @@
namespace v8 {
namespace internal {
bool SafepointEntry::HasRegisters() const {
DCHECK(is_valid());
DCHECK(IsAligned(kNumSafepointRegisters, kBitsPerByte));
const int num_reg_bytes = kNumSafepointRegisters >> kBitsPerByteLog2;
for (int i = 0; i < num_reg_bytes; i++) {
if (bits_[i] != SafepointTable::kNoRegisters) return true;
}
return false;
}
bool SafepointEntry::HasRegisterAt(int reg_index) const {
DCHECK(is_valid());
DCHECK(reg_index >= 0 && reg_index < kNumSafepointRegisters);
int byte_index = reg_index >> kBitsPerByteLog2;
int bit_index = reg_index & (kBitsPerByte - 1);
return (bits_[byte_index] & (1 << bit_index)) != 0;
}
SafepointTable::SafepointTable(Address instruction_start,
size_t safepoint_table_offset,
uint32_t stack_slots, bool has_deopt)
@ -43,9 +25,6 @@ SafepointTable::SafepointTable(Address instruction_start,
entry_size_ = Memory<uint32_t>(header + kEntrySizeOffset);
pc_and_deoptimization_indexes_ = header + kHeaderSize;
entries_ = pc_and_deoptimization_indexes_ + (length_ * kFixedEntrySize);
DCHECK_GT(entry_size_, 0);
STATIC_ASSERT(SafepointEntry::DeoptimizationIndexField::kMax ==
Safepoint::kNoDeoptimizationIndex);
}
SafepointTable::SafepointTable(Code code)
@ -92,20 +71,11 @@ void SafepointTable::PrintEntry(unsigned index,
// Print the stack slot bits.
if (entry_size_ > 0) {
DCHECK(IsAligned(kNumSafepointRegisters, kBitsPerByte));
const int first = kNumSafepointRegisters >> kBitsPerByteLog2;
const int first = 0;
int last = entry_size_ - 1;
for (int i = first; i < last; i++) PrintBits(os, bits[i], kBitsPerByte);
int last_bits = stack_slots_ - ((last - first) * kBitsPerByte);
PrintBits(os, bits[last], last_bits);
// Print the registers (if any).
if (!entry.HasRegisters()) return;
for (int j = 0; j < kNumSafepointRegisters; j++) {
if (entry.HasRegisterAt(j)) {
os << " | " << converter.NameOfCPURegister(j);
}
}
}
}
@ -117,20 +87,15 @@ void SafepointTable::PrintBits(std::ostream& os, // NOLINT
}
}
void Safepoint::DefinePointerRegister(Register reg) {
registers_->push_back(reg.code());
}
Safepoint SafepointTableBuilder::DefineSafepoint(
Assembler* assembler, Safepoint::Kind kind,
Safepoint::DeoptMode deopt_mode) {
Assembler* assembler, Safepoint::DeoptMode deopt_mode) {
deoptimization_info_.push_back(
DeoptimizationInfo(zone_, assembler->pc_offset(), kind));
DeoptimizationInfo(zone_, assembler->pc_offset()));
if (deopt_mode == Safepoint::kNoLazyDeopt) {
last_lazy_safepoint_ = deoptimization_info_.size();
}
DeoptimizationInfo& new_info = deoptimization_info_.back();
return Safepoint(new_info.indexes, new_info.registers);
return Safepoint(new_info.indexes);
}
void SafepointTableBuilder::RecordLazyDeoptimizationIndex(int index) {
@ -166,9 +131,6 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
assembler->RecordComment(";;; Safepoint table.");
offset_ = assembler->pc_offset();
// Take the register bits into account.
bits_per_entry += kNumSafepointRegisters;
// Compute the number of bytes per safepoint entry.
int bytes_per_entry =
RoundUp(bits_per_entry, kBitsPerByte) >> kBitsPerByteLog2;
@ -189,7 +151,7 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
STATIC_ASSERT(SafepointTable::kFixedEntrySize == 3 * kIntSize);
for (const DeoptimizationInfo& info : deoptimization_info_) {
assembler->dd(info.pc);
assembler->dd(EncodeExceptPC(info));
assembler->dd(info.deopt_index);
assembler->dd(info.trampoline);
}
@ -197,25 +159,8 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
ZoneVector<uint8_t> bits(bytes_per_entry, 0, zone_);
for (const DeoptimizationInfo& info : deoptimization_info_) {
ZoneChunkList<int>* indexes = info.indexes;
ZoneChunkList<int>* registers = info.registers;
std::fill(bits.begin(), bits.end(), 0);
// Run through the registers (if any).
DCHECK(IsAligned(kNumSafepointRegisters, kBitsPerByte));
if (registers == nullptr) {
const int num_reg_bytes = kNumSafepointRegisters >> kBitsPerByteLog2;
for (int j = 0; j < num_reg_bytes; j++) {
bits[j] = SafepointTable::kNoRegisters;
}
} else {
for (int index : *registers) {
DCHECK(index >= 0 && index < kNumSafepointRegisters);
int byte_index = index >> kBitsPerByteLog2;
int bit_index = index & (kBitsPerByte - 1);
bits[byte_index] |= (1 << bit_index);
}
}
// Run through the indexes and build a bitmap.
for (int idx : *indexes) {
int index = bits_per_entry - 1 - idx;
@ -232,11 +177,6 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
emitted_ = true;
}
uint32_t SafepointTableBuilder::EncodeExceptPC(const DeoptimizationInfo& info) {
return SafepointEntry::DeoptimizationIndexField::encode(info.deopt_index) |
SafepointEntry::SaveDoublesField::encode(info.has_doubles);
}
void SafepointTableBuilder::RemoveDuplicates() {
// If the table contains more than one entry, and all entries are identical
// (except for the pc), replace the whole table by a single entry with pc =
@ -260,7 +200,6 @@ void SafepointTableBuilder::RemoveDuplicates() {
bool SafepointTableBuilder::IsIdenticalExceptForPc(
const DeoptimizationInfo& info1, const DeoptimizationInfo& info2) const {
if (info1.has_doubles != info2.has_doubles) return false;
if (info1.deopt_index != info2.deopt_index) return false;
ZoneChunkList<int>* indexes1 = info1.indexes;
@ -270,19 +209,6 @@ bool SafepointTableBuilder::IsIdenticalExceptForPc(
return false;
}
ZoneChunkList<int>* registers1 = info1.registers;
ZoneChunkList<int>* registers2 = info2.registers;
if (registers1) {
if (!registers2) return false;
if (registers1->size() != registers2->size()) return false;
if (!std::equal(registers1->begin(), registers1->end(),
registers2->begin())) {
return false;
}
} else if (registers2) {
return false;
}
return true;
}
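
A minimal standalone sketch of the simplified bitmap encoding in Emit() above, assuming the
stack-slot indexes are already in table order; the helper name and vector types are
illustrative, not V8 API. The real Emit() additionally mirrors each index as
bits_per_entry - 1 - idx before setting the bit.

#include <cstdint>
#include <vector>

// One bit per stack slot, rounded up to whole bytes. With register support
// removed, no leading register bytes are reserved any more.
std::vector<uint8_t> EncodeStackSlotBits(const std::vector<int>& tagged_slots,
                                         int bits_per_entry) {
  const int kBitsPerByte = 8;
  const int bytes_per_entry =
      (bits_per_entry + kBitsPerByte - 1) / kBitsPerByte;  // RoundUp
  std::vector<uint8_t> bits(bytes_per_entry, 0);
  for (int idx : tagged_slots) {
    int byte_index = idx / kBitsPerByte;  // idx >> kBitsPerByteLog2
    int bit_index = idx % kBitsPerByte;   // idx & (kBitsPerByte - 1)
    bits[byte_index] |= (1 << bit_index);
  }
  return bits;
}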

View File

@ -15,52 +15,38 @@
namespace v8 {
namespace internal {
class Register;
class SafepointEntry {
public:
SafepointEntry() : info_(0), bits_(nullptr), trampoline_pc_(-1) {}
SafepointEntry() : deopt_index_(0), bits_(nullptr), trampoline_pc_(-1) {}
SafepointEntry(unsigned info, uint8_t* bits, int trampoline_pc)
: info_(info), bits_(bits), trampoline_pc_(trampoline_pc) {
SafepointEntry(unsigned deopt_index, uint8_t* bits, int trampoline_pc)
: deopt_index_(deopt_index), bits_(bits), trampoline_pc_(trampoline_pc) {
DCHECK(is_valid());
}
bool is_valid() const { return bits_ != nullptr; }
bool Equals(const SafepointEntry& other) const {
return info_ == other.info_ && bits_ == other.bits_;
return deopt_index_ == other.deopt_index_ && bits_ == other.bits_;
}
void Reset() {
info_ = 0;
deopt_index_ = 0;
bits_ = nullptr;
}
int trampoline_pc() { return trampoline_pc_; }
static const int kSaveDoublesFieldBits = 1;
static const int kDeoptIndexBits = 32 - kSaveDoublesFieldBits;
class DeoptimizationIndexField : public BitField<int, 0, kDeoptIndexBits> {};
class SaveDoublesField
: public BitField<bool, DeoptimizationIndexField::kNext,
kSaveDoublesFieldBits> {};
static const unsigned kNoDeoptIndex = kMaxUInt32;
int deoptimization_index() const {
DCHECK(is_valid() && has_deoptimization_index());
return DeoptimizationIndexField::decode(info_);
return deopt_index_;
}
bool has_deoptimization_index() const {
DCHECK(is_valid());
return DeoptimizationIndexField::decode(info_) !=
DeoptimizationIndexField::kMax;
}
bool has_doubles() const {
DCHECK(is_valid());
return SaveDoublesField::decode(info_);
return deopt_index_ != kNoDeoptIndex;
}
uint8_t* bits() {
@ -68,11 +54,8 @@ class SafepointEntry {
return bits_;
}
bool HasRegisters() const;
bool HasRegisterAt(int reg_index) const;
private:
unsigned info_;
unsigned deopt_index_;
uint8_t* bits_;
// It needs to be an integer as it is -1 for eager deoptimizations.
int trampoline_pc_;
@ -105,11 +88,11 @@ class SafepointTable {
SafepointEntry GetEntry(unsigned index) const {
DCHECK(index < length_);
unsigned info = Memory<uint32_t>(GetEncodedInfoLocation(index));
unsigned deopt_index = Memory<uint32_t>(GetEncodedInfoLocation(index));
uint8_t* bits = &Memory<uint8_t>(entries_ + (index * entry_size_));
int trampoline_pc =
has_deopt_ ? Memory<int>(GetTrampolineLocation(index)) : -1;
return SafepointEntry(info, bits, trampoline_pc);
return SafepointEntry(deopt_index, bits, trampoline_pc);
}
// Returns the entry for the given pc.
@ -162,26 +145,15 @@ class SafepointTable {
class Safepoint {
public:
typedef enum {
kSimple = 0,
kWithRegisters = 1 << 0,
kWithDoubles = 1 << 1,
kWithRegistersAndDoubles = kWithRegisters | kWithDoubles
} Kind;
enum DeoptMode { kNoLazyDeopt, kLazyDeopt };
static const int kNoDeoptimizationIndex =
SafepointEntry::DeoptimizationIndexField::kMax;
static const int kNoDeoptimizationIndex = SafepointEntry::kNoDeoptIndex;
void DefinePointerSlot(int index) { indexes_->push_back(index); }
void DefinePointerRegister(Register reg);
private:
Safepoint(ZoneChunkList<int>* indexes, ZoneChunkList<int>* registers)
: indexes_(indexes), registers_(registers) {}
explicit Safepoint(ZoneChunkList<int>* indexes) : indexes_(indexes) {}
ZoneChunkList<int>* const indexes_;
ZoneChunkList<int>* const registers_;
friend class SafepointTableBuilder;
};
@ -198,8 +170,7 @@ class SafepointTableBuilder {
unsigned GetCodeOffset() const;
// Define a new safepoint for the current position in the body.
Safepoint DefineSafepoint(Assembler* assembler, Safepoint::Kind kind,
Safepoint::DeoptMode mode);
Safepoint DefineSafepoint(Assembler* assembler, Safepoint::DeoptMode mode);
// Record deoptimization index for lazy deoptimization for the last
// outstanding safepoints.
@ -222,26 +193,16 @@ class SafepointTableBuilder {
struct DeoptimizationInfo {
unsigned pc;
unsigned deopt_index;
bool has_doubles;
int trampoline;
ZoneChunkList<int>* indexes;
ZoneChunkList<int>* registers;
DeoptimizationInfo(Zone* zone, unsigned pc, Safepoint::Kind kind)
DeoptimizationInfo(Zone* zone, unsigned pc)
: pc(pc),
deopt_index(Safepoint::kNoDeoptimizationIndex),
has_doubles(kind & Safepoint::kWithDoubles),
trampoline(-1),
indexes(new (zone) ZoneChunkList<int>(
zone, ZoneChunkList<int>::StartMode::kSmall)),
registers(kind & Safepoint::kWithRegisters
? new (zone) ZoneChunkList<int>(
zone, ZoneChunkList<int>::StartMode::kSmall)
: nullptr) {}
zone, ZoneChunkList<int>::StartMode::kSmall)) {}
};
// Encodes all fields of a {DeoptimizationInfo} except {pc} and {trampoline}.
uint32_t EncodeExceptPC(const DeoptimizationInfo&);
// Compares all fields of a {DeoptimizationInfo} except {pc} and {trampoline}.
bool IsIdenticalExceptForPc(const DeoptimizationInfo&,
const DeoptimizationInfo&) const;
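
For clarity, a minimal model of the encoding change in SafepointEntry (an illustrative struct,
not the real class): the 32-bit info word previously packed a deoptimization index and a
save-doubles bit via BitFields; it is now simply the deoptimization index, with kMaxUInt32 as
the "no deopt" sentinel.

#include <cstdint>

constexpr uint32_t kNoDeoptIndex = 0xFFFFFFFFu;  // kMaxUInt32

struct SafepointEntryModel {
  uint32_t deopt_index = kNoDeoptIndex;
  bool has_deoptimization_index() const {
    return deopt_index != kNoDeoptIndex;
  }
  // has_doubles() and the register accessors no longer exist.
};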

View File

@ -2894,8 +2894,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
}
@ -3063,8 +3062,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
// We come from WebAssembly, there are no references for the GC.
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromThrow));
}

View File

@ -2335,8 +2335,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
// The trap code should never return.
__ Brk(0);
@ -2508,8 +2507,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
// We come from WebAssembly, there are no references for the GC.
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ Brk(0);
}

View File

@ -440,9 +440,8 @@ bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
}
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
Safepoint::Kind kind,
Safepoint::DeoptMode deopt_mode) {
Safepoint safepoint = safepoints()->DefineSafepoint(tasm(), kind, deopt_mode);
Safepoint safepoint = safepoints()->DefineSafepoint(tasm(), deopt_mode);
int stackSlotToSpillSlotDelta =
frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
for (const InstructionOperand& operand : references->reference_operands()) {
@ -456,9 +455,6 @@ void CodeGenerator::RecordSafepoint(ReferenceMap* references,
// knowledge about those fields anyway.
if (index < stackSlotToSpillSlotDelta) continue;
safepoint.DefinePointerSlot(index);
} else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
Register reg = LocationOperand::cast(operand).GetRegister();
safepoint.DefinePointerRegister(reg);
}
}
}
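
Worked sketch of the slot filtering in RecordSafepoint above, with assumed frame sizes (the
function and the numbers are illustrative, not V8 code): only indexes at or above the
frame-slot/spill-slot delta are recorded as tagged.

#include <vector>

// E.g. with 12 total frame slots and 8 spill slots, indexes 0..3 belong to
// the fixed frame and are skipped; indexes 4..11 may be recorded.
std::vector<int> RecordTaggedSpillSlots(const std::vector<int>& reference_slots,
                                        int total_frame_slots,
                                        int spill_slot_count) {
  const int delta = total_frame_slots - spill_slot_count;
  std::vector<int> recorded;
  for (int index : reference_slots) {
    if (index < delta) continue;  // frame field, GC has static knowledge
    recorded.push_back(index);    // corresponds to DefinePointerSlot(index)
  }
  return recorded;
}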
@ -871,9 +867,9 @@ void CodeGenerator::RecordCallPosition(Instruction* instr) {
bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
RecordSafepoint(
instr->reference_map(), Safepoint::kSimple,
needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
RecordSafepoint(instr->reference_map(), needs_frame_state
? Safepoint::kLazyDeopt
: Safepoint::kNoLazyDeopt);
if (flags & CallDescriptor::kHasExceptionHandler) {
InstructionOperandConverter i(this, instr);

View File

@ -129,7 +129,7 @@ class V8_EXPORT_PRIVATE CodeGenerator final : public GapResolver::Assembler {
void AssembleSourcePosition(SourcePosition source_position);
// Record a safepoint with the given pointer map.
void RecordSafepoint(ReferenceMap* references, Safepoint::Kind kind,
void RecordSafepoint(ReferenceMap* references,
Safepoint::DeoptMode deopt_mode);
Zone* zone() const { return zone_; }

View File

@ -3968,8 +3968,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ wasm_call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
__ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
}
}
@ -4279,8 +4278,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ wasm_call(wasm::WasmCode::kWasmStackOverflow,
RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
__ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
__ bind(&done);
}

View File

@ -3155,8 +3155,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
}

View File

@ -3302,8 +3302,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
}

View File

@ -2148,8 +2148,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
}
@ -2388,8 +2387,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
// We come from WebAssembly, there are no references for the GC.
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromThrow));
}

View File

@ -2889,8 +2889,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr,
__ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
}
@ -3088,8 +3087,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
// We come from WebAssembly, there are no references for the GC.
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
if (FLAG_debug_code) {
__ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromThrow));
}

View File

@ -325,8 +325,7 @@ class WasmOutOfLineTrap : public OutOfLineCode {
__ near_call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map =
new (gen_->zone()) ReferenceMap(gen_->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
gen_->RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
__ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
}
}
@ -3807,8 +3806,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ near_call(wasm::WasmCode::kWasmStackOverflow,
RelocInfo::WASM_STUB_CALL);
ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
RecordSafepoint(reference_map, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
RecordSafepoint(reference_map, Safepoint::kNoLazyDeopt);
__ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
__ bind(&done);
}

View File

@ -945,32 +945,6 @@ void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
FullObjectSlot parameters_base(&Memory<Address>(sp()));
FullObjectSlot parameters_limit(frame_header_base.address() - slot_space);
// Skip saved double registers.
if (safepoint_entry.has_doubles()) {
// Number of doubles not known at snapshot time.
DCHECK(!isolate()->serializer_enabled());
parameters_base +=
RegisterConfiguration::Default()->num_allocatable_double_registers() *
kDoubleSize / kSystemPointerSize;
}
// Visit the registers that contain pointers if any.
if (safepoint_entry.HasRegisters()) {
for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
if (safepoint_entry.HasRegisterAt(i)) {
int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
v->VisitRootPointer(Root::kTop, nullptr,
parameters_base + reg_stack_index);
}
}
// Skip the words containing the register values.
parameters_base += kNumSafepointRegisters;
}
// We're done dealing with the register bits.
uint8_t* safepoint_bits = safepoint_entry.bits();
safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
// Visit the rest of the parameters if they are tagged.
if (has_tagged_params) {
v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
@ -979,6 +953,7 @@ void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
DEFINE_ROOT_VALUE(isolate());
// Visit pointer spill slots and locals.
uint8_t* safepoint_bits = safepoint_entry.bits();
for (unsigned index = 0; index < stack_slots; index++) {
int byte_index = index >> kBitsPerByteLog2;
int bit_index = index & (kBitsPerByte - 1);
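
A self-contained sketch of the simplified iteration above (assumed visitor signature, not the
real RootVisitor): with the register bits gone, the safepoint bits describe exactly one bit
per stack slot, so the GC walk reduces to a plain bitmap scan.

#include <cstdint>
#include <vector>

void VisitTaggedSpillSlots(const uint8_t* safepoint_bits, unsigned stack_slots,
                           const std::vector<uintptr_t*>& slot_addresses,
                           void (*visit_root)(uintptr_t*)) {
  for (unsigned index = 0; index < stack_slots; index++) {
    const int byte_index = index >> 3;  // kBitsPerByteLog2
    const int bit_index = index & 7;    // kBitsPerByte - 1
    if (safepoint_bits[byte_index] & (1 << bit_index)) {
      visit_root(slot_addresses[index]);  // tagged value in this spill slot
    }
  }
}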

View File

@ -23,6 +23,7 @@ class ExternalCallbackScope;
class InnerPointerToCodeCache;
class Isolate;
class ObjectVisitor;
class Register;
class RootVisitor;
class StackFrameIteratorBase;
class StringStream;

View File

@ -424,8 +424,7 @@ class LiftoffCompiler {
source_position_table_builder_.AddPosition(
__ pc_offset(), SourcePosition(ool.position), false);
__ CallRuntimeStub(ool.stub);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
DCHECK_EQ(ool.continuation.get()->is_bound(), is_stack_check);
if (!ool.regs_to_save.is_empty()) __ PopRegisters(ool.regs_to_save);
if (is_stack_check) {
@ -1590,8 +1589,7 @@ class LiftoffCompiler {
Register centry = kJavaScriptCallCodeStartRegister;
LOAD_TAGGED_PTR_INSTANCE_FIELD(centry, CEntryStub);
__ CallRuntimeWithCEntry(runtime_function, centry);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
}
Register AddMemoryMasking(Register index, uint32_t* offset,
@ -1710,8 +1708,7 @@ class LiftoffCompiler {
if (input.gp() != param_reg) __ Move(param_reg, input.gp(), kWasmI32);
__ CallRuntimeStub(WasmCode::kWasmMemoryGrow);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
if (kReturnRegister0 != result.gp()) {
__ Move(result.gp(), kReturnRegister0, kWasmI32);
@ -1762,8 +1759,7 @@ class LiftoffCompiler {
__ CallIndirect(imm.sig, call_descriptor, target);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
__ FinishCall(imm.sig, call_descriptor);
} else {
@ -1777,8 +1773,7 @@ class LiftoffCompiler {
Address addr = static_cast<Address>(imm.index);
__ CallNativeWasmCode(addr);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
__ FinishCall(imm.sig, call_descriptor);
}
@ -1915,8 +1910,7 @@ class LiftoffCompiler {
__ PrepareCall(imm.sig, call_descriptor, &target, explicit_instance);
__ CallIndirect(imm.sig, call_descriptor, target);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple,
Safepoint::kNoLazyDeopt);
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
__ FinishCall(imm.sig, call_descriptor);
}