[baseline] Fallback to handle references on heap compilation
If a GC happens between Code object allocation and Code finalization, we might have invalid embedded object references. We fall back and patch the references back to handles, then unbox the handles and relocate.

Bug: v8:11872
Change-Id: I3a7b050c20179c1708eef343ec8266441ab5dca1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3059689
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#75996}
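A minimal sketch of that fallback, assuming a simplified assembler that records a (pc offset, handle location) pair for every raw object pointer it embeds; the names and types here are illustrative only, not the V8 sources:

#include <cstdint>
#include <cstring>
#include <utility>
#include <vector>

using Address = uintptr_t;

// Illustrative stand-in for the assembler's on-heap patching state.
struct OnHeapPatchSketch {
  // (pc offset, handle location) recorded when the raw pointer was emitted.
  std::vector<std::pair<int, Address>> saved_handles_for_raw_object_ptr;
  uint8_t* buffer_start = nullptr;

  // Rewrite every embedded raw object pointer back to the address of its
  // handle, so a later relocation pass can unbox the (possibly moved) object
  // again.
  void FixOnHeapReferences() {
    Address base = reinterpret_cast<Address>(buffer_start);
    for (const auto& p : saved_handles_for_raw_object_ptr) {
      std::memcpy(reinterpret_cast<void*>(base + p.first), &p.second,
                  sizeof(Address));
    }
  }
};

The patch below does the same rewrite per architecture and then lets RelocateFromDesc redo the embedded-object relocation.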
This commit is contained in:
parent 9e0e2c150c
commit 642a467338
@@ -5172,6 +5172,13 @@ void Assembler::RecordConstPool(int size) {
   RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size));
 }
 
+void Assembler::FixOnHeapReferences() {
+  Address base = reinterpret_cast<Address>(buffer_->start());
+  for (auto p : saved_handles_for_raw_object_ptr_) {
+    WriteUnalignedValue(base + p.first, p.second);
+  }
+}
+
 void Assembler::GrowBuffer() {
   DCHECK_EQ(buffer_start_, buffer_->start());
 
@@ -5211,10 +5218,7 @@ void Assembler::GrowBuffer() {
 
   // Patch on-heap references to handles.
   if (previously_on_heap && !buffer_->IsOnHeap()) {
-    Address base = reinterpret_cast<Address>(buffer_->start());
-    for (auto p : saved_handles_for_raw_object_ptr_) {
-      WriteUnalignedValue(base + p.first, p.second);
-    }
+    FixOnHeapReferences();
   }
 
   // None of our relocation types are pc relative pointing outside the code
@@ -328,6 +328,11 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable);
   }
 
+  // This function is called when on-heap-compilation invariants are
+  // invalidated. For instance, when the assembler buffer grows or a GC happens
+  // between Code object allocation and Code object finalization.
+  void FixOnHeapReferences();
+
   // Label operations & relative jumps (PPUM Appendix D)
   //
   // Takes a branch opcode (cc) and a label (L) and generates
@@ -4275,6 +4275,19 @@ bool Assembler::IsImmFP64(double imm) {
   return true;
 }
 
+void Assembler::FixOnHeapReferences() {
+  Address base = reinterpret_cast<Address>(buffer_->start());
+  for (auto p : saved_handles_for_raw_object_ptr_) {
+    WriteUnalignedValue(base + p.first, p.second);
+  }
+  for (auto p : saved_offsets_for_runtime_entries_) {
+    Instruction* instr = reinterpret_cast<Instruction*>(base + p.first);
+    DCHECK(is_int26(p.second));
+    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
+    instr->SetInstructionBits(instr->Mask(UnconditionalBranchMask) | p.second);
+  }
+}
+
 void Assembler::GrowBuffer() {
   bool previously_on_heap = buffer_->IsOnHeap();
 
@@ -4322,17 +4335,7 @@ void Assembler::GrowBuffer() {
 
   // Patch on-heap references to handles.
   if (previously_on_heap && !buffer_->IsOnHeap()) {
-    Address base = reinterpret_cast<Address>(buffer_->start());
-    for (auto p : saved_handles_for_raw_object_ptr_) {
-      WriteUnalignedValue(base + p.first, p.second);
-    }
-    for (auto p : saved_offsets_for_runtime_entries_) {
-      Instruction* instr = reinterpret_cast<Instruction*>(base + p.first);
-      DCHECK(is_int26(p.second));
-      DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
-      instr->SetInstructionBits(instr->Mask(UnconditionalBranchMask) |
-                                p.second);
-    }
+    FixOnHeapReferences();
   }
 
   // Pending relocation entries are also relative, no need to relocate.
@@ -204,6 +204,11 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable);
   }
 
+  // This function is called when on-heap-compilation invariants are
+  // invalidated. For instance, when the assembler buffer grows or a GC happens
+  // between Code object allocation and Code object finalization.
+  void FixOnHeapReferences();
+
   // Insert the smallest number of nop instructions
   // possible to align the pc offset to a multiple
   // of m. m must be a power of 2 (>= 4).
@@ -142,8 +142,8 @@ class ExternalAssemblerBufferImpl : public AssemblerBuffer {
 
 class OnHeapAssemblerBuffer : public AssemblerBuffer {
  public:
-  OnHeapAssemblerBuffer(Handle<Code> code, int size)
-      : code_(code), size_(size) {}
+  OnHeapAssemblerBuffer(Handle<Code> code, int size, int gc_count)
+      : code_(code), size_(size), gc_count_(gc_count) {}
 
   byte* start() const override {
     return reinterpret_cast<byte*>(code_->raw_instruction_start());
@@ -162,11 +162,14 @@ class OnHeapAssemblerBuffer : public AssemblerBuffer {
 
   bool IsOnHeap() const override { return true; }
 
+  int OnHeapGCCount() const override { return gc_count_; }
+
   MaybeHandle<Code> code() const override { return code_; }
 
  private:
   Handle<Code> code_;
   const int size_;
+  const int gc_count_;
 };
 
 static thread_local std::aligned_storage_t<sizeof(ExternalAssemblerBufferImpl),
@@ -211,7 +214,8 @@ std::unique_ptr<AssemblerBuffer> NewOnHeapAssemblerBuffer(Isolate* isolate,
   MaybeHandle<Code> code =
       isolate->factory()->NewEmptyCode(CodeKind::BASELINE, size);
   if (code.is_null()) return {};
-  return std::make_unique<OnHeapAssemblerBuffer>(code.ToHandleChecked(), size);
+  return std::make_unique<OnHeapAssemblerBuffer>(code.ToHandleChecked(), size,
+                                                 isolate->heap()->gc_count());
 }
 
 // -----------------------------------------------------------------------------
@@ -204,6 +204,9 @@ class AssemblerBuffer {
       V8_WARN_UNUSED_RESULT = 0;
   virtual bool IsOnHeap() const { return false; }
   virtual MaybeHandle<Code> code() const { return MaybeHandle<Code>(); }
+  // Return the GC count when the buffer was allocated (only if the buffer is on
+  // the GC heap).
+  virtual int OnHeapGCCount() const { return 0; }
 };
 
 // Allocate an AssemblerBuffer which uses an existing buffer. This buffer cannot
@@ -283,6 +286,8 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
 
   bool IsOnHeap() const { return buffer_->IsOnHeap(); }
 
+  int OnHeapGCCount() const { return buffer_->OnHeapGCCount(); }
+
   MaybeHandle<Code> code() const {
     DCHECK(IsOnHeap());
     return buffer_->code();
@@ -3341,6 +3341,13 @@ void Assembler::emit_vex_prefix(Register vreg, VectorLength l, SIMDPrefix pp,
   emit_vex_prefix(ivreg, l, pp, mm, w);
 }
 
+void Assembler::FixOnHeapReferences() {
+  Address base = reinterpret_cast<Address>(buffer_->start());
+  for (auto p : saved_handles_for_raw_object_ptr_) {
+    WriteUnalignedValue<uint32_t>(base + p.first, p.second);
+  }
+}
+
 void Assembler::GrowBuffer() {
   DCHECK(buffer_overflow());
   DCHECK_EQ(buffer_start_, buffer_->start());
@@ -3396,10 +3403,7 @@ void Assembler::GrowBuffer() {
 
   // Patch on-heap references to handles.
   if (previously_on_heap && !buffer_->IsOnHeap()) {
-    Address base = reinterpret_cast<Address>(buffer_->start());
-    for (auto p : saved_handles_for_raw_object_ptr_) {
-      WriteUnalignedValue<uint32_t>(base + p.first, p.second);
-    }
+    FixOnHeapReferences();
   }
 
   DCHECK(!buffer_overflow());
@@ -392,6 +392,11 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable);
   }
 
+  // This function is called when on-heap-compilation invariants are
+  // invalidated. For instance, when the assembler buffer grows or a GC happens
+  // between Code object allocation and Code object finalization.
+  void FixOnHeapReferences();
+
   void FinalizeJumpOptimizationInfo();
 
   // Unused on this architecture.
@@ -537,6 +537,16 @@ bool Assembler::is_optimizable_farjmp(int idx) {
   return !!(bitmap[idx / 32] & (1 << (idx & 31)));
 }
 
+void Assembler::FixOnHeapReferences() {
+  Address base = reinterpret_cast<Address>(buffer_->start());
+  for (auto p : saved_handles_for_raw_object_ptr_) {
+    WriteUnalignedValue(base + p.first, p.second);
+  }
+  for (auto p : saved_offsets_for_runtime_entries_) {
+    WriteUnalignedValue<uint32_t>(base + p.first, p.second);
+  }
+}
+
 void Assembler::GrowBuffer() {
   DCHECK(buffer_overflow());
 
@@ -581,13 +591,7 @@ void Assembler::GrowBuffer() {
 
   // Patch on-heap references to handles.
  if (previously_on_heap && !buffer_->IsOnHeap()) {
-    Address base = reinterpret_cast<Address>(buffer_->start());
-    for (auto p : saved_handles_for_raw_object_ptr_) {
-      WriteUnalignedValue(base + p.first, p.second);
-    }
-    for (auto p : saved_offsets_for_runtime_entries_) {
-      WriteUnalignedValue<uint32_t>(base + p.first, p.second);
-    }
+    FixOnHeapReferences();
   }
 
   DCHECK(!buffer_overflow());
@@ -421,6 +421,11 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     GetCode(isolate, desc, kNoSafepointTable, kNoHandlerTable);
   }
 
+  // This function is called when on-heap-compilation invariants are
+  // invalidated. For instance, when the assembler buffer grows or a GC happens
+  // between Code object allocation and Code object finalization.
+  void FixOnHeapReferences();
+
   void FinalizeJumpOptimizationInfo();
 
   // Unused on this architecture.
@@ -297,6 +297,17 @@ void Factory::CodeBuilder::FinalizeOnHeapCode(Handle<Code> code,
 
   code->CopyRelocInfoToByteArray(reloc_info, code_desc_);
 
+  if (code_desc_.origin->OnHeapGCCount() != heap->gc_count()) {
+    // If a GC happens between Code object allocation and now, we might have
+    // invalid embedded object references. We need to fix them back to handles,
+    // then unbox and relocate them.
+    // TODO(victorgomes): FixOnHeapReferences could already do the full
+    // relocation, to avoid having to call RelocateFromDesc. Additionally, we
+    // could use it to make grown buffers on-heap.
+    code_desc_.origin->FixOnHeapReferences();
+    code->RelocateFromDesc(reloc_info, heap, code_desc_);
+  }
+
 #ifdef VERIFY_HEAP
   code->VerifyRelocInfo(isolate_, reloc_info);
 #endif
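For reference, a small sketch of the GC-count guard used in FinalizeOnHeapCode above, with made-up names for the heap and buffer; the real code compares code_desc_.origin->OnHeapGCCount() against heap->gc_count():

struct SketchHeap {
  int gc_count = 0;  // incremented on every collection
};

struct SketchOnHeapBuffer {
  int allocation_gc_count = 0;  // heap gc_count captured at buffer allocation
};

// Any GC between buffer allocation and Code finalization may have moved the
// embedded objects, so the raw pointers written into the code are suspect.
bool NeedsOnHeapFixup(const SketchOnHeapBuffer& buffer, const SketchHeap& heap) {
  return buffer.allocation_gc_count != heap.gc_count;
}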