Clean up deferred handle scopes
- There was no use of DisallowDeferredHandleDereference, so remove the
  corresponding assertion scope and related code.
- Make DeferredHandleScope::Detach return a unique_ptr rather than a raw
  pointer for clarity.
- Store DeferredHandles in compilation info as unique_ptr rather than
  shared_ptr, as it's never shared.
- Remove some unused methods.

Change-Id: I8327399fd291eba782820dd7a62c3bbdffedac4d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1805645
Commit-Queue: Georg Neis <neis@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63828}
parent 93ce2b42b6
commit c98aa0e275
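As context for the diff below, here is a minimal, self-contained sketch of the ownership pattern this change moves to: a scope object hands exclusive ownership of its detached state to the caller through std::unique_ptr, and the receiving object stores it as a unique_ptr because ownership is never shared. All class and member names in the sketch are illustrative stand-ins, not V8's actual types.

    // Sketch only (assumed names): Detach() transfers ownership explicitly,
    // so there is no raw new/delete at the call site and no shared_ptr where
    // a single owner suffices.
    #include <memory>
    #include <utility>
    #include <vector>

    struct DetachedBlocks {
      std::vector<int> blocks;  // stand-in for the detached handle blocks
    };

    class Scope {
     public:
      // Hands the accumulated blocks to the caller as a unique_ptr.
      std::unique_ptr<DetachedBlocks> Detach() {
        auto detached = std::make_unique<DetachedBlocks>();
        detached->blocks.swap(blocks_);
        return detached;
      }

     private:
      std::vector<int> blocks_{1, 2, 3};
    };

    class Info {
     public:
      // unique_ptr rather than shared_ptr: Info is the sole owner.
      void set_detached(std::unique_ptr<DetachedBlocks> detached) {
        detached_ = std::move(detached);
      }

     private:
      std::unique_ptr<DetachedBlocks> detached_;
    };

    int main() {
      Scope scope;
      Info info;
      info.set_detached(scope.Detach());
      return 0;
    }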
@@ -10587,9 +10587,10 @@ char* HandleScopeImplementer::Iterate(RootVisitor* v, char* storage) {
   return storage + ArchiveSpacePerThread();
 }
 
-DeferredHandles* HandleScopeImplementer::Detach(Address* prev_limit) {
-  DeferredHandles* deferred =
-      new DeferredHandles(isolate()->handle_scope_data()->next, isolate());
+std::unique_ptr<DeferredHandles> HandleScopeImplementer::Detach(
+    Address* prev_limit) {
+  std::unique_ptr<DeferredHandles> deferred(
+      new DeferredHandles(isolate()->handle_scope_data()->next, isolate()));
 
   while (!blocks_.empty()) {
     Address* block_start = blocks_.back();
@@ -433,7 +433,7 @@ class HandleScopeImplementer {
   }
 
   void BeginDeferredScope();
-  DeferredHandles* Detach(Address* prev_limit);
+  std::unique_ptr<DeferredHandles> Detach(Address* prev_limit);
 
   Isolate* isolate_;
   DetachableVector<Address*> blocks_;
@@ -1456,15 +1456,6 @@ void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
       MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
-void MacroAssembler::LoadObject(Register result, Handle<Object> object) {
-  AllowDeferredHandleDereference heap_object_check;
-  if (object->IsHeapObject()) {
-    Mov(result, Handle<HeapObject>::cast(object));
-  } else {
-    Mov(result, Operand(Smi::cast(*object)));
-  }
-}
-
 void TurboAssembler::Move(Register dst, Smi src) { Mov(dst, src); }
 
 void TurboAssembler::MovePair(Register dst0, Register src0, Register dst1,
@@ -1597,8 +1597,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
     tbx(vd, vn, vn2, vn3, vn4, vm);
   }
 
-  void LoadObject(Register result, Handle<Object> object);
-
   inline void PushSizeRegList(
       RegList registers, unsigned reg_size,
       CPURegister::RegisterType type = CPURegister::kRegister) {
@@ -3990,7 +3990,6 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
     }
   }
   DCHECK(RelocInfo::IsCodeTarget(rmode));
-  AllowDeferredHandleDereference embedding_raw_address;
   Call(code.address(), rmode, cond, rs, rt, bd);
 }
 
@@ -111,15 +111,9 @@ OptimizedCompilationInfo::~OptimizedCompilationInfo() {
 }
 
 void OptimizedCompilationInfo::set_deferred_handles(
-    std::shared_ptr<DeferredHandles> deferred_handles) {
+    std::unique_ptr<DeferredHandles> deferred_handles) {
   DCHECK_NULL(deferred_handles_);
-  deferred_handles_.swap(deferred_handles);
-}
-
-void OptimizedCompilationInfo::set_deferred_handles(
-    DeferredHandles* deferred_handles) {
-  DCHECK_NULL(deferred_handles_);
-  deferred_handles_.reset(deferred_handles);
+  deferred_handles_ = std::move(deferred_handles);
 }
 
 void OptimizedCompilationInfo::ReopenHandlesInNewHandleScope(Isolate* isolate) {
@@ -132,6 +126,7 @@ void OptimizedCompilationInfo::ReopenHandlesInNewHandleScope(Isolate* isolate) {
   if (!closure_.is_null()) {
     closure_ = Handle<JSFunction>(*closure_, isolate);
   }
+  DCHECK(code_.is_null());
 }
 
 void OptimizedCompilationInfo::AbortOptimization(BailoutReason reason) {
@@ -231,11 +231,7 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
     osr_frame_ = osr_frame;
   }
 
-  void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
-  void set_deferred_handles(DeferredHandles* deferred_handles);
-  std::shared_ptr<DeferredHandles> deferred_handles() {
-    return deferred_handles_;
-  }
+  void set_deferred_handles(std::unique_ptr<DeferredHandles> deferred_handles);
 
   void ReopenHandlesInNewHandleScope(Isolate* isolate);
 
@@ -330,7 +326,7 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
   // OptimizedCompilationInfo allocates.
   Zone* zone_;
 
-  std::shared_ptr<DeferredHandles> deferred_handles_;
+  std::unique_ptr<DeferredHandles> deferred_handles_;
 
   BailoutReason bailout_reason_ = BailoutReason::kNoReason;
 
@@ -1423,7 +1423,6 @@ void MacroAssembler::Negpd(XMMRegister dst) {
 }
 
 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
-  AllowDeferredHandleDereference smi_check;
   if (source->IsSmi()) {
     Cmp(dst, Smi::cast(*source));
   } else {
@@ -1433,7 +1432,6 @@ void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
 }
 
 void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
-  AllowDeferredHandleDereference smi_check;
   if (source->IsSmi()) {
     Cmp(dst, Smi::cast(*source));
   } else {
@@ -126,8 +126,6 @@ template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false>;
 template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true>;
 template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false>;
 template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, true>;
-template class PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false>;
-template class PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true>;
 template class PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, false>;
 template class PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, true>;
 
@@ -28,7 +28,6 @@ enum PerThreadAssertType {
   HEAP_ALLOCATION_ASSERT,
   HANDLE_ALLOCATION_ASSERT,
   HANDLE_DEREFERENCE_ASSERT,
-  DEFERRED_HANDLE_DEREFERENCE_ASSERT,
   CODE_DEPENDENCY_CHANGE_ASSERT,
   LAST_PER_THREAD_ASSERT_TYPE
 };
@@ -145,19 +144,11 @@ using DisallowHandleDereference =
 using AllowHandleDereference =
     PerThreadAssertScopeDebugOnly<HANDLE_DEREFERENCE_ASSERT, true>;
 
-// Scope to document where we do not expect deferred handles to be dereferenced.
-using DisallowDeferredHandleDereference =
-    PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false>;
-
-// Scope to introduce an exception to DisallowDeferredHandleDereference.
-using AllowDeferredHandleDereference =
-    PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true>;
-
-// Scope to document where we do not expect deferred handles to be dereferenced.
+// Scope to document where we do not expect code dependencies to change.
 using DisallowCodeDependencyChange =
     PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, false>;
 
-// Scope to introduce an exception to DisallowDeferredHandleDereference.
+// Scope to introduce an exception to DisallowCodeDependencyChange.
 using AllowCodeDependencyChange =
     PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, true>;
 
@@ -243,10 +234,6 @@ extern template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false>;
 extern template class PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true>;
 extern template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false>;
 extern template class PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, true>;
-extern template class PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT,
-                                           false>;
-extern template class PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT,
-                                           true>;
 extern template class PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT,
                                            false>;
 extern template class PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, true>;
@@ -163,7 +163,6 @@ void JsonPrintInlinedFunctionInfo(
 void JsonPrintAllSourceWithPositions(std::ostream& os,
                                      OptimizedCompilationInfo* info,
                                      Isolate* isolate) {
-  AllowDeferredHandleDereference allow_deference_for_print_code;
   os << "\"sources\" : {";
   Handle<Script> script =
       (info->shared_info().is_null() ||
@@ -696,8 +696,6 @@ void PrintInlinedFunctionInfo(
 // compilation. For inlined functions print source position of their inlining.
 void PrintParticipatingSource(OptimizedCompilationInfo* info,
                               Isolate* isolate) {
-  AllowDeferredHandleDereference allow_deference_for_print_code;
-
   SourceIdAssigner id_assigner(info->inlined_functions().size());
   PrintFunctionSource(info, isolate, -1, info->shared_info());
   const auto& inlined = info->inlined_functions();
@@ -716,7 +714,6 @@ void PrintCode(Isolate* isolate, Handle<Code> code,
 }
 
 #ifdef ENABLE_DISASSEMBLER
-  AllowDeferredHandleDereference allow_deference_for_print_code;
   bool print_code =
       FLAG_print_code ||
       (info->IsOptimizing() && FLAG_print_opt_code &&
@@ -28,7 +28,7 @@ ASSERT_TRIVIALLY_COPYABLE(Handle<Object>);
 ASSERT_TRIVIALLY_COPYABLE(MaybeHandle<Object>);
 
 #ifdef DEBUG
-bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
+bool HandleBase::IsDereferenceAllowed() const {
   DCHECK_NOT_NULL(location_);
   Object object(*location_);
   if (object.IsSmi()) return true;
@@ -40,16 +40,7 @@ bool HandleBase::IsDereferenceAllowed(DereferenceCheckMode mode) const {
       RootsTable::IsImmortalImmovable(root_index)) {
     return true;
   }
-  if (!AllowHandleDereference::IsAllowed()) return false;
-  if (mode == INCLUDE_DEFERRED_CHECK &&
-      !AllowDeferredHandleDereference::IsAllowed()) {
-    // Accessing cells, maps and internalized strings is safe.
-    if (heap_object.IsCell()) return true;
-    if (heap_object.IsMap()) return true;
-    if (heap_object.IsInternalizedString()) return true;
-    return !isolate->IsDeferredHandle(location_);
-  }
-  return true;
+  return AllowHandleDereference::IsAllowed();
 }
 #endif
 
@@ -188,13 +179,13 @@ DeferredHandleScope::DeferredHandleScope(Isolate* isolate)
 }
 
 DeferredHandleScope::~DeferredHandleScope() {
-  impl_->isolate()->handle_scope_data()->level--;
   DCHECK(handles_detached_);
-  DCHECK(impl_->isolate()->handle_scope_data()->level == prev_level_);
+  impl_->isolate()->handle_scope_data()->level--;
+  DCHECK_EQ(impl_->isolate()->handle_scope_data()->level, prev_level_);
 }
 
-DeferredHandles* DeferredHandleScope::Detach() {
-  DeferredHandles* deferred = impl_->Detach(prev_limit_);
+std::unique_ptr<DeferredHandles> DeferredHandleScope::Detach() {
+  std::unique_ptr<DeferredHandles> deferred = impl_->Detach(prev_limit_);
   HandleScopeData* data = impl_->isolate()->handle_scope_data();
   data->next = prev_next_;
   data->limit = prev_limit_;
@@ -41,11 +41,8 @@ class HandleBase {
 
   // Check if this handle refers to the exact same object as the other handle.
   V8_INLINE bool is_identical_to(const HandleBase that) const {
-    // Dereferencing deferred handles to check object equality is safe.
-    SLOW_DCHECK((this->location_ == nullptr ||
-                 this->IsDereferenceAllowed(NO_DEFERRED_CHECK)) &&
-                (that.location_ == nullptr ||
-                 that.IsDereferenceAllowed(NO_DEFERRED_CHECK)));
+    SLOW_DCHECK((this->location_ == nullptr || this->IsDereferenceAllowed()) &&
+                (that.location_ == nullptr || that.IsDereferenceAllowed()));
     if (this->location_ == that.location_) return true;
     if (this->location_ == nullptr || that.location_ == nullptr) return false;
     return *this->location_ == *that.location_;
@@ -59,20 +56,16 @@ class HandleBase {
 
   // Returns the address to where the raw pointer is stored.
   V8_INLINE Address* location() const {
-    SLOW_DCHECK(location_ == nullptr ||
-                IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK));
+    SLOW_DCHECK(location_ == nullptr || IsDereferenceAllowed());
     return location_;
   }
 
  protected:
-  enum DereferenceCheckMode { INCLUDE_DEFERRED_CHECK, NO_DEFERRED_CHECK };
 #ifdef DEBUG
-  bool V8_EXPORT_PRIVATE IsDereferenceAllowed(DereferenceCheckMode mode) const;
+  bool V8_EXPORT_PRIVATE IsDereferenceAllowed() const;
 #else
   V8_INLINE
-  bool V8_EXPORT_PRIVATE IsDereferenceAllowed(DereferenceCheckMode mode) const {
-    return true;
-  }
+  bool V8_EXPORT_PRIVATE IsDereferenceAllowed() const { return true; }
 #endif  // DEBUG
 
   // This uses type Address* as opposed to a pointer type to a typed
@@ -140,7 +133,7 @@ class Handle final : public HandleBase {
   V8_INLINE T operator*() const {
     // unchecked_cast because we rather trust Handle<T> to contain a T than
     // include all the respective -inl.h headers for SLOW_DCHECKs.
-    SLOW_DCHECK(IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK));
+    SLOW_DCHECK(IsDereferenceAllowed());
     return T::unchecked_cast(Object(*location()));
   }
 
@@ -318,7 +311,7 @@ class V8_EXPORT_PRIVATE DeferredHandleScope final {
   // The DeferredHandles object returned stores the Handles created
   // since the creation of this DeferredHandleScope. The Handles are
   // alive as long as the DeferredHandles object is alive.
-  DeferredHandles* Detach();
+  std::unique_ptr<DeferredHandles> Detach();
   ~DeferredHandleScope();
 
  private:
@@ -1785,7 +1785,6 @@ Handle<BytecodeArray> Factory::NewBytecodeArray(
 }
 
 Handle<Cell> Factory::NewCell(Handle<Object> value) {
-  AllowDeferredHandleDereference convert_to_cell;
   STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
   HeapObject result = AllocateRawWithImmortalMap(
       Cell::kSize, AllocationType::kOld, *cell_map());
@@ -1795,7 +1794,6 @@ Handle<Cell> Factory::NewCell(Handle<Object> value) {
 }
 
 Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
-  AllowDeferredHandleDereference convert_to_cell;
   HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kAlignedSize,
       AllocationType::kOld, *no_closures_cell_map());
   Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
@@ -1806,7 +1804,6 @@ Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
 }
 
 Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
-  AllowDeferredHandleDereference convert_to_cell;
   HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kAlignedSize,
       AllocationType::kOld, *one_closure_cell_map());
   Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
@@ -1817,7 +1814,6 @@ Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
 }
 
 Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
-  AllowDeferredHandleDereference convert_to_cell;
   HeapObject result = AllocateRawWithImmortalMap(FeedbackCell::kAlignedSize,
       AllocationType::kOld, *many_closures_cell_map());
   Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
@@ -101,7 +101,6 @@ void Code::CopyFromNoFlush(Heap* heap, const CodeDesc& desc) {
 
   // Unbox handles and relocate.
   Assembler* origin = desc.origin;
-  AllowDeferredHandleDereference embedding_raw_address;
   const int mode_mask = RelocInfo::PostCodegenRelocationMask();
   for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
     RelocInfo::Mode mode = it.rinfo()->rmode();
@@ -3663,7 +3663,7 @@ TEST(DeferredHandles) {
     DeferredHandleScope deferred(isolate);
     DummyVisitor visitor;
     isolate->handle_scope_implementer()->Iterate(&visitor);
-    delete deferred.Detach();
+    deferred.Detach();
   }
 
 