From e7d8fb96e226b4dfa1343c5ad383934a102898fd Mon Sep 17 00:00:00 2001
From: "svenpanne@chromium.org" <svenpanne@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date: Tue, 13 May 2014 06:22:49 +0000
Subject: [PATCH] Simplified slot buffer logic during weak list visiting.

Tiny reformatting cleanup along the way.

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/282493004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21278 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/heap.cc             |  35 ++++----------
 src/heap.h              |   6 +--
 src/mark-compact.cc     |   2 +-
 src/objects-visiting.cc | 103 ++++++++++++++++++----------------------
 src/objects-visiting.h  |   5 +-
 5 files changed, 62 insertions(+), 89 deletions(-)

diff --git a/src/heap.cc b/src/heap.cc
index ddec9a9f8d..f0c9154b5e 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1681,36 +1681,24 @@ void Heap::UpdateReferencesInExternalStringTable(
 
 
 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
-  // We don't record weak slots during marking or scavenges.
-  // Instead we do it once when we complete mark-compact cycle.
-  // Note that write barrier has no effect if we are already in the middle of
-  // compacting mark-sweep cycle and we have to record slots manually.
-  bool record_slots =
-      gc_state() == MARK_COMPACT &&
-      mark_compact_collector()->is_compacting();
-  ProcessArrayBuffers(retainer, record_slots);
-  ProcessNativeContexts(retainer, record_slots);
+  ProcessArrayBuffers(retainer);
+  ProcessNativeContexts(retainer);
   // TODO(mvstanton): AllocationSites only need to be processed during
   // MARK_COMPACT, as they live in old space. Verify and address.
-  ProcessAllocationSites(retainer, record_slots);
+  ProcessAllocationSites(retainer);
 }
 
 
-void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
-                                 bool record_slots) {
-  Object* head =
-      VisitWeakList<Context>(
-          this, native_contexts_list(), retainer, record_slots);
+
+void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
+  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }
 
 
-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
-                               bool record_slots) {
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
   Object* array_buffer_obj =
-      VisitWeakList<JSArrayBuffer>(this,
-                                   array_buffers_list(),
-                                   retainer, record_slots);
+      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
   set_array_buffers_list(array_buffer_obj);
 }
 
@@ -1726,12 +1714,9 @@ void Heap::TearDownArrayBuffers() {
 }
 
 
-void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
-                                  bool record_slots) {
+void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this,
-                                    allocation_sites_list(),
-                                    retainer, record_slots);
+      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }
 
diff --git a/src/heap.h b/src/heap.h
index 0ea45296df..8e3cd3f2b0 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -1974,9 +1974,9 @@ class Heap {
   // Code to be run before and after mark-compact.
   void MarkCompactPrologue();
 
-  void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
-  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
-  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
+  void ProcessNativeContexts(WeakObjectRetainer* retainer);
+  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
+  void ProcessAllocationSites(WeakObjectRetainer* retainer);
 
   // Deopts all code that contains allocation instruction which are tenured or
   // not tenured. Moreover it clears the pretenuring allocation site statistics.
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 38013962dc..ff6d2e3049 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2757,7 +2757,7 @@ int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
   ASSERT(start + 1 == end);
   Object* old_head = entries->object_at(start);
   MarkCompactWeakObjectRetainer retainer;
-  Object* head = VisitWeakList<Code>(heap(), old_head, &retainer, true);
+  Object* head = VisitWeakList<Code>(heap(), old_head, &retainer);
   entries->set_object_at(new_start, head);
   Object** slot = entries->slot_at(new_start);
   RecordSlot(slot, slot, head);
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index 24cff3487f..aea8a092ab 100644
--- a/src/objects-visiting.cc
+++ b/src/objects-visiting.cc
@@ -191,6 +191,16 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
 }
 
 
+// We don't record weak slots during marking or scavenges. Instead we do it
+// once when we complete a mark-compact cycle. Note that the write barrier has
+// no effect if we are already in the middle of a compacting mark-sweep cycle
+// and we have to record slots manually.
+static bool MustRecordSlots(Heap* heap) {
+  return heap->gc_state() == Heap::MARK_COMPACT &&
+         heap->mark_compact_collector()->is_compacting();
+}
+
+
 template <class T>
 struct WeakListVisitor;
 
@@ -198,12 +208,12 @@ struct WeakListVisitor;
 template <class T>
 Object* VisitWeakList(Heap* heap,
                       Object* list,
-                      WeakObjectRetainer* retainer,
-                      bool record_slots) {
+                      WeakObjectRetainer* retainer) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
   MarkCompactCollector* collector = heap->mark_compact_collector();
+  bool record_slots = MustRecordSlots(heap);
   while (list != undefined) {
     // Check whether to keep the candidate in the list.
     T* candidate = reinterpret_cast<T*>(list);
@@ -229,8 +239,7 @@ Object* VisitWeakList(Heap* heap,
       // tail is a live object, visit it.
-      WeakListVisitor<T>::VisitLiveObject(
-          heap, tail, retainer, record_slots);
+      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
     } else {
       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
     }
@@ -273,12 +282,9 @@ struct WeakListVisitor<JSFunction> {
     return JSFunction::kNextFunctionLinkOffset;
   }
 
-  static void VisitLiveObject(Heap*, JSFunction*,
-                              WeakObjectRetainer*, bool) {
-  }
+  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}
 
-  static void VisitPhantomObject(Heap*, JSFunction*) {
-  }
+  static void VisitPhantomObject(Heap*, JSFunction*) {}
 };
 
 
@@ -296,12 +302,9 @@ struct WeakListVisitor<Code> {
     return Code::kNextCodeLinkOffset;
   }
 
-  static void VisitLiveObject(Heap*, Code*,
-                              WeakObjectRetainer*, bool) {
-  }
+  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
 
-  static void VisitPhantomObject(Heap*, Code*) {
-  }
+  static void VisitPhantomObject(Heap*, Code*) {}
 };
 
 
@@ -317,33 +320,32 @@ struct WeakListVisitor<Context> {
     return context->get(Context::NEXT_CONTEXT_LINK);
   }
 
+  static int WeakNextOffset() {
+    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
+  }
+
   static void VisitLiveObject(Heap* heap,
                               Context* context,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {
+                              WeakObjectRetainer* retainer) {
     // Process the three weak lists linked off the context.
-    DoWeakList<JSFunction>(heap, context, retainer, record_slots,
+    DoWeakList<JSFunction>(heap, context, retainer,
                            Context::OPTIMIZED_FUNCTIONS_LIST);
-    DoWeakList<Code>(heap, context, retainer, record_slots,
-                     Context::OPTIMIZED_CODE_LIST);
-    DoWeakList<Code>(heap, context, retainer, record_slots,
-                     Context::DEOPTIMIZED_CODE_LIST);
+    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
+    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
   }
 
   template<class T>
   static void DoWeakList(Heap* heap,
                          Context* context,
                          WeakObjectRetainer* retainer,
-                         bool record_slots,
                          int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
-        record_slots);
+    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
 
     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
 
-    if (record_slots) {
+    if (MustRecordSlots(heap)) {
       // Record the updated slot if necessary.
       Object** head_slot = HeapObject::RawField(
           context, FixedArray::SizeFor(index));
@@ -358,10 +360,6 @@ struct WeakListVisitor<Context> {
     ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
     ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
   }
-
-  static int WeakNextOffset() {
-    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
-  }
 };
 
 
@@ -375,16 +373,13 @@ struct WeakListVisitor<JSArrayBufferView> {
     return obj->weak_next();
   }
 
-  static void VisitLiveObject(Heap*,
-                              JSArrayBufferView* obj,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {}
-
-  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
-
   static int WeakNextOffset() {
     return JSArrayBufferView::kWeakNextOffset;
   }
+
+  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}
+
+  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
 };
 
 
@@ -398,17 +393,20 @@ struct WeakListVisitor<JSArrayBuffer> {
     return obj->weak_next();
   }
 
+  static int WeakNextOffset() {
+    return JSArrayBuffer::kWeakNextOffset;
+  }
+
   static void VisitLiveObject(Heap* heap,
                               JSArrayBuffer* array_buffer,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {
+                              WeakObjectRetainer* retainer) {
     Object* typed_array_obj =
         VisitWeakList<JSArrayBufferView>(
             heap,
             array_buffer->weak_first_view(),
-            retainer, record_slots);
+            retainer);
     array_buffer->set_weak_first_view(typed_array_obj);
-    if (typed_array_obj != heap->undefined_value() && record_slots) {
+    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
       Object** slot = HeapObject::RawField(
           array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
       heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
@@ -418,10 +416,6 @@ struct WeakListVisitor<JSArrayBuffer> {
   static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
     Runtime::FreeArrayBuffer(heap->isolate(), phantom);
   }
-
-  static int WeakNextOffset() {
-    return JSArrayBuffer::kWeakNextOffset;
-  }
 };
 
 
@@ -435,36 +429,33 @@ struct WeakListVisitor<AllocationSite> {
     return obj->weak_next();
   }
 
-  static void VisitLiveObject(Heap* heap,
-                              AllocationSite* site,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {}
-
-  static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {}
-
   static int WeakNextOffset() {
     return AllocationSite::kWeakNextOffset;
   }
+
+  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}
+
+  static void VisitPhantomObject(Heap*, AllocationSite*) {}
 };
 
 
 template Object* VisitWeakList<Code>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<JSFunction>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<Context>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<JSArrayBuffer>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<AllocationSite>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 } }  // namespace v8::internal
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 05f82574cc..d9ab02a20f 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -469,10 +469,7 @@ class WeakObjectRetainer;
 // pointers.  The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap,
-                      Object* list,
-                      WeakObjectRetainer* retainer,
-                      bool record_slots);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 } }  // namespace v8::internal
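The core of the change: instead of computing a bool record_slots once in Heap::ProcessWeakReferences and threading it through every signature down the call graph, each site that needs the answer now asks MustRecordSlots(heap) directly. Below is a minimal standalone sketch of that refactoring pattern; GcState, Collector, and Heap here are hypothetical stand-ins, not the real V8 types.

// predicate_sketch.cc -- illustrative only, not V8 code.
#include <cassert>

enum class GcState { kIdle, kScavenge, kMarkCompact };

struct Collector {
  bool compacting = false;
  bool is_compacting() const { return compacting; }
};

struct Heap {
  GcState state = GcState::kIdle;
  Collector collector;
};

// Before: callers computed `bool record_slots` once and passed it through
// ProcessNativeContexts(), VisitWeakList(), DoWeakList(), and so on.
// After: the few sites that care evaluate the predicate on demand.
static bool MustRecordSlots(const Heap* heap) {
  // Slots need manual recording only while a compacting mark-compact
  // cycle is in progress; otherwise the write barrier handles them.
  return heap->state == GcState::kMarkCompact &&
         heap->collector.is_compacting();
}

int main() {
  Heap heap;
  assert(!MustRecordSlots(&heap));   // idle: no manual slot recording
  heap.state = GcState::kMarkCompact;
  heap.collector.compacting = true;
  assert(MustRecordSlots(&heap));    // compacting mark-compact: record
  return 0;
}

The trade-off is a few extra cheap predicate evaluations per GC cycle in exchange for five shorter signatures and no possibility of a caller passing a stale flag.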
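For context on what VisitWeakList<T> does: it walks an intrusive singly linked list whose next pointers are weak, splices out elements the retainer no longer keeps, and returns the new head, with all per-type knowledge (how to reach the next pointer, what to do with live and dead elements) supplied by the WeakListVisitor<T> trait. The sketch below mirrors that shape with hypothetical Node and ListPolicy types and reduces the retainer to an alive flag; the real walker also records slots and dispatches through a WeakObjectRetainer, which this omits.

// weak_list_sketch.cc -- illustrative only; plain C++11, no V8 headers.
#include <cstdio>

// Stand-in for a heap object living on an intrusive weak list.
struct Node {
  int value;
  Node* weak_next;
  bool alive;  // stand-in for "the retainer keeps this object"
};

// Policy struct mirroring the shape of V8's WeakListVisitor<T>.
template <class T>
struct ListPolicy;

template <>
struct ListPolicy<Node> {
  static Node* WeakNext(Node* n) { return n->weak_next; }
  static void SetWeakNext(Node* n, Node* next) { n->weak_next = next; }
  static void VisitLiveObject(Node* n) { std::printf("live: %d\n", n->value); }
  static void VisitPhantomObject(Node* n) { std::printf("dead: %d\n", n->value); }
};

// Generic walker, analogous to VisitWeakList<T>: unlinks dead elements
// and returns the new head of the list.
template <class T>
T* VisitWeakList(T* list) {
  T* head = nullptr;
  T* tail = nullptr;
  while (list != nullptr) {
    T* candidate = list;
    list = ListPolicy<T>::WeakNext(candidate);  // advance before unlinking
    if (candidate->alive) {
      if (tail == nullptr) {
        head = candidate;  // first retained element becomes the head
      } else {
        ListPolicy<T>::SetWeakNext(tail, candidate);  // splice out dead run
      }
      tail = candidate;
      ListPolicy<T>::VisitLiveObject(candidate);
    } else {
      ListPolicy<T>::VisitPhantomObject(candidate);
    }
  }
  if (tail != nullptr) ListPolicy<T>::SetWeakNext(tail, nullptr);
  return head;
}

int main() {
  Node c = {3, nullptr, true};
  Node b = {2, &c, false};  // dead; will be unlinked
  Node a = {1, &b, true};
  for (Node* n = VisitWeakList(&a); n != nullptr; n = n->weak_next)
    std::printf("kept: %d\n", n->value);  // prints 1 then 3
  return 0;
}

Compiling with any C++11 compiler (e.g. g++ -std=c++11) and running prints the retained elements 1 and 3, the dead middle element having been unlinked.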