From 177e0aa380194df059494c9b336aa201e345b9cd Mon Sep 17 00:00:00 2001
From: Hannes Payer <hpayer@chromium.org>
Date: Fri, 24 Aug 2018 12:52:49 +0200
Subject: [PATCH] [heap] Reland: Reuse object evacuation information for slot
 recording in Scavenger.

This reverts commit 5876d8f58d27f145497d7ea17f191a0f4d99b55b.

Bug: chromium:852420
Change-Id: I318587f20409f98d05278fc0b4c14da09d259cd3
Reviewed-on: https://chromium-review.googlesource.com/1188128
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55393}
---
 src/heap/scavenger-inl.h | 95 +++++++++++++++++++---------------------
 src/heap/scavenger.cc    |  6 +--
 src/heap/scavenger.h     | 27 +++++++-----
 3 files changed, 63 insertions(+), 65 deletions(-)

diff --git a/src/heap/scavenger-inl.h b/src/heap/scavenger-inl.h
index 649292085a..9533b84be7 100644
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -125,47 +125,53 @@ bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
   return false;
 }
 
-void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
-                                      HeapObject* object, int object_size) {
+SlotCallbackResult Scavenger::EvacuateObjectDefault(Map* map,
+                                                    HeapObjectReference** slot,
+                                                    HeapObject* object,
+                                                    int object_size) {
   SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
   SLOW_DCHECK(object->SizeFromMap(map) == object_size);
   if (!heap()->ShouldBePromoted(object->address())) {
     // A semi-space copy may fail due to fragmentation. In that case, we
     // try to promote the object.
-    if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
+    if (SemiSpaceCopyObject(map, slot, object, object_size)) return KEEP_SLOT;
   }
 
-  if (PromoteObject(map, slot, object, object_size)) return;
+  if (PromoteObject(map, slot, object, object_size)) return REMOVE_SLOT;
 
   // If promotion failed, we try to copy the object to the other semi-space
-  if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
+  if (SemiSpaceCopyObject(map, slot, object, object_size)) return KEEP_SLOT;
 
   heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
+  UNREACHABLE();
 }
 
-void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
-                                   ThinString* object, int object_size) {
+SlotCallbackResult Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
+                                                 ThinString* object,
+                                                 int object_size) {
   if (!is_incremental_marking_) {
-    // Loading actual is fine in a parallel setting is there is no write.
+    // Loading actual is fine in a parallel setting since there is no write.
     String* actual = object->actual();
     object->set_length(0);
     *slot = actual;
     // ThinStrings always refer to internalized strings, which are
     // always in old space.
     DCHECK(!Heap::InNewSpace(actual));
-    base::AsAtomicPointer::Relaxed_Store(
+    base::AsAtomicPointer::Release_Store(
         reinterpret_cast<Map**>(object->address()),
         MapWord::FromForwardingAddress(actual).ToMap());
-    return;
+    return REMOVE_SLOT;
   }
 
-  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
-                        object, object_size);
+  return EvacuateObjectDefault(
+      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
 }
 
-void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                          ConsString* object, int object_size) {
+SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
+                                                        HeapObject** slot,
+                                                        ConsString* object,
+                                                        int object_size) {
   DCHECK(IsShortcutCandidate(map->instance_type()));
   if (!is_incremental_marking_ &&
       object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
@@ -174,37 +180,38 @@ void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
     *slot = first;
 
     if (!Heap::InNewSpace(first)) {
-      base::AsAtomicPointer::Relaxed_Store(
+      base::AsAtomicPointer::Release_Store(
           reinterpret_cast<Map**>(object->address()),
           MapWord::FromForwardingAddress(first).ToMap());
-      return;
+      return REMOVE_SLOT;
     }
 
-    MapWord first_word = first->map_word();
+    MapWord first_word = first->synchronized_map_word();
     if (first_word.IsForwardingAddress()) {
       HeapObject* target = first_word.ToForwardingAddress();
 
       *slot = target;
-      base::AsAtomicPointer::Relaxed_Store(
+      base::AsAtomicPointer::Release_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(target).ToMap());
-      return;
+      return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
     }
     Map* map = first_word.ToMap();
-    EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
-                          first, first->SizeFromMap(map));
-    base::AsAtomicPointer::Relaxed_Store(
+    SlotCallbackResult result = EvacuateObjectDefault(
+        map, reinterpret_cast<HeapObjectReference**>(slot), first,
+        first->SizeFromMap(map));
+    base::AsAtomicPointer::Release_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(*slot).ToMap());
-    return;
+    return result;
   }
 
-  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
-                        object, object_size);
+  return EvacuateObjectDefault(
+      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
 }
 
-void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
-                               HeapObject* source) {
+SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
+                                             Map* map, HeapObject* source) {
   SLOW_DCHECK(Heap::InFromSpace(source));
   SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
   int size = source->SizeFromMap(map);
@@ -214,22 +221,21 @@ void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
     case kVisitThinString:
       // At the moment we don't allow weak pointers to thin strings.
       DCHECK(!(*slot)->IsWeakHeapObject());
-      EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
-                         reinterpret_cast<ThinString*>(source), size);
-      break;
+      return EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
+                                reinterpret_cast<ThinString*>(source), size);
     case kVisitShortcutCandidate:
       DCHECK(!(*slot)->IsWeakHeapObject());
       // At the moment we don't allow weak pointers to cons strings.
-      EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
-                                reinterpret_cast<ConsString*>(source), size);
-      break;
+      return EvacuateShortcutCandidate(
+          map, reinterpret_cast<HeapObject**>(slot),
+          reinterpret_cast<ConsString*>(source), size);
     default:
-      EvacuateObjectDefault(map, slot, source, size);
-      break;
+      return EvacuateObjectDefault(map, slot, source, size);
   }
 }
 
-void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
+SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
+                                             HeapObject* object) {
   DCHECK(Heap::InFromSpace(object));
 
   // Synchronized load that consumes the publishing CAS of MigrateObject.
@@ -246,14 +252,14 @@ void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
       DCHECK((*p)->IsStrongHeapObject());
       *p = HeapObjectReference::Strong(dest);
     }
-    return;
+    return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
   }
 
   Map* map = first_word.ToMap();
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
   // Call the slow part of scavenge object.
-  EvacuateObject(p, map, object);
+  return EvacuateObject(p, map, object);
 }
 
 SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -267,17 +273,8 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
     DCHECK(success);
     DCHECK(heap_object->IsHeapObject());
 
-    ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);
-
-    object = *slot;
-    // If the object was in from space before and is after executing the
-    // callback in to space, the object is still live.
-    // Unfortunately, we do not know about the slot. It could be in a
-    // just freed free space object.
-    PageMemoryFence(object);
-    if (Heap::InToSpace(object)) {
-      return KEEP_SLOT;
-    }
+    return ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot),
+                          heap_object);
   } else if (Heap::InToSpace(object)) {
     // Already updated slot. This can happen when processing of the work list
     // is interleaved with processing roots.
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index f8c6d496ce..8406c1d346 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -53,15 +53,13 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
       scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
 
       if (Heap::InFromSpace(target)) {
-        scavenger_->ScavengeObject(slot, target);
+        SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
         bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
         USE(success);
         DCHECK(success);
-        scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
-
-        if (Heap::InNewSpace(target)) {
+        if (result == KEEP_SLOT) {
           SLOW_DCHECK(target->IsHeapObject());
-          SLOW_DCHECK(Heap::InToSpace(target));
           RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                             slot_address);
         }
diff --git a/src/heap/scavenger.h b/src/heap/scavenger.h
index 4e6753f6ce..ce1a2225cd 100644
--- a/src/heap/scavenger.h
+++ b/src/heap/scavenger.h
@@ -61,7 +61,8 @@ class Scavenger {
 
   // Scavenges an object |object| referenced from slot |p|. |object| is required
   // to be in from space.
-  inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);
+  inline SlotCallbackResult ScavengeObject(HeapObjectReference** p,
+                                           HeapObject* object);
 
   // Copies |source| to |target| and sets the forwarding pointer in |source|.
   V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
@@ -73,22 +74,24 @@ class Scavenger {
   V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
                                HeapObject* object, int object_size);
 
-  V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
-                                HeapObject* source);
+  V8_INLINE SlotCallbackResult EvacuateObject(HeapObjectReference** slot,
+                                              Map* map, HeapObject* source);
 
   // Different cases for object evacuation.
-  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
-                                       HeapObject* object, int object_size);
+  V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map* map,
+                                                     HeapObjectReference** slot,
+                                                     HeapObject* object,
+                                                     int object_size);
 
-  V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
-                                    JSFunction* object, int object_size);
+  inline SlotCallbackResult EvacuateThinString(Map* map, HeapObject** slot,
+                                               ThinString* object,
+                                               int object_size);
 
-  inline void EvacuateThinString(Map* map, HeapObject** slot,
-                                 ThinString* object, int object_size);
-
-  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                        ConsString* object, int object_size);
+  inline SlotCallbackResult EvacuateShortcutCandidate(Map* map,
+                                                      HeapObject** slot,
+                                                      ConsString* object,
+                                                      int object_size);
 
   void IterateAndScavengePromotedObject(HeapObject* target, int size);