Revert "[heap] Reland: Reuse object evacuation information for slot recording in Scavenger."
This reverts commit 0d66b4d85e.

Reason for revert: https://ci.chromium.org/p/v8/builders/luci.v8.ci/V8%20Linux64%20TSAN/22200

Original change's description:
> [heap] Reland: Reuse object evacuation information for slot recording in Scavenger.
>
> This reverts commit 136ecbb9bf.
>
> Bug: chromium:852420
> Change-Id: I4fab9d6ed7b18085352fa7488c2849b90588deaf
> Reviewed-on: https://chromium-review.googlesource.com/1189802
> Commit-Queue: Hannes Payer <hpayer@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#55467}

TBR=ulan@chromium.org,hpayer@chromium.org,mlippautz@chromium.org

Change-Id: I03991a02eb5e06e7db91f88462232dde4bd97f49
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:852420
Reviewed-on: https://chromium-review.googlesource.com/1194005
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55468}
This commit is contained in:
parent 0d66b4d85e
commit d6de4af573
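Editor's sketch for context (not part of the commit): the relanded change made SemiSpaceCopyObject and PromoteObject report where an evacuated object ended up (CopyAndForwardResult), and RememberedSetEntryNeeded turned that into the slot-recording decision, so callers no longer had to re-read the slot after evacuation. The toy program below mirrors only the enum and the mapping visible in the diff; the surrounding harness is invented for illustration.

// Illustrative toy model, not V8 code.
#include <cassert>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

enum class CopyAndForwardResult {
  SUCCESS_YOUNG_GENERATION,  // object survived inside the young generation
  SUCCESS_OLD_GENERATION,    // object was promoted to the old generation
  FAILURE                    // allocation failed; caller tries another path
};

// Mirrors Scavenger::RememberedSetEntryNeeded, which this revert removes:
// an old-to-new slot stays in the remembered set only if the target is
// still in the young generation.
SlotCallbackResult RememberedSetEntryNeeded(CopyAndForwardResult result) {
  assert(result != CopyAndForwardResult::FAILURE);
  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
                                                                  : REMOVE_SLOT;
}

int main() {
  assert(RememberedSetEntryNeeded(
             CopyAndForwardResult::SUCCESS_YOUNG_GENERATION) == KEEP_SLOT);
  assert(RememberedSetEntryNeeded(
             CopyAndForwardResult::SUCCESS_OLD_GENERATION) == REMOVE_SLOT);
  return 0;
}

After the revert, the evacuation helpers return bool/void again and callers re-derive the answer by re-reading the slot and checking Heap::InNewSpace or Heap::InToSpace, as the hunks below show.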
@@ -71,10 +71,8 @@ bool Scavenger::MigrateObject(Map* map, HeapObject* source, HeapObject* target,
   return true;
 }
 
-CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map* map,
-                                                    HeapObjectReference** slot,
-                                                    HeapObject* object,
-                                                    int object_size) {
+bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
+                                    HeapObject* object, int object_size) {
   DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
   AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
   AllocationResult allocation =
@@ -89,24 +87,19 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(Map* map,
       allocator_.FreeLast(NEW_SPACE, target, object_size);
       MapWord map_word = object->map_word();
       HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
-      DCHECK(!Heap::InFromSpace(*slot));
-      return Heap::InToSpace(*slot)
-                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
-                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
+      return true;
     }
     HeapObjectReference::Update(slot, target);
 
     copied_list_.Push(ObjectAndSize(target, object_size));
     copied_size_ += object_size;
-    return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
+    return true;
   }
-  return CopyAndForwardResult::FAILURE;
+  return false;
 }
 
-CopyAndForwardResult Scavenger::PromoteObject(Map* map,
-                                              HeapObjectReference** slot,
-                                              HeapObject* object,
-                                              int object_size) {
+bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
+                              HeapObject* object, int object_size) {
   AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
   AllocationResult allocation =
       allocator_.Allocate(OLD_SPACE, object_size, alignment);
@@ -120,88 +113,59 @@ CopyAndForwardResult Scavenger::PromoteObject(Map* map,
       allocator_.FreeLast(OLD_SPACE, target, object_size);
       MapWord map_word = object->map_word();
       HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
-      DCHECK(!Heap::InFromSpace(*slot));
-      return Heap::InToSpace(*slot)
-                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
-                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
+      return true;
     }
     HeapObjectReference::Update(slot, target);
     if (!ContainsOnlyData(map->visitor_id())) {
       promotion_list_.Push(ObjectAndSize(target, object_size));
     }
     promoted_size_ += object_size;
-    return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
+    return true;
   }
-  return CopyAndForwardResult::FAILURE;
+  return false;
 }
 
-SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
-    CopyAndForwardResult result) {
-  DCHECK_NE(CopyAndForwardResult::FAILURE, result);
-  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
-                                                                  : REMOVE_SLOT;
-}
-
-SlotCallbackResult Scavenger::EvacuateObjectDefault(Map* map,
-                                                    HeapObjectReference** slot,
-                                                    HeapObject* object,
-                                                    int object_size) {
+void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
+                                      HeapObject* object, int object_size) {
   SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
   SLOW_DCHECK(object->SizeFromMap(map) == object_size);
-  CopyAndForwardResult result;
-
   if (!heap()->ShouldBePromoted(object->address())) {
     // A semi-space copy may fail due to fragmentation. In that case, we
     // try to promote the object.
-    result = SemiSpaceCopyObject(map, slot, object, object_size);
-    if (result != CopyAndForwardResult::FAILURE) {
-      return RememberedSetEntryNeeded(result);
-    }
+    if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
  }
 
   // We may want to promote this object if the object was already semi-space
   // copied in a previes young generation GC or if the semi-space copy above
   // failed.
-  result = PromoteObject(map, slot, object, object_size);
-  if (result != CopyAndForwardResult::FAILURE) {
-    return RememberedSetEntryNeeded(result);
-  }
+  if (PromoteObject(map, slot, object, object_size)) return;
 
-  // If promotion failed, we try to copy the object to the other semi-space.
-  result = SemiSpaceCopyObject(map, slot, object, object_size);
-  if (result != CopyAndForwardResult::FAILURE) {
-    return RememberedSetEntryNeeded(result);
-  }
+  // If promotion failed, we try to copy the object to the other semi-space
+  if (SemiSpaceCopyObject(map, slot, object, object_size)) return;
 
   heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
   UNREACHABLE();
 }
 
-SlotCallbackResult Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
-                                                 ThinString* object,
-                                                 int object_size) {
+void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
+                                   ThinString* object, int object_size) {
   if (!is_incremental_marking_) {
-    // Loading actual is fine in a parallel setting since there is no write.
+    // Loading actual is fine in a parallel setting is there is no write.
     String* actual = object->actual();
     object->set_length(0);
     *slot = actual;
     // ThinStrings always refer to internalized strings, which are
     // always in old space.
     DCHECK(!Heap::InNewSpace(actual));
-    base::AsAtomicPointer::Release_Store(
+    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(actual).ToMap());
-    return REMOVE_SLOT;
+    return;
   }
 
-  return EvacuateObjectDefault(
-      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
+  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                        object, object_size);
 }
 
-SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
-                                                        HeapObject** slot,
-                                                        ConsString* object,
-                                                        int object_size) {
+void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                          ConsString* object, int object_size) {
   DCHECK(IsShortcutCandidate(map->instance_type()));
   if (!is_incremental_marking_ &&
       object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
@@ -210,38 +174,37 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map* map,
     *slot = first;
 
     if (!Heap::InNewSpace(first)) {
-      base::AsAtomicPointer::Release_Store(
+      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(first).ToMap());
-      return REMOVE_SLOT;
+      return;
     }
 
-    MapWord first_word = first->synchronized_map_word();
+    MapWord first_word = first->map_word();
     if (first_word.IsForwardingAddress()) {
       HeapObject* target = first_word.ToForwardingAddress();
 
       *slot = target;
-      base::AsAtomicPointer::Release_Store(
+      base::AsAtomicPointer::Relaxed_Store(
          reinterpret_cast<Map**>(object->address()),
          MapWord::FromForwardingAddress(target).ToMap());
-      return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
+      return;
     }
     Map* map = first_word.ToMap();
-    SlotCallbackResult result = EvacuateObjectDefault(
-        map, reinterpret_cast<HeapObjectReference**>(slot), first,
-        first->SizeFromMap(map));
-    base::AsAtomicPointer::Release_Store(
+    EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                          first, first->SizeFromMap(map));
+    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(*slot).ToMap());
-    return result;
+    return;
   }
 
-  return EvacuateObjectDefault(
-      map, reinterpret_cast<HeapObjectReference**>(slot), object, object_size);
+  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
+                        object, object_size);
 }
 
-SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
-                                             Map* map, HeapObject* source) {
+void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
+                               HeapObject* source) {
   SLOW_DCHECK(Heap::InFromSpace(source));
   SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
   int size = source->SizeFromMap(map);
@@ -251,21 +214,22 @@ SlotCallbackResult Scavenger::EvacuateObject(HeapObjectReference** slot,
     case kVisitThinString:
       // At the moment we don't allow weak pointers to thin strings.
       DCHECK(!(*slot)->IsWeakHeapObject());
-      return EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
-                                reinterpret_cast<ThinString*>(source), size);
+      EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
+                         reinterpret_cast<ThinString*>(source), size);
+      break;
     case kVisitShortcutCandidate:
       DCHECK(!(*slot)->IsWeakHeapObject());
       // At the moment we don't allow weak pointers to cons strings.
-      return EvacuateShortcutCandidate(
-          map, reinterpret_cast<HeapObject**>(slot),
-          reinterpret_cast<ConsString*>(source), size);
+      EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
+                                reinterpret_cast<ConsString*>(source), size);
+      break;
     default:
-      return EvacuateObjectDefault(map, slot, source, size);
+      EvacuateObjectDefault(map, slot, source, size);
+      break;
   }
 }
 
-SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
-                                             HeapObject* object) {
+void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
   DCHECK(Heap::InFromSpace(object));
 
   // Synchronized load that consumes the publishing CAS of MigrateObject.
@@ -282,15 +246,14 @@ SlotCallbackResult Scavenger::ScavengeObject(HeapObjectReference** p,
       DCHECK((*p)->IsStrongHeapObject());
       *p = HeapObjectReference::Strong(dest);
     }
-    DCHECK(Heap::InToSpace(dest) || !Heap::InNewSpace((dest)));
-    return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
+    return;
   }
 
   Map* map = first_word.ToMap();
   // AllocationMementos are unrooted and shouldn't survive a scavenge
   DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
   // Call the slow part of scavenge object.
-  return EvacuateObject(p, map, object);
+  EvacuateObject(p, map, object);
 }
 
 SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
@@ -304,10 +267,17 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
     DCHECK(success);
     DCHECK(heap_object->IsHeapObject());
 
-    SlotCallbackResult result = ScavengeObject(
-        reinterpret_cast<HeapObjectReference**>(slot), heap_object);
-    DCHECK_IMPLIES(result == REMOVE_SLOT, !Heap::InNewSpace(*slot));
-    return result;
+    ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);
+
+    object = *slot;
+    // If the object was in from space before and is after executing the
+    // callback in to space, the object is still live.
+    // Unfortunately, we do not know about the slot. It could be in a
+    // just freed free space object.
+    PageMemoryFence(object);
+    if (Heap::InToSpace(object)) {
+      return KEEP_SLOT;
+    }
   } else if (Heap::InToSpace(object)) {
     // Already updated slot. This can happen when processing of the work list
     // is interleaved with processing roots.
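Both sides of the EvacuateObjectDefault hunk above keep the same fallback chain, only the reporting differs: try a semi-space copy first (unless the object should be promoted), then promotion to old space, then the other semi-space, and treat a third failure as fatal out-of-memory. A self-contained editor's sketch of that control flow; every name in it is invented for illustration:

#include <cstdio>
#include <cstdlib>

// Illustrative stand-ins, not V8 APIs.
bool SemiSpaceCopy(bool succeed) { return succeed; }
bool Promote(bool succeed) { return succeed; }

// Mirrors the fallback order in EvacuateObjectDefault.
void EvacuateDefault(bool should_be_promoted, bool copy_ok, bool promote_ok) {
  if (!should_be_promoted) {
    // A semi-space copy may fail due to fragmentation; fall through to
    // promotion in that case.
    if (SemiSpaceCopy(copy_ok)) return;
  }
  if (Promote(promote_ok)) return;
  // If promotion failed, try the other semi-space before giving up.
  if (SemiSpaceCopy(copy_ok)) return;
  std::fprintf(stderr, "Scavenger: semi-space copy\n");  // fatal OOM in V8
  std::abort();
}

int main() {
  // Young object, copy succeeds: stays in the young generation.
  EvacuateDefault(/*should_be_promoted=*/false, /*copy_ok=*/true,
                  /*promote_ok=*/false);
  // Object that survived a previous scavenge: promoted to old space.
  EvacuateDefault(/*should_be_promoted=*/true, /*copy_ok=*/false,
                  /*promote_ok=*/true);
  return 0;
}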
@@ -53,13 +53,15 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
     scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
 
     if (Heap::InFromSpace(target)) {
-      SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
+      scavenger_->ScavengeObject(slot, target);
       bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
       USE(success);
       DCHECK(success);
       scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));
 
-      if (result == KEEP_SLOT) {
+      if (Heap::InNewSpace(target)) {
         SLOW_DCHECK(target->IsHeapObject());
+        SLOW_DCHECK(Heap::InToSpace(target));
         RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                           slot_address);
       }
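The visitor hunk above restores the pre-reland recording strategy: after ScavengeObject has possibly moved the target, re-read the slot and insert an OLD_TO_NEW remembered-set entry only if the target still lives in new space, instead of keying the decision off a returned SlotCallbackResult. A minimal model of that re-check; the types are illustrative stand-ins, not V8's:

#include <cassert>

// Illustrative stand-ins, not V8 types.
enum Space { NEW_SPACE, OLD_SPACE };
struct Object { Space space; };

// Stand-in for Scavenger::ScavengeObject: evacuation may promote the
// object out of the young generation.
void Scavenge(Object* target, bool promote) {
  if (promote) target->space = OLD_SPACE;
}

// Stand-in for the restored visitor logic: decide about the remembered-set
// entry by re-inspecting the target after the move.
bool NeedsRememberedSetEntry(Object** slot, bool promote) {
  Scavenge(*slot, promote);
  Object* target = *slot;  // re-read the slot after evacuation
  return target->space == NEW_SPACE;
}

int main() {
  Object a{NEW_SPACE};
  Object* slot = &a;
  assert(NeedsRememberedSetEntry(&slot, /*promote=*/false));  // keep entry
  Object b{NEW_SPACE};
  slot = &b;
  assert(!NeedsRememberedSetEntry(&slot, /*promote=*/true));  // drop entry
  return 0;
}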
@@ -16,12 +16,6 @@ namespace internal {
 
 class OneshotBarrier;
 
-enum class CopyAndForwardResult {
-  SUCCESS_YOUNG_GENERATION,
-  SUCCESS_OLD_GENERATION,
-  FAILURE
-};
-
 class Scavenger {
  public:
   static const int kCopiedListSegmentSize = 256;
@@ -67,43 +61,34 @@ class Scavenger {
 
   // Scavenges an object |object| referenced from slot |p|. |object| is required
   // to be in from space.
-  inline SlotCallbackResult ScavengeObject(HeapObjectReference** p,
-                                           HeapObject* object);
+  inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);
 
   // Copies |source| to |target| and sets the forwarding pointer in |source|.
   V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
                                int size);
 
-  V8_INLINE SlotCallbackResult
-  RememberedSetEntryNeeded(CopyAndForwardResult result);
-
-  V8_INLINE CopyAndForwardResult SemiSpaceCopyObject(Map* map,
-                                                     HeapObjectReference** slot,
-                                                     HeapObject* object,
-                                                     int object_size);
+  V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
+                                     HeapObject* object, int object_size);
 
-  V8_INLINE CopyAndForwardResult PromoteObject(Map* map,
-                                               HeapObjectReference** slot,
-                                               HeapObject* object,
-                                               int object_size);
+  V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
+                               HeapObject* object, int object_size);
 
-  V8_INLINE SlotCallbackResult EvacuateObject(HeapObjectReference** slot,
-                                              Map* map, HeapObject* source);
+  V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
+                                HeapObject* source);
 
   // Different cases for object evacuation.
-  V8_INLINE SlotCallbackResult EvacuateObjectDefault(Map* map,
-                                                     HeapObjectReference** slot,
-                                                     HeapObject* object,
-                                                     int object_size);
+  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
+                                       HeapObject* object, int object_size);
 
-  inline SlotCallbackResult EvacuateThinString(Map* map, HeapObject** slot,
-                                               ThinString* object,
-                                               int object_size);
+  V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
+                                    JSFunction* object, int object_size);
 
-  inline SlotCallbackResult EvacuateShortcutCandidate(Map* map,
-                                                      HeapObject** slot,
-                                                      ConsString* object,
-                                                      int object_size);
+  inline void EvacuateThinString(Map* map, HeapObject** slot,
+                                 ThinString* object, int object_size);
+
+  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
+                                        ConsString* object, int object_size);
 
   void IterateAndScavengePromotedObject(HeapObject* target, int size);