[heap] Filter out stale left-trimmed handles for scavenges
The missing part from https://codereview.chromium.org/2078403002/

R=jochen@chromium.org
BUG=chromium:621869
LOG=N

Review-Url: https://codereview.chromium.org/2077353004
Cr-Commit-Position: refs/heads/master@{#37184}
Parent: d4d4703266
Commit: 7a88ff3cc0
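Background for the change: when JavaScript left-trims an array (for example via Array.prototype.shift), V8 moves the object's start address forward and overwrites the vacated words with filler objects of one word, two words, or arbitrary free-space size. A handle recorded before the trim then points at a filler instead of the live array. The sketch below is a minimal, self-contained model of that layout and of the filler walk that the DEBUG check in this patch performs; the names and the one-tag-per-word layout are invented for illustration and are not V8's actual representation.

#include <cassert>
#include <cstddef>
#include <cstdio>

// Each heap word is modeled by a tag. A one-word filler occupies one
// word, a two-word filler occupies two (header + body), and the array
// header marks the relocated object. All names here are hypothetical.
enum Word { kOneWordFiller, kTwoWordFillerHeader, kFillerBody, kArrayHeader };

// Walk forward over fillers until a non-filler header is reached,
// mirroring the DCHECK loop in Heap::PurgeLeftTrimmedObject: each
// filler's "map" encodes how far to skip. A well-formed walk never
// lands on a filler body word.
size_t SkipFillers(const Word* heap, size_t index) {
  while (heap[index] != kArrayHeader) {
    index += (heap[index] == kOneWordFiller) ? 1 : 2;
  }
  return index;
}

int main() {
  // Layout after trimming three words off the front of an array:
  // [1-word filler][2-word filler: header, body][array header ...]
  const Word heap[] = {kOneWordFiller, kTwoWordFillerHeader, kFillerBody,
                       kArrayHeader};
  assert(SkipFillers(heap, 0) == 3);
  std::printf("relocated object found at word %zu\n", SkipFillers(heap, 0));
  return 0;
}

Walking in filler-sized steps must terminate at the relocated object's header, which is exactly what the DCHECK in the new Heap::PurgeLeftTrimmedObject asserts.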
@@ -476,6 +476,31 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
             static_cast<size_t>(byte_size / kPointerSize));
 }

+bool Heap::PurgeLeftTrimmedObject(Object** object) {
+  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
+  const MapWord map_word = current->map_word();
+  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
+#ifdef DEBUG
+    // We need to find a FixedArrayBase map after walking the fillers.
+    while (current->IsFiller()) {
+      Address next = reinterpret_cast<Address>(current);
+      if (current->map() == one_pointer_filler_map()) {
+        next += kPointerSize;
+      } else if (current->map() == two_pointer_filler_map()) {
+        next += 2 * kPointerSize;
+      } else {
+        next += current->Size();
+      }
+      current = reinterpret_cast<HeapObject*>(next);
+    }
+    DCHECK(current->IsFixedArrayBase());
+#endif  // DEBUG
+    *object = nullptr;
+    return true;
+  }
+  return false;
+}
+
 template <Heap::FindMementoMode mode>
 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
   // Check if there is potentially a memento behind the object. If
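A note on the structure of the new helper: in release builds the logic reduces to the outer check. A stale slot is recognized by its filler map, and the !map_word.IsForwardingAddress() guard keeps the helper from misreading a map word that currently holds a forwarding pointer, as happens for objects already evacuated during a scavenge. The filler walk runs only under #ifdef DEBUG, to assert that skipping the fillers really lands on the trimmed FixedArrayBase.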
@@ -626,6 +626,12 @@ class Heap {
   // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
   static int GetStaticVisitorIdForMap(Map* map);

+  // We cannot avoid stale handles to left-trimmed objects, but can only make
+  // sure all handles still needed are updated. Filter out a stale pointer
+  // and clear the slot to allow post processing of handles (needed because
+  // the sweeper might actually free the underlying page).
+  inline bool PurgeLeftTrimmedObject(Object** object);
+
   // Notifies the heap that is ok to start marking or other activities that
   // should not happen during deserialization.
   void NotifyDeserializationComplete();
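The contract is slot-based rather than object-based: the caller hands in the address of the root slot, and on a hit the helper both returns true and nulls the slot, so that later handle post-processing never dereferences into a page the sweeper may already have released.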
@@ -1435,31 +1435,7 @@ class RootMarkingVisitor : public ObjectVisitor {

   HeapObject* object = HeapObject::cast(*p);

-  // We cannot avoid stale handles to left-trimmed objects, but can only make
-  // sure all handles still needed are updated. Filter out any stale pointers
-  // and clear the slot to allow post processing of handles (needed because
-  // the sweeper might actually free the underlying page).
-  if (object->IsFiller()) {
-#ifdef DEBUG
-    // We need to find a FixedArrayBase map after walking the fillers.
-    Heap* heap = collector_->heap();
-    HeapObject* current = object;
-    while (current->IsFiller()) {
-      Address next = reinterpret_cast<Address>(current);
-      if (current->map() == heap->one_pointer_filler_map()) {
-        next += kPointerSize;
-      } else if (current->map() == heap->two_pointer_filler_map()) {
-        next += 2 * kPointerSize;
-      } else {
-        next += current->Size();
-      }
-      current = reinterpret_cast<HeapObject*>(next);
-    }
-    DCHECK(current->IsFixedArrayBase());
-#endif  // DEBUG
-    *p = nullptr;
-    return;
-  }
+  if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;

   MarkBit mark_bit = Marking::MarkBitFrom(object);
   if (Marking::IsBlackOrGrey(mark_bit)) return;
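This hunk is deduplication: the filler walk previously inlined here (the full-GC half of the fix, apparently from the earlier CL referenced in the commit message) is replaced by a call to the shared Heap::PurgeLeftTrimmedObject, so marking and scavenging now filter stale root slots identically.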
@@ -444,6 +444,9 @@ void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
 void ScavengeVisitor::ScavengePointer(Object** p) {
   Object* object = *p;
   if (!heap_->InNewSpace(object)) return;
+
+  if (heap_->PurgeLeftTrimmedObject(p)) return;
+
   Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                             reinterpret_cast<HeapObject*>(object));
 }
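Ordering matters here: the purge runs after the cheap InNewSpace filter but before Scavenger::ScavengeObject, which would otherwise interpret the filler's map word as a map or forwarding pointer and process a left-trimmed slot as if it were live. This call site is the scavenge-side gap that the commit title refers to.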
@@ -1274,8 +1274,7 @@ Map* MapWord::ToMap() {
   return reinterpret_cast<Map*>(value_);
 }

-
-bool MapWord::IsForwardingAddress() {
+bool MapWord::IsForwardingAddress() const {
   return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
 }
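The const-qualification above is mechanical, but the predicate is worth unpacking. In V8's tagging scheme, ordinary heap pointers (including the Map pointer held by a map word) carry a low tag bit of 1 while Smis carry 0, and a forwarding address is stored in the map word with the tag stripped, so HAS_SMI_TAG distinguishes the two states. Below is a minimal sketch of that bit trick, with simplified stand-in constants rather than V8's real definitions.

#include <cassert>
#include <cstdint>

// Simplified model of V8's pointer tagging (illustrative constants,
// not the real kHeapObjectTag/kSmiTagMask definitions).
constexpr uintptr_t kHeapObjectTag = 1;  // low bit 1 => heap pointer
constexpr uintptr_t kSmiTagMask = 1;     // low bit 0 => Smi

// A map word normally holds a tagged Map pointer. During scavenge, an
// evacuated object's map word is overwritten with the destination
// address stored untagged, so its low bit becomes 0.
uintptr_t FromForwardingAddress(uintptr_t tagged_destination) {
  return tagged_destination - kHeapObjectTag;
}

bool IsForwardingAddress(uintptr_t map_word) {
  return (map_word & kSmiTagMask) == 0;  // HAS_SMI_TAG in the real code
}

int main() {
  uintptr_t map_pointer = 0x1000 + kHeapObjectTag;  // ordinary map word
  assert(!IsForwardingAddress(map_pointer));

  uintptr_t destination = 0x2000 + kHeapObjectTag;  // tagged new location
  assert(IsForwardingAddress(FromForwardingAddress(destination)));
  return 0;
}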
@@ -1490,7 +1490,7 @@ class MapWord BASE_EMBEDDED {
   // True if this map word is a forwarding address for a scavenge
   // collection. Only valid during a scavenge collection (specifically,
   // when all map words are heap object pointers, i.e. not during a full GC).
-  inline bool IsForwardingAddress();
+  inline bool IsForwardingAddress() const;

   // Create a map word from a forwarding address.
   static inline MapWord FromForwardingAddress(HeapObject* object);
test/mjsunit/regress/regress-621869.js (new file, 18 lines)
@@ -0,0 +1,18 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-gc
+
+var o0 = [];
+var o1 = [];
+var cnt = 0;
+var only_scavenge = true;
+o1.__defineGetter__(0, function() {
+  if (cnt++ > 2) return;
+  o0.shift();
+  gc(only_scavenge);
+  o0.push((64));
+  o0.concat(o1);
+});
+o1[0];
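The regression test drives exactly the fixed path: inside the getter, o0.shift() left-trims o0's backing store, gc(only_scavenge) requests a minor GC (with --expose-gc a truthy argument asks the gc extension for a scavenge rather than a full collection, hence the variable name), and the following push and concat operate while a stale handle to the trimmed store can still be on the stack. It can be run through the mjsunit harness or directly, e.g. d8 --expose-gc test/mjsunit/regress/regress-621869.js.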