[heap] Separate scavenger functionality into own file.
This moves scavenging functionality into a separate component so that
neither the scavenger nor objects-visiting needs to be exposed outside
the heap.

R=hpayer@chromium.org,mlippautz@chromium.org

Review URL: https://codereview.chromium.org/1323993004

Cr-Commit-Position: refs/heads/master@{#30712}
parent 5ee2ea3cae
commit ea25bf05f8
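In short, scavenging moves from static members on Heap into a heap-owned
Scavenger component. A minimal sketch of the resulting ownership contract,
distilled from the SetUp/TearDown/friend hunks below (simplified toy types,
not the literal V8 classes):

#include <cstddef>

class Heap;

// Stand-in for the new component from src/heap/scavenger.h.
class Scavenger {
 public:
  explicit Scavenger(Heap* heap) : heap_(heap) {}

 private:
  Heap* heap_;
};

class Heap {
 public:
  // Created during heap setup, destroyed during teardown; the pointer is a
  // private member, so includers of heap.h never see scavenger internals.
  bool SetUp() { scavenge_collector_ = new Scavenger(this); return true; }
  void TearDown() { delete scavenge_collector_; scavenge_collector_ = nullptr; }

 private:
  Scavenger* scavenge_collector_ = nullptr;
  friend class Scavenger;  // the scavenger may reach back into heap internals
};

int main() {
  Heap heap;
  heap.SetUp();
  heap.TearDown();
}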
BUILD.gn (3 lines changed)

@@ -984,6 +984,9 @@ source_set("v8_base") {
       "src/heap/objects-visiting-inl.h",
       "src/heap/objects-visiting.cc",
       "src/heap/objects-visiting.h",
+      "src/heap/scavenger-inl.h",
+      "src/heap/scavenger.cc",
+      "src/heap/scavenger.h",
       "src/heap/spaces-inl.h",
       "src/heap/spaces.cc",
       "src/heap/spaces.h",
src/heap/heap-inl.h

@@ -11,6 +11,7 @@
 #include "src/counters.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking-inl.h"
+#include "src/heap/objects-visiting.h"
 #include "src/heap/spaces-inl.h"
 #include "src/heap/store-buffer.h"
 #include "src/heap/store-buffer-inl.h"
@@ -460,9 +461,6 @@ void Heap::MoveBlock(Address dst, Address src, int byte_size) {
 }


-void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }
-
-
 AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
   // Check if there is potentially a memento behind the object. If
   // the last word of the memento is on another page we return
@@ -520,33 +518,6 @@ void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
 }


-void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
-  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-
-  // We use the first word (where the map pointer usually is) of a heap
-  // object to record the forwarding pointer. A forwarding pointer can
-  // point to an old space, the code space, or the to space of the new
-  // generation.
-  MapWord first_word = object->map_word();
-
-  // If the first word is a forwarding address, the object has already been
-  // copied.
-  if (first_word.IsForwardingAddress()) {
-    HeapObject* dest = first_word.ToForwardingAddress();
-    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
-    *p = dest;
-    return;
-  }
-
-  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);
-
-  // AllocationMementos are unrooted and shouldn't survive a scavenge
-  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
-  // Call the slow part of scavenge object.
-  return ScavengeObjectSlow(p, object);
-}
-
-
 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                           const v8::GCCallbackFlags callbackFlags) {
   const char* collector_reason = NULL;
src/heap/heap.cc (507 lines changed)
@@ -27,6 +27,7 @@
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
+#include "src/heap/scavenger-inl.h"
 #include "src/heap/store-buffer.h"
 #include "src/heap-profiler.h"
 #include "src/interpreter/interpreter.h"
@@ -120,6 +121,7 @@ Heap::Heap()
       sweeping_time_(0.0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
+      scavenge_collector_(nullptr),
       mark_compact_collector_(this),
       store_buffer_(this),
       incremental_marking_(this),
@@ -1393,30 +1395,6 @@ void Heap::MarkCompactPrologue() {
 }


-// Helper class for copying HeapObjects
-class ScavengeVisitor : public ObjectVisitor {
- public:
-  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
-
-  void VisitPointer(Object** p) { ScavengePointer(p); }
-
-  void VisitPointers(Object** start, Object** end) {
-    // Copy all HeapObject pointers in [start, end)
-    for (Object** p = start; p < end; p++) ScavengePointer(p);
-  }
-
- private:
-  void ScavengePointer(Object** p) {
-    Object* object = *p;
-    if (!heap_->InNewSpace(object)) return;
-    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                         reinterpret_cast<HeapObject*>(object));
-  }
-
-  Heap* heap_;
-};
-
-
 #ifdef VERIFY_HEAP
 // Visitor class to verify pointers in code or data space do not point into
 // new space.
@@ -1561,7 +1539,7 @@ void Heap::Scavenge() {
   // Used for updating survived_since_last_expansion_ at function end.
   intptr_t survived_watermark = PromotedSpaceSizeOfObjects();

-  SelectScavengingVisitorsTable();
+  scavenge_collector_->SelectScavengingVisitorsTable();

   array_buffer_tracker()->PrepareDiscoveryInNewSpace();
@@ -1603,7 +1581,7 @@
         GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
     StoreBufferRebuildScope scope(this, store_buffer(),
                                   &ScavengeStoreBufferCallback);
-    store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
+    store_buffer()->IteratePointersToNewSpace(&Scavenger::ScavengeObject);
   }

   {
@@ -1832,17 +1810,6 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
 }


-class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
- public:
-  static inline void VisitPointer(Heap* heap, Object** p) {
-    Object* object = *p;
-    if (!heap->InNewSpace(object)) return;
-    Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
-                         reinterpret_cast<HeapObject*>(object));
-  }
-};
-
-
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
                          Address new_space_front) {
   do {
@@ -1854,7 +1821,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     if (!NewSpacePage::IsAtEnd(new_space_front)) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
       new_space_front +=
-          NewSpaceScavenger::IterateBody(object->map(), object);
+          StaticScavengeVisitor::IterateBody(object->map(), object);
     } else {
       new_space_front =
           NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
@@ -1899,7 +1866,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
           IterateAndMarkPointersToFromSpace(
               target, obj_address + offset,
               obj_address + end_of_region_offset, record_slots,
-              &ScavengeObject);
+              &Scavenger::ScavengeObject);
         }
         offset = end_of_region_offset;
       }
@@ -1907,7 +1874,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 #endif
       IterateAndMarkPointersToFromSpace(target, obj_address,
                                         obj_address + size, record_slots,
-                                        &ScavengeObject);
+                                        &Scavenger::ScavengeObject);
 #if V8_DOUBLE_FIELDS_UNBOXING
     }
 #endif
@@ -1999,457 +1966,6 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
 }


-enum LoggingAndProfiling {
-  LOGGING_AND_PROFILING_ENABLED,
-  LOGGING_AND_PROFILING_DISABLED
-};
-
-
-enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
-
-
-template <MarksHandling marks_handling,
-          LoggingAndProfiling logging_and_profiling_mode>
-class ScavengingVisitor : public StaticVisitorBase {
- public:
-  static void Initialize() {
-    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
-    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
-    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
-    table_.Register(kVisitByteArray, &EvacuateByteArray);
-    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
-    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
-    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
-    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
-
-    table_.Register(
-        kVisitNativeContext,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            Context::kSize>);
-
-    table_.Register(
-        kVisitConsString,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            ConsString::kSize>);
-
-    table_.Register(
-        kVisitSlicedString,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            SlicedString::kSize>);
-
-    table_.Register(
-        kVisitSymbol,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            Symbol::kSize>);
-
-    table_.Register(
-        kVisitSharedFunctionInfo,
-        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-            SharedFunctionInfo::kSize>);
-
-    table_.Register(kVisitJSWeakCollection,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSTypedArray,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSDataView,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    table_.Register(kVisitJSRegExp,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
-    if (marks_handling == IGNORE_MARKS) {
-      table_.Register(
-          kVisitJSFunction,
-          &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-              JSFunction::kSize>);
-    } else {
-      table_.Register(kVisitJSFunction, &EvacuateJSFunction);
-    }
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
-                                   kVisitDataObject, kVisitDataObjectGeneric>();
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
-                                   kVisitJSObject, kVisitJSObjectGeneric>();
-
-    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
-                                   kVisitStruct, kVisitStructGeneric>();
-  }
-
-  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
-    return &table_;
-  }
-
- private:
-  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-
-  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
-    bool should_record = false;
-#ifdef DEBUG
-    should_record = FLAG_heap_stats;
-#endif
-    should_record = should_record || FLAG_log_gc;
-    if (should_record) {
-      if (heap->new_space()->Contains(obj)) {
-        heap->new_space()->RecordAllocation(obj);
-      } else {
-        heap->new_space()->RecordPromotion(obj);
-      }
-    }
-  }
-
-  // Helper function used by CopyObject to copy a source object to an
-  // allocated target object and update the forwarding pointer in the source
-  // object. Returns the target object.
-  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
-                                   HeapObject* target, int size)) {
-    // If we migrate into to-space, then the to-space top pointer should be
-    // right after the target object. Incorporate double alignment
-    // over-allocation.
-    DCHECK(!heap->InToSpace(target) ||
-           target->address() + size == heap->new_space()->top() ||
-           target->address() + size + kPointerSize == heap->new_space()->top());
-
-    // Make sure that we do not overwrite the promotion queue which is at
-    // the end of to-space.
-    DCHECK(!heap->InToSpace(target) ||
-           heap->promotion_queue()->IsBelowPromotionQueue(
-               heap->new_space()->top()));
-
-    // Copy the content of source to target.
-    heap->CopyBlock(target->address(), source->address(), size);
-
-    // Set the forwarding address.
-    source->set_map_word(MapWord::FromForwardingAddress(target));
-
-    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
-      // Update NewSpace stats if necessary.
-      RecordCopiedObject(heap, target);
-      heap->OnMoveEvent(target, source, size);
-    }
-
-    if (marks_handling == TRANSFER_MARKS) {
-      if (Marking::TransferColor(source, target)) {
-        MemoryChunk::IncrementLiveBytesFromGC(target, size);
-      }
-    }
-  }
-
-  template <AllocationAlignment alignment>
-  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
-                                         HeapObject* object, int object_size) {
-    Heap* heap = map->GetHeap();
-
-    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
-    AllocationResult allocation =
-        heap->new_space()->AllocateRaw(object_size, alignment);
-
-    HeapObject* target = NULL;  // Initialization to please compiler.
-    if (allocation.To(&target)) {
-      // Order is important here: Set the promotion limit before storing a
-      // filler for double alignment or migrating the object. Otherwise we
-      // may end up overwriting promotion queue entries when we migrate the
-      // object.
-      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
-
-      MigrateObject(heap, object, target, object_size);
-
-      // Update slot to new target.
-      *slot = target;
-
-      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
-      return true;
-    }
-    return false;
-  }
-
-
-  template <ObjectContents object_contents, AllocationAlignment alignment>
-  static inline bool PromoteObject(Map* map, HeapObject** slot,
-                                   HeapObject* object, int object_size) {
-    Heap* heap = map->GetHeap();
-
-    AllocationResult allocation =
-        heap->old_space()->AllocateRaw(object_size, alignment);
-
-    HeapObject* target = NULL;  // Initialization to please compiler.
-    if (allocation.To(&target)) {
-      MigrateObject(heap, object, target, object_size);
-
-      // Update slot to new target.
-      *slot = target;
-
-      if (object_contents == POINTER_OBJECT) {
-        if (map->instance_type() == JS_FUNCTION_TYPE) {
-          heap->promotion_queue()->insert(target,
-                                          JSFunction::kNonWeakFieldsEndOffset);
-        } else {
-          heap->promotion_queue()->insert(target, object_size);
-        }
-      }
-      heap->IncrementPromotedObjectsSize(object_size);
-      return true;
-    }
-    return false;
-  }
-
-
-  template <ObjectContents object_contents, AllocationAlignment alignment>
-  static inline void EvacuateObject(Map* map, HeapObject** slot,
-                                    HeapObject* object, int object_size) {
-    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
-    SLOW_DCHECK(object->Size() == object_size);
-    Heap* heap = map->GetHeap();
-
-    if (!heap->ShouldBePromoted(object->address(), object_size)) {
-      // A semi-space copy may fail due to fragmentation. In that case, we
-      // try to promote the object.
-      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
-        return;
-      }
-    }
-
-    if (PromoteObject<object_contents, alignment>(map, slot, object,
-                                                  object_size)) {
-      return;
-    }
-
-    // If promotion failed, we try to copy the object to the other semi-space
-    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;
-
-    UNREACHABLE();
-  }
-
-
-  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
-        JSFunction::kSize>(map, slot, object);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-
-    MarkBit mark_bit = Marking::MarkBitFrom(target);
-    if (Marking::IsBlack(mark_bit)) {
-      // This object is black and it might not be rescanned by marker.
-      // We should explicitly record code entry slot for compaction because
-      // promotion queue processing (IterateAndMarkPointersToFromSpace) will
-      // miss it as it is not HeapObject-tagged.
-      Address code_entry_slot =
-          target->address() + JSFunction::kCodeEntryOffset;
-      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
-      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
-          target, code_entry_slot, code);
-    }
-  }
-
-
-  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
-    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
-                                                 object_size);
-  }
-
-
-  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
-    int object_size = FixedDoubleArray::SizeFor(length);
-    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
-                                             HeapObject* object) {
-    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    FixedTypedArrayBase* target =
-        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
-    if (target->base_pointer() != Smi::FromInt(0))
-      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
-  }
-
-
-  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
-                                               HeapObject* object) {
-    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
-    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
-
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    FixedTypedArrayBase* target =
-        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
-    if (target->base_pointer() != Smi::FromInt(0))
-      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
-  }
-
-
-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) {
-      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
-    }
-  }
-
-
-  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
-                                       HeapObject* object) {
-    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int object_size = SeqOneByteString::cast(object)
-                          ->SeqOneByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
-                                              HeapObject* object) {
-    int object_size = SeqTwoByteString::cast(object)
-                          ->SeqTwoByteStringSize(map->instance_type());
-    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
-  }
-
-
-  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
-                                               HeapObject* object) {
-    DCHECK(IsShortcutCandidate(map->instance_type()));
-
-    Heap* heap = map->GetHeap();
-
-    if (marks_handling == IGNORE_MARKS &&
-        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
-      HeapObject* first =
-          HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-      *slot = first;
-
-      if (!heap->InNewSpace(first)) {
-        object->set_map_word(MapWord::FromForwardingAddress(first));
-        return;
-      }
-
-      MapWord first_word = first->map_word();
-      if (first_word.IsForwardingAddress()) {
-        HeapObject* target = first_word.ToForwardingAddress();
-
-        *slot = target;
-        object->set_map_word(MapWord::FromForwardingAddress(target));
-        return;
-      }
-
-      Heap::ScavengeObjectSlow(slot, first);
-      object->set_map_word(MapWord::FromForwardingAddress(*slot));
-      return;
-    }
-
-    int object_size = ConsString::kSize;
-    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
-                                                 object_size);
-  }
-
-  template <ObjectContents object_contents>
-  class ObjectEvacuationStrategy {
-   public:
-    template <int object_size>
-    static inline void VisitSpecialized(Map* map, HeapObject** slot,
-                                        HeapObject* object) {
-      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
-                                                    object_size);
-    }
-
-    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
-      int object_size = map->instance_size();
-      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
-                                                    object_size);
-    }
-  };
-
-  static VisitorDispatchTable<ScavengingCallback> table_;
-};
-
-
-template <MarksHandling marks_handling,
-          LoggingAndProfiling logging_and_profiling_mode>
-VisitorDispatchTable<ScavengingCallback>
-    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
-
-
-static void InitializeScavengingVisitorsTables() {
-  ScavengingVisitor<TRANSFER_MARKS,
-                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
-  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
-  ScavengingVisitor<TRANSFER_MARKS,
-                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
-  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
-}
-
-
-void Heap::SelectScavengingVisitorsTable() {
-  bool logging_and_profiling =
-      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
-      isolate()->cpu_profiler()->is_profiling() ||
-      (isolate()->heap_profiler() != NULL &&
-       isolate()->heap_profiler()->is_tracking_object_moves());
-
-  if (!incremental_marking()->IsMarking()) {
-    if (!logging_and_profiling) {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable());
-    } else {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable());
-    }
-  } else {
-    if (!logging_and_profiling) {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          TRANSFER_MARKS, LOGGING_AND_PROFILING_DISABLED>::GetTable());
-    } else {
-      scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
-          TRANSFER_MARKS, LOGGING_AND_PROFILING_ENABLED>::GetTable());
-    }
-
-    if (incremental_marking()->IsCompacting()) {
-      // When compacting forbid short-circuiting of cons-strings.
-      // Scavenging code relies on the fact that new space object
-      // can't be evacuated into evacuation candidate but
-      // short-circuiting violates this assumption.
-      scavenging_visitors_table_.Register(
-          StaticVisitorBase::kVisitShortcutCandidate,
-          scavenging_visitors_table_.GetVisitorById(
-              StaticVisitorBase::kVisitConsString));
-    }
-  }
-}
-
-
-void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
-  MapWord first_word = object->map_word();
-  SLOW_DCHECK(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  map->GetHeap()->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
-}
-
-
 void Heap::ConfigureInitialOldGenerationSize() {
   if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
     old_generation_allocation_limit_ =
@@ -5478,8 +4994,8 @@ void Heap::DisableInlineAllocation() {
 V8_DECLARE_ONCE(initialize_gc_once);

 static void InitializeGCOnce() {
-  InitializeScavengingVisitorsTables();
-  NewSpaceScavenger::Initialize();
+  Scavenger::Initialize();
+  StaticScavengeVisitor::Initialize();
   MarkCompactCollector::Initialize();
 }

@@ -5558,6 +5074,8 @@ bool Heap::SetUp() {

   tracer_ = new GCTracer(this);

+  scavenge_collector_ = new Scavenger(this);
+
   memory_reducer_ = new MemoryReducer(this);

   object_stats_ = new ObjectStats(this);
@@ -5669,6 +5187,9 @@ void Heap::TearDown() {
     PrintAlloctionsHash();
   }

+  delete scavenge_collector_;
+  scavenge_collector_ = nullptr;
+
   if (memory_reducer_ != nullptr) {
     memory_reducer_->TearDown();
     delete memory_reducer_;
src/heap/heap.h

@@ -15,7 +15,6 @@
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact.h"
-#include "src/heap/objects-visiting.h"
 #include "src/heap/spaces.h"
 #include "src/heap/store-buffer.h"
 #include "src/list.h"
@@ -427,6 +426,7 @@ class HeapStats;
 class Isolate;
 class MemoryReducer;
 class ObjectStats;
+class Scavenger;
 class WeakObjectRetainer;


@@ -535,10 +535,6 @@ class PromotionQueue {
 };


-typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
-                                   HeapObject* object);
-
-
 enum ArrayStorageAllocationMode {
   DONT_INITIALIZE_ARRAY_ELEMENTS,
   INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
|
||||
template <typename T>
|
||||
static inline bool IsOneByte(T t, int chars);
|
||||
|
||||
// Callback function passed to Heap::Iterate etc. Copies an object if
|
||||
// necessary, the object might be promoted to an old space. The caller must
|
||||
// ensure the precondition that the object is (a) a heap object and (b) in
|
||||
// the heap's from space.
|
||||
static inline void ScavengePointer(HeapObject** p);
|
||||
static inline void ScavengeObject(HeapObject** p, HeapObject* object);
|
||||
|
||||
// Slow part of scavenge object.
|
||||
static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
|
||||
|
||||
static void FatalProcessOutOfMemory(const char* location,
|
||||
bool take_snapshot = false);
|
||||
|
||||
@@ -1776,8 +1762,6 @@ class Heap {

   void ConfigureInitialOldGenerationSize();

-  void SelectScavengingVisitorsTable();
-
   bool HasLowYoungGenerationAllocationRate();
   bool HasLowOldGenerationAllocationRate();
   double YoungGenerationMutatorUtilization();
@@ -2259,6 +2243,8 @@ class Heap {
   // Last time a garbage collection happened.
   double last_gc_time_;

+  Scavenger* scavenge_collector_;
+
   MarkCompactCollector mark_compact_collector_;

   StoreBuffer store_buffer_;
@@ -2318,8 +2304,6 @@ class Heap {

   ExternalStringTable external_string_table_;

-  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
-
   MemoryChunk* chunks_queued_for_free_;

   size_t concurrent_unmapping_tasks_active_;
@@ -2348,6 +2332,7 @@ class Heap {
   friend class MarkCompactMarkingVisitor;
   friend class ObjectStatsVisitor;
   friend class Page;
+  friend class Scavenger;
   friend class StoreBuffer;

   // The allocator interface.
src/heap/object-stats.h

@@ -6,6 +6,7 @@
 #define V8_HEAP_OBJECT_STATS_H_

 #include "src/heap/heap.h"
+#include "src/heap/objects-visiting.h"
 #include "src/objects.h"

 namespace v8 {
src/heap/scavenger-inl.h (new file, 51 lines)

@@ -0,0 +1,51 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_INL_H_
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"

namespace v8 {
namespace internal {

void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer. A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  Heap::UpdateAllocationSiteFeedback(object, Heap::IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


// static
void StaticScavengeVisitor::VisitPointer(Heap* heap, Object** p) {
  Object* object = *p;
  if (!heap->InNewSpace(object)) return;
  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_INL_H_
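The fast path above relies on overwriting the object's first word (normally
the map pointer) with a forwarding address once the object has been copied.
A self-contained toy model of that encoding, for illustration only — V8's
actual MapWord tagging scheme differs:

#include <cassert>
#include <cstdint>

// Toy model of the map-word trick: one header word is either a pointer to
// the object's type descriptor ("map") or, after evacuation, the forwarding
// address with its low bit set as a marker.
struct Obj {
  uintptr_t header;

  bool IsForwarded() const { return header & 1; }
  Obj* ForwardingAddress() const {
    assert(IsForwarded());
    return reinterpret_cast<Obj*>(header & ~uintptr_t{1});
  }
  void SetForwardingAddress(Obj* target) {
    header = reinterpret_cast<uintptr_t>(target) | 1;
  }
};

int main() {
  Obj from{0}, to{0};
  assert(!from.IsForwarded());      // first visit: header still holds a "map"
  from.SetForwardingAddress(&to);   // evacuate: overwrite map with pointer
  assert(from.IsForwarded());       // later visits take the fast path
  assert(from.ForwardingAddress() == &to);
}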
src/heap/scavenger.cc (new file, 496 lines)

@@ -0,0 +1,496 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/scavenger.h"

#include "src/contexts.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/scavenger-inl.h"
#include "src/isolate.h"
#include "src/log.h"

namespace v8 {
namespace internal {

enum LoggingAndProfiling {
  LOGGING_AND_PROFILING_ENABLED,
  LOGGING_AND_PROFILING_DISABLED
};


enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };


template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
class ScavengingVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSTypedArray,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSDataView,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    if (marks_handling == IGNORE_MARKS) {
      table_.Register(
          kVisitJSFunction,
          &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
              JSFunction::kSize>);
    } else {
      table_.Register(kVisitJSFunction, &EvacuateJSFunction);
    }

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

  static VisitorDispatchTable<ScavengingCallback>* GetTable() {
    return &table_;
  }

 private:
  enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

  static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

  // Helper function used by CopyObject to copy a source object to an
  // allocated target object and update the forwarding pointer in the source
  // object. Returns the target object.
  INLINE(static void MigrateObject(Heap* heap, HeapObject* source,
                                   HeapObject* target, int size)) {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (Marking::TransferColor(source, target)) {
        MemoryChunk::IncrementLiveBytesFromGC(target, size);
      }
    }
  }

  template <AllocationAlignment alignment>
  static inline bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
                                         HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }


  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline bool PromoteObject(Map* map, HeapObject** slot,
                                   HeapObject* object, int object_size) {
    Heap* heap = map->GetHeap();

    AllocationResult allocation =
        heap->old_space()->AllocateRaw(object_size, alignment);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

      // Update slot to new target.
      *slot = target;

      if (object_contents == POINTER_OBJECT) {
        if (map->instance_type() == JS_FUNCTION_TYPE) {
          heap->promotion_queue()->insert(target,
                                          JSFunction::kNonWeakFieldsEndOffset);
        } else {
          heap->promotion_queue()->insert(target, object_size);
        }
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }


  template <ObjectContents object_contents, AllocationAlignment alignment>
  static inline void EvacuateObject(Map* map, HeapObject** slot,
                                    HeapObject* object, int object_size) {
    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    UNREACHABLE();
  }


  static inline void EvacuateJSFunction(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
        JSFunction::kSize>(map, slot, object);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      // This object is black and it might not be rescanned by marker.
      // We should explicitly record code entry slot for compaction because
      // promotion queue processing (IterateAndMarkPointersToFromSpace) will
      // miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          target, code_entry_slot, code);
    }
  }


  static inline void EvacuateFixedArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }


  static inline void EvacuateFixedDoubleArray(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateFixedTypedArray(Map* map, HeapObject** slot,
                                             HeapObject* object) {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    FixedTypedArrayBase* target =
        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
    if (target->base_pointer() != Smi::FromInt(0))
      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
  }


  static inline void EvacuateFixedFloat64Array(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<DATA_OBJECT, kDoubleAligned>(map, slot, object, object_size);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    FixedTypedArrayBase* target =
        reinterpret_cast<FixedTypedArrayBase*>(map_word.ToForwardingAddress());
    if (target->base_pointer() != Smi::FromInt(0))
      target->set_base_pointer(target, SKIP_WRITE_BARRIER);
  }


  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
                                           HeapObject* object) {
    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

    Heap* heap = map->GetHeap();
    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();
    if (!heap->InNewSpace(target)) {
      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
    }
  }


  static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                       HeapObject* object) {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqOneByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateSeqTwoByteString(Map* map, HeapObject** slot,
                                              HeapObject* object) {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kWordAligned>(map, slot, object, object_size);
  }


  static inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                               HeapObject* object) {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      Scavenger::ScavengeObjectSlow(slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kWordAligned>(map, slot, object,
                                                 object_size);
  }

  template <ObjectContents object_contents>
  class ObjectEvacuationStrategy {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject** slot,
                                        HeapObject* object) {
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }

    static inline void Visit(Map* map, HeapObject** slot, HeapObject* object) {
      int object_size = map->instance_size();
      EvacuateObject<object_contents, kWordAligned>(map, slot, object,
                                                    object_size);
    }
  };

  static VisitorDispatchTable<ScavengingCallback> table_;
};


template <MarksHandling marks_handling,
          LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable<ScavengingCallback>
    ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;


// static
void Scavenger::Initialize() {
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
  ScavengingVisitor<TRANSFER_MARKS,
                    LOGGING_AND_PROFILING_ENABLED>::Initialize();
  ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
}


// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
  MapWord first_word = object->map_word();
  SLOW_DCHECK(!first_word.IsForwardingAddress());
  Map* map = first_word.ToMap();
  Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
  scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}


void Scavenger::SelectScavengingVisitorsTable() {
  bool logging_and_profiling =
      FLAG_verify_predictable || isolate()->logger()->is_logging() ||
      isolate()->cpu_profiler()->is_profiling() ||
      (isolate()->heap_profiler() != NULL &&
       isolate()->heap_profiler()->is_tracking_object_moves());

  if (!heap()->incremental_marking()->IsMarking()) {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<IGNORE_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }
  } else {
    if (!logging_and_profiling) {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_DISABLED>::GetTable());
    } else {
      scavenging_visitors_table_.CopyFrom(
          ScavengingVisitor<TRANSFER_MARKS,
                            LOGGING_AND_PROFILING_ENABLED>::GetTable());
    }

    if (heap()->incremental_marking()->IsCompacting()) {
      // When compacting forbid short-circuiting of cons-strings.
      // Scavenging code relies on the fact that new space object
      // can't be evacuated into evacuation candidate but
      // short-circuiting violates this assumption.
      scavenging_visitors_table_.Register(
          StaticVisitorBase::kVisitShortcutCandidate,
          scavenging_visitors_table_.GetVisitorById(
              StaticVisitorBase::kVisitConsString));
    }
  }
}


Isolate* Scavenger::isolate() { return heap()->isolate(); }


void ScavengeVisitor::VisitPointer(Object** p) { ScavengePointer(p); }


void ScavengeVisitor::VisitPointers(Object** start, Object** end) {
  // Copy all HeapObject pointers in [start, end)
  for (Object** p = start; p < end; p++) ScavengePointer(p);
}


void ScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  if (!heap_->InNewSpace(object)) return;
  Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
                            reinterpret_cast<HeapObject*>(object));
}

}  // namespace internal
}  // namespace v8
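The four ScavengingVisitor instantiations above trade code size for hot-loop
speed: the (marks handling × logging/profiling) decision is hoisted out of
per-object code into a once-per-GC table selection, so each evacuated object
costs one indexed function-pointer call. A generic, self-contained sketch of
that static dispatch-table technique — toy types only, not V8's:

#include <cstdio>

// Compile-time policy flags select a table of function pointers once;
// per-object dispatch is then a single indexed call, with the policy
// branches folded away inside each instantiation.
enum Kind { kString, kArray, kKindCount };
struct Obj { Kind kind; };
using Callback = void (*)(Obj*);

template <bool kLogging>
struct Visitor {
  static void VisitString(Obj*) { if (kLogging) std::puts("string"); }
  static void VisitArray(Obj*)  { if (kLogging) std::puts("array"); }
  static Callback table[kKindCount];
};
template <bool kLogging>
Callback Visitor<kLogging>::table[kKindCount] = {
    &Visitor<kLogging>::VisitString, &Visitor<kLogging>::VisitArray};

int main() {
  Obj o{kArray};
  const Callback* active = Visitor<true>::table;  // chosen once per "GC"
  active[o.kind](&o);                             // branch-free per object
}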
src/heap/scavenger.h (new file, 72 lines)

@@ -0,0 +1,72 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_H_
#define V8_HEAP_SCAVENGER_H_

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {

typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
                                   HeapObject* object);

class Scavenger {
 public:
  explicit Scavenger(Heap* heap) : heap_(heap) {}

  // Initializes static visitor dispatch tables.
  static void Initialize();

  // Callback function passed to Heap::Iterate etc. Copies an object if
  // necessary, the object might be promoted to an old space. The caller must
  // ensure the precondition that the object is (a) a heap object and (b) in
  // the heap's from space.
  static inline void ScavengeObject(HeapObject** p, HeapObject* object);

  // Slow part of {ScavengeObject} above.
  static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);

  // Chooses an appropriate static visitor table depending on the current state
  // of the heap (i.e. incremental marking, logging and profiling).
  void SelectScavengingVisitorsTable();

  Isolate* isolate();
  Heap* heap() { return heap_; }

 private:
  Heap* heap_;
  VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_;
};


// Helper class for turning the scavenger into an object visitor that is also
// filtering out non-HeapObjects and objects which do not reside in new space.
class ScavengeVisitor : public ObjectVisitor {
 public:
  explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}

  void VisitPointer(Object** p);
  void VisitPointers(Object** start, Object** end);

 private:
  inline void ScavengePointer(Object** p);

  Heap* heap_;
};


// Helper class for turning the scavenger into an object visitor that is also
// filtering out non-HeapObjects and objects which do not reside in new space.
class StaticScavengeVisitor
    : public StaticNewSpaceVisitor<StaticScavengeVisitor> {
 public:
  static inline void VisitPointer(Heap* heap, Object** p);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_H_
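A detail worth noting in this header: ScavengeObject stays a static member, so
call sites such as store_buffer()->IteratePointersToNewSpace in the heap.cc
hunks above can keep passing it around as a plain function pointer
(&Scavenger::ScavengeObject). A self-contained toy sketch of that API shape —
hypothetical names, not the V8 iterators:

#include <cstddef>

// A component whose static member function converts to an ordinary function
// pointer, so generic iterators need no knowledge of the component type.
struct Slot { int value; };
using SlotCallback = void (*)(Slot*);

struct MiniScavenger {
  static void Process(Slot* slot) { slot->value += 1; }  // stand-in for evacuation
};

void IterateSlots(Slot* slots, size_t count, SlotCallback callback) {
  for (size_t i = 0; i < count; ++i) callback(&slots[i]);
}

int main() {
  Slot slots[3] = {{1}, {2}, {3}};
  IterateSlots(slots, 3, &MiniScavenger::Process);  // like &Scavenger::ScavengeObject
}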
tools/gyp/v8.gyp

@@ -734,6 +734,9 @@
       '../../src/heap/objects-visiting-inl.h',
      '../../src/heap/objects-visiting.cc',
      '../../src/heap/objects-visiting.h',
+      '../../src/heap/scavenger-inl.h',
+      '../../src/heap/scavenger.cc',
+      '../../src/heap/scavenger.h',
      '../../src/heap/spaces-inl.h',
      '../../src/heap/spaces.cc',
      '../../src/heap/spaces.h',