Merging scavenge into the sweeping phase of the mark-sweep(-compact) collector.

This should reduce full GC pauses.
Review URL: http://codereview.chromium.org/1217011

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4318 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
vegorov@chromium.org 2010-03-30 12:01:06 +00:00
parent dde48831be
commit 6d3cdbbf98
6 changed files with 404 additions and 151 deletions
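
For orientation, the control-flow change can be summarized with a small standalone sketch (a toy model, not V8 code; names are simplified and bookkeeping such as promotion limits, callbacks and counters is omitted): a full mark-sweep(-compact) collection now evacuates new space as part of its sweep phase, so no separate Scavenge() pass runs after it, while minor collections still use the copying scavenger.

#include <cstdio>

enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

static void MarkLiveObjects() { std::puts("  mark"); }
static void SweepOldSpaces()  { std::puts("  sweep old spaces"); }
static void SweepNewSpace()   { std::puts("  sweep new space (scavenge merged into the sweep)"); }

// Full GC: marking, then sweeping. New space is evacuated during the sweep,
// which previously required a separate Scavenge() after the mark-compact.
static void MarkCompact() {
  MarkLiveObjects();
  SweepOldSpaces();
  SweepNewSpace();
}

// Minor GC: the copying scavenger is unchanged.
static void Scavenge() { std::puts("  scavenge"); }

static void PerformGarbageCollection(GarbageCollector collector) {
  if (collector == MARK_COMPACTOR) {
    MarkCompact();   // No trailing Scavenge() any more.
  } else {
    Scavenge();
  }
}

int main() {
  std::puts("minor GC:"); PerformGarbageCollection(SCAVENGER);
  std::puts("full GC:");  PerformGarbageCollection(MARK_COMPACTOR);
  return 0;
}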

src/heap-inl.h

@ -240,6 +240,27 @@ void Heap::CopyBlock(Object** dst, Object** src, int byte_size) {
}
void Heap::MoveBlock(Object** dst, Object** src, size_t byte_size) {
ASSERT(IsAligned<size_t>(byte_size, kPointerSize));
int size_in_words = byte_size / kPointerSize;
if ((dst < src) || (dst >= (src + size_in_words))) {
ASSERT((dst >= (src + size_in_words)) ||
((OffsetFrom(reinterpret_cast<Address>(src)) -
OffsetFrom(reinterpret_cast<Address>(dst))) >= kPointerSize));
Object** end = src + size_in_words;
while (src != end) {
*dst++ = *src++;
}
} else {
memmove(dst, src, byte_size);
}
}
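
As an aside, the overlap rule MoveBlock relies on can be shown with a standalone example (not V8 code): a plain forward word-by-word copy is safe whenever the destination does not start inside the source block; only the remaining overlapping case needs memmove.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Word-wise move with the same case split as Heap::MoveBlock above.
static void MoveWords(uintptr_t* dst, const uintptr_t* src, size_t size_in_words) {
  if (dst < src || dst >= src + size_in_words) {
    // A forward copy cannot clobber source words that have not been read yet.
    for (size_t i = 0; i < size_in_words; i++) dst[i] = src[i];
  } else {
    // The destination starts inside the source block: defer to memmove.
    std::memmove(dst, src, size_in_words * sizeof(uintptr_t));
  }
}

int main() {
  uintptr_t buf[8] = {1, 2, 3, 4, 5, 6, 7, 8};
  MoveWords(buf + 2, buf, 4);   // Overlapping, dst above src: memmove path.
  assert(buf[2] == 1 && buf[3] == 2 && buf[4] == 3 && buf[5] == 4);
  MoveWords(buf, buf + 4, 4);   // dst below src: forward copy path.
  assert(buf[0] == 3 && buf[1] == 4);
  return 0;
}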
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
ASSERT(InFromSpace(object));

src/heap.cc

@ -562,23 +562,18 @@ void Heap::PerformGarbageCollection(AllocationSpace space,
EnsureFromSpaceIsCommitted();
if (collector == MARK_COMPACTOR) {
// Perform mark-sweep with optional compaction.
if (collector == MARK_COMPACTOR) {
MarkCompact(tracer);
}
// Always perform a scavenge to make room in new space.
Scavenge();
// Update the old space promotion limits after the scavenge due to
// promotions during scavenge.
if (collector == MARK_COMPACTOR) {
int old_gen_size = PromotedSpaceSize();
old_gen_promotion_limit_ =
old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
old_gen_allocation_limit_ =
old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2);
old_gen_exhausted_ = false;
} else {
Scavenge();
}
Counters::objs_since_last_young.Set(0);
@ -764,6 +759,17 @@ static void VerifyNonPointerSpacePointers() {
#endif
void Heap::CheckNewSpaceExpansionCriteria() {
if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
survived_since_last_expansion_ > new_space_.Capacity()) {
// Grow the size of new space if there is room to grow and enough
// data has survived scavenge since the last expansion.
new_space_.Grow();
survived_since_last_expansion_ = 0;
}
}
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
@ -780,13 +786,7 @@ void Heap::Scavenge() {
// Used for updating survived_since_last_expansion_ at function end.
int survived_watermark = PromotedSpaceSize();
if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
survived_since_last_expansion_ > new_space_.Capacity()) {
// Grow the size of new space if there is room to grow and enough
// data has survived scavenge since the last expansion.
new_space_.Grow();
survived_since_last_expansion_ = 0;
}
CheckNewSpaceExpansionCriteria();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
@ -837,15 +837,17 @@ void Heap::Scavenge() {
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
ScavengeExternalStringTable();
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
ASSERT(new_space_front == new_space_.top());
// Set age mark.
new_space_.set_age_mark(new_space_.top());
// Update how much has survived scavenge.
survived_since_last_expansion_ +=
(PromotedSpaceSize() - survived_watermark) + new_space_.Size();
IncrementYoungSurvivorsCounter(
(PromotedSpaceSize() - survived_watermark) + new_space_.Size());
LOG(ResourceEvent("scavenge", "end"));
@ -853,7 +855,22 @@ void Heap::Scavenge() {
}
void Heap::ScavengeExternalStringTable() {
String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Object** p) {
MapWord first_word = HeapObject::cast(*p)->map_word();
if (!first_word.IsForwardingAddress()) {
// Unreachable external string can be finalized.
FinalizeExternalString(String::cast(*p));
return NULL;
}
// String is still reachable.
return String::cast(first_word.ToForwardingAddress());
}
void Heap::UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
ExternalStringTable::Verify();
if (ExternalStringTable::new_space_strings_.is_empty()) return;
@ -864,16 +881,10 @@ void Heap::ScavengeExternalStringTable() {
for (Object** p = start; p < end; ++p) {
ASSERT(Heap::InFromSpace(*p));
MapWord first_word = HeapObject::cast(*p)->map_word();
String* target = updater_func(p);
if (!first_word.IsForwardingAddress()) {
// Unreachable external string can be finalized.
FinalizeExternalString(String::cast(*p));
continue;
}
if (target == NULL) continue;
// String is still reachable.
String* target = String::cast(first_word.ToForwardingAddress());
ASSERT(target->IsExternalString());
if (Heap::InNewSpace(target)) {

src/heap.h

@ -200,6 +200,9 @@ class GCTracer;
class HeapStats;
typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer);
// The all-static Heap class captures the interface to the global object heap.
// All JavaScript contexts in this process share the same object heap.
@ -943,6 +946,30 @@ class Heap : public AllStatic {
static void RecordStats(HeapStats* stats);
// Copy a block of memory from src to dst. The size of the block must be
// aligned to the pointer size.
static inline void CopyBlock(Object** dst, Object** src, int byte_size);
// Optimized version of memmove for blocks whose size and addresses are both
// aligned to the pointer size.
static inline void MoveBlock(Object** dst, Object** src, size_t byte_size);
// Check the new space expansion criteria and expand the semispaces if they are met.
static void CheckNewSpaceExpansionCriteria();
static inline void IncrementYoungSurvivorsCounter(int survived) {
survived_since_last_expansion_ += survived;
}
static void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
// Helper function that governs the promotion policy from new space to
// old. If the object's old address lies below the new space's age
// mark or if we've already filled the bottom 1/16th of the to space,
// we try to promote this object.
static inline bool ShouldBePromoted(Address old_address, int object_size);
static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
private:
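
A toy sketch of the promotion policy described in the ShouldBePromoted comment above may help; the thresholds, types and field names here are assumptions for illustration, not the actual V8 predicate (which lives in heap-inl.h): promote an object if it lies below the age mark (it has already survived a scavenge) or if the bottom 1/16th of to space has been filled.

#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for the bits of new-space state the policy consults.
struct NewSpaceModel {
  uintptr_t age_mark;   // Addresses below this survived the previous scavenge.
  size_t    capacity;   // Capacity of to space in bytes.
  size_t    allocated;  // Bytes already evacuated into to space.
};

static bool ShouldBePromotedSketch(const NewSpaceModel& ns,
                                   uintptr_t old_address, size_t object_size) {
  return old_address < ns.age_mark ||                     // Survived once already.
         ns.allocated + object_size >= ns.capacity / 16;  // Bottom 1/16th is full.
}

int main() {
  NewSpaceModel ns = {0x1000, 1 << 20, 0};
  return ShouldBePromotedSketch(ns, 0x800, 64) ? 0 : 1;   // Below the age mark.
}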
@ -1139,7 +1166,10 @@ class Heap : public AllStatic {
// Performs a minor collection in new generation.
static void Scavenge();
static void ScavengeExternalStringTable();
static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
Object** pointer);
static Address DoScavenge(ObjectVisitor* scavenge_visitor,
Address new_space_front);
@ -1157,11 +1187,6 @@ class Heap : public AllStatic {
HeapObject* target,
int size);
// Helper function that governs the promotion policy from new space to
// old. If the object's old address lies below the new space's age
// mark or if we've already filled the bottom 1/16th of the to space,
// we try to promote this object.
static inline bool ShouldBePromoted(Address old_address, int object_size);
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
// Record the copy of an object in the NewSpace's statistics.
static void RecordCopiedObject(HeapObject* obj);
@ -1180,9 +1205,6 @@ class Heap : public AllStatic {
// Slow part of scavenge object.
static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
// Copy memory from src to dst.
static inline void CopyBlock(Object** dst, Object** src, int byte_size);
// Initializes a function with a shared part and prototype.
// Returns the function.
// Note: this code was factored out of AllocateFunction such that

src/mark-compact.cc

@ -53,13 +53,13 @@ MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
// Counters used for debugging the marking phase of mark-compact or mark-sweep
// collection.
int MarkCompactCollector::live_bytes_ = 0;
int MarkCompactCollector::live_young_objects_ = 0;
int MarkCompactCollector::live_old_data_objects_ = 0;
int MarkCompactCollector::live_old_pointer_objects_ = 0;
int MarkCompactCollector::live_code_objects_ = 0;
int MarkCompactCollector::live_map_objects_ = 0;
int MarkCompactCollector::live_cell_objects_ = 0;
int MarkCompactCollector::live_lo_objects_ = 0;
int MarkCompactCollector::live_young_objects_size_ = 0;
int MarkCompactCollector::live_old_data_objects_size_ = 0;
int MarkCompactCollector::live_old_pointer_objects_size_ = 0;
int MarkCompactCollector::live_code_objects_size_ = 0;
int MarkCompactCollector::live_map_objects_size_ = 0;
int MarkCompactCollector::live_cell_objects_size_ = 0;
int MarkCompactCollector::live_lo_objects_size_ = 0;
#endif
void MarkCompactCollector::CollectGarbage() {
@ -136,13 +136,13 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
#ifdef DEBUG
live_bytes_ = 0;
live_young_objects_ = 0;
live_old_pointer_objects_ = 0;
live_old_data_objects_ = 0;
live_code_objects_ = 0;
live_map_objects_ = 0;
live_cell_objects_ = 0;
live_lo_objects_ = 0;
live_young_objects_size_ = 0;
live_old_pointer_objects_size_ = 0;
live_old_data_objects_size_ = 0;
live_code_objects_size_ = 0;
live_map_objects_size_ = 0;
live_cell_objects_size_ = 0;
live_lo_objects_size_ = 0;
#endif
}
@ -742,21 +742,21 @@ static int CountMarkedCallback(HeapObject* obj) {
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
live_bytes_ += obj->Size();
if (Heap::new_space()->Contains(obj)) {
live_young_objects_++;
live_young_objects_size_ += obj->Size();
} else if (Heap::map_space()->Contains(obj)) {
ASSERT(obj->IsMap());
live_map_objects_++;
live_map_objects_size_ += obj->Size();
} else if (Heap::cell_space()->Contains(obj)) {
ASSERT(obj->IsJSGlobalPropertyCell());
live_cell_objects_++;
live_cell_objects_size_ += obj->Size();
} else if (Heap::old_pointer_space()->Contains(obj)) {
live_old_pointer_objects_++;
live_old_pointer_objects_size_ += obj->Size();
} else if (Heap::old_data_space()->Contains(obj)) {
live_old_data_objects_++;
live_old_data_objects_size_ += obj->Size();
} else if (Heap::code_space()->Contains(obj)) {
live_code_objects_++;
live_code_objects_size_ += obj->Size();
} else if (Heap::lo_space()->Contains(obj)) {
live_lo_objects_++;
live_lo_objects_size_ += obj->Size();
} else {
UNREACHABLE();
}
@ -1068,31 +1068,210 @@ void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
}
static void SweepSpace(NewSpace* space) {
// We scavenge new space simultaneously with sweeping. This is done in two
// passes.
// The first pass migrates all live objects from one semispace to the other or
// promotes them to old space. The forwarding address is written directly into
// the first word of the object, without any encoding. If an object is dead, we
// write NULL as its forwarding address.
// The second pass updates pointers to new space in all spaces. It is possible
// to encounter pointers to dead objects during traversal of the remembered set
// for map space, because remembered set bits corresponding to dead maps are
// cleared later, during map space sweeping.
static void MigrateObject(Address dst, Address src, int size) {
Heap::CopyBlock(reinterpret_cast<Object**>(dst),
reinterpret_cast<Object**>(src),
size);
Memory::Address_at(src) = dst;
}
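
The forwarding scheme used by MigrateObject can be illustrated with a small standalone model (not V8 code): after the payload is copied, the first word of the old object is overwritten with the new address, so any stale pointer into from space can be redirected with a single load; a dead object stores NULL there instead (as in SweepNewSpace below).

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Copy `words` machine words from src to dst, then leave a forwarding pointer
// in the first word of the old copy, mirroring MigrateObject above.
static void Migrate(uintptr_t* dst, uintptr_t* src, size_t words) {
  std::memcpy(dst, src, words * sizeof(uintptr_t));   // Copy the payload.
  src[0] = reinterpret_cast<uintptr_t>(dst);          // Forwarding address.
}

int main() {
  uintptr_t old_obj[2] = {42, 7};
  uintptr_t new_obj[2] = {0, 0};
  Migrate(new_obj, old_obj, 2);

  // A stale pointer into from space is fixed up by reading the first word.
  uintptr_t* stale = old_obj;
  uintptr_t* updated = reinterpret_cast<uintptr_t*>(stale[0]);
  assert(updated == new_obj && updated[0] == 42 && updated[1] == 7);
  return 0;
}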
// Visitor for updating pointers from live objects in old spaces to new space.
// It does not expect to encounter pointers to dead objects.
class PointersToNewGenUpdatingVisitor: public ObjectVisitor {
public:
void VisitPointer(Object** p) {
UpdatePointer(p);
}
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) UpdatePointer(p);
}
void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
VisitPointer(&target);
rinfo->set_target_address(Code::cast(target)->instruction_start());
}
void VisitDebugTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence());
Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
VisitPointer(&target);
rinfo->set_call_address(Code::cast(target)->instruction_start());
}
private:
void UpdatePointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
HeapObject* obj = HeapObject::cast(*p);
Address old_addr = obj->address();
if (Heap::new_space()->Contains(obj)) {
ASSERT(Heap::InFromSpace(*p));
*p = HeapObject::FromAddress(Memory::Address_at(old_addr));
}
}
};
// Visitor for updating pointers from live objects in old spaces to new space.
// It can encounter pointers to dead objects in new space when traversing map
// space (see comment for MigrateObject).
static void UpdatePointerToNewGen(HeapObject** p) {
if (!(*p)->IsHeapObject()) return;
Address old_addr = (*p)->address();
ASSERT(Heap::InFromSpace(*p));
Address new_addr = Memory::Address_at(old_addr);
// The object pointed to by *p is dead. No update is required.
if (new_addr == NULL) return;
*p = HeapObject::FromAddress(new_addr);
}
static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) {
Address old_addr = HeapObject::cast(*p)->address();
Address new_addr = Memory::Address_at(old_addr);
return String::cast(HeapObject::FromAddress(new_addr));
}
static bool TryPromoteObject(HeapObject* object, int object_size) {
Object* result;
if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
result = Heap::lo_space()->AllocateRawFixedArray(object_size);
if (!result->IsFailure()) {
HeapObject* target = HeapObject::cast(result);
MigrateObject(target->address(), object->address(), object_size);
Heap::UpdateRSet(target);
return true;
}
} else {
OldSpace* target_space = Heap::TargetSpace(object);
ASSERT(target_space == Heap::old_pointer_space() ||
target_space == Heap::old_data_space());
result = target_space->AllocateRaw(object_size);
if (!result->IsFailure()) {
HeapObject* target = HeapObject::cast(result);
MigrateObject(target->address(), object->address(), object_size);
if (target_space == Heap::old_pointer_space()) {
Heap::UpdateRSet(target);
}
return true;
}
}
return false;
}
static void SweepNewSpace(NewSpace* space) {
Heap::CheckNewSpaceExpansionCriteria();
Address from_bottom = space->bottom();
Address from_top = space->top();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
space->Flip();
space->ResetAllocationInfo();
int size = 0;
int survivors_size = 0;
// First pass: traverse all objects in inactive semispace, remove marks,
// migrate live objects and write forwarding addresses.
for (Address current = from_bottom; current < from_top; current += size) {
HeapObject* object = HeapObject::FromAddress(current);
if (object->IsMarked()) {
object->ClearMark();
MarkCompactCollector::tracer()->decrement_marked_count();
size = object->Size();
survivors_size += size;
if (Heap::ShouldBePromoted(current, size) &&
TryPromoteObject(object, size)) {
continue;
}
// Promotion either failed or not required.
// Copy the content of the object.
Object* target = space->AllocateRaw(size);
// Allocation cannot fail at this point: semispaces are of equal size.
ASSERT(!target->IsFailure());
MigrateObject(HeapObject::cast(target)->address(), current, size);
} else {
size = object->Size();
Memory::Address_at(current) = NULL;
}
}
// Second pass: find pointers to new space and update them.
PointersToNewGenUpdatingVisitor updating_visitor;
// Update pointers in to space.
HeapObject* object;
for (Address current = space->bottom();
current < space->top();
current += object->Size()) {
object = HeapObject::FromAddress(current);
if (object->IsMarked()) {
object->ClearMark();
MarkCompactCollector::tracer()->decrement_marked_count();
} else {
// We give non-live objects a map that will correctly give their size,
// since their existing map might not be live after the collection.
int size = object->Size();
if (size >= ByteArray::kHeaderSize) {
object->set_map(Heap::raw_unchecked_byte_array_map());
ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
} else {
ASSERT(size == kPointerSize);
object->set_map(Heap::raw_unchecked_one_pointer_filler_map());
object->IterateBody(object->map()->instance_type(),
object->Size(),
&updating_visitor);
}
ASSERT(object->Size() == size);
// Update roots.
Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
// Update pointers in old spaces.
Heap::IterateRSet(Heap::old_pointer_space(), &UpdatePointerToNewGen);
Heap::IterateRSet(Heap::map_space(), &UpdatePointerToNewGen);
Heap::lo_space()->IterateRSet(&UpdatePointerToNewGen);
// Update pointers from cells.
HeapObjectIterator cell_iterator(Heap::cell_space());
for (HeapObject* cell = cell_iterator.next();
cell != NULL;
cell = cell_iterator.next()) {
if (cell->IsJSGlobalPropertyCell()) {
Address value_address =
reinterpret_cast<Address>(cell) +
(JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
}
// The object is now unmarked for the call to Size() at the top of the
// loop.
}
// Update pointers from external string table.
Heap::UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
// All pointers were updated. Update auxiliary allocation info.
Heap::IncrementYoungSurvivorsCounter(survivors_size);
space->set_age_mark(space->top());
}
@ -1382,10 +1561,12 @@ class MapCompact {
ASSERT(FreeListNode::IsFreeListNode(vacant_map));
ASSERT(map_to_evacuate->IsMap());
memcpy(
reinterpret_cast<void*>(vacant_map->address()),
reinterpret_cast<void*>(map_to_evacuate->address()),
ASSERT(Map::kSize % 4 == 0);
Heap::CopyBlock(reinterpret_cast<Object**>(vacant_map->address()),
reinterpret_cast<Object**>(map_to_evacuate->address()),
Map::kSize);
ASSERT(vacant_map->IsMap()); // Due to memcpy above.
MapWord forwarding_map_word = MapWord::FromMap(vacant_map);
@ -1465,10 +1646,11 @@ void MarkCompactCollector::SweepSpaces() {
SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
SweepSpace(Heap::new_space());
SweepNewSpace(Heap::new_space());
SweepSpace(Heap::map_space(), &DeallocateMapBlock);
int live_maps = Heap::map_space()->Size() / Map::kSize;
ASSERT(live_map_objects_ == live_maps);
int live_maps_size = Heap::map_space()->Size();
int live_maps = live_maps_size / Map::kSize;
ASSERT(live_map_objects_size_ == live_maps_size);
if (Heap::map_space()->NeedsCompaction(live_maps)) {
MapCompact map_compact(live_maps);
@ -1500,7 +1682,7 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
Address start,
Address end,
HeapObjectCallback size_func) {
int live_objects = 0;
int live_objects_size = 0;
Address current = start;
while (current < end) {
uint32_t encoded_map = Memory::uint32_at(current);
@ -1509,11 +1691,12 @@ int MarkCompactCollector::IterateLiveObjectsInRange(
} else if (encoded_map == kMultiFreeEncoding) {
current += Memory::int_at(current + kIntSize);
} else {
live_objects++;
current += size_func(HeapObject::FromAddress(current));
int size = size_func(HeapObject::FromAddress(current));
current += size;
live_objects_size += size;
}
}
return live_objects;
return live_objects_size;
}
@ -1639,17 +1822,17 @@ void MarkCompactCollector::UpdatePointers() {
Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
GlobalHandles::IterateWeakRoots(&updating_visitor);
int live_maps = IterateLiveObjects(Heap::map_space(),
int live_maps_size = IterateLiveObjects(Heap::map_space(),
&UpdatePointersInOldObject);
int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
&UpdatePointersInOldObject);
int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
&UpdatePointersInOldObject);
int live_codes = IterateLiveObjects(Heap::code_space(),
int live_codes_size = IterateLiveObjects(Heap::code_space(),
&UpdatePointersInOldObject);
int live_cells = IterateLiveObjects(Heap::cell_space(),
int live_cells_size = IterateLiveObjects(Heap::cell_space(),
&UpdatePointersInOldObject);
int live_news = IterateLiveObjects(Heap::new_space(),
int live_news_size = IterateLiveObjects(Heap::new_space(),
&UpdatePointersInNewObject);
// Large objects do not move, the map word can be updated directly.
@ -1657,18 +1840,18 @@ void MarkCompactCollector::UpdatePointers() {
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
UpdatePointersInNewObject(obj);
USE(live_maps);
USE(live_pointer_olds);
USE(live_data_olds);
USE(live_codes);
USE(live_cells);
USE(live_news);
ASSERT(live_maps == live_map_objects_);
ASSERT(live_data_olds == live_old_data_objects_);
ASSERT(live_pointer_olds == live_old_pointer_objects_);
ASSERT(live_codes == live_code_objects_);
ASSERT(live_cells == live_cell_objects_);
ASSERT(live_news == live_young_objects_);
USE(live_maps_size);
USE(live_pointer_olds_size);
USE(live_data_olds_size);
USE(live_codes_size);
USE(live_cells_size);
USE(live_news_size);
ASSERT(live_maps_size == live_map_objects_size_);
ASSERT(live_data_olds_size == live_old_data_objects_size_);
ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
ASSERT(live_codes_size == live_code_objects_size_);
ASSERT(live_cells_size == live_cell_objects_size_);
ASSERT(live_news_size == live_young_objects_size_);
}
@ -1783,27 +1966,31 @@ void MarkCompactCollector::RelocateObjects() {
#endif
// Relocate objects. Map objects are always relocated first, since relocating
// objects in the other spaces relies on map objects to determine object sizes.
int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject);
int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
int live_maps_size = IterateLiveObjects(Heap::map_space(),
&RelocateMapObject);
int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(),
&RelocateOldPointerObject);
int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(),
&RelocateOldDataObject);
int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject);
int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject);
int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject);
int live_codes_size = IterateLiveObjects(Heap::code_space(),
&RelocateCodeObject);
int live_cells_size = IterateLiveObjects(Heap::cell_space(),
&RelocateCellObject);
int live_news_size = IterateLiveObjects(Heap::new_space(),
&RelocateNewObject);
USE(live_maps);
USE(live_data_olds);
USE(live_pointer_olds);
USE(live_codes);
USE(live_cells);
USE(live_news);
ASSERT(live_maps == live_map_objects_);
ASSERT(live_data_olds == live_old_data_objects_);
ASSERT(live_pointer_olds == live_old_pointer_objects_);
ASSERT(live_codes == live_code_objects_);
ASSERT(live_cells == live_cell_objects_);
ASSERT(live_news == live_young_objects_);
USE(live_maps_size);
USE(live_pointer_olds_size);
USE(live_data_olds_size);
USE(live_codes_size);
USE(live_cells_size);
USE(live_news_size);
ASSERT(live_maps_size == live_map_objects_size_);
ASSERT(live_data_olds_size == live_old_data_objects_size_);
ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_);
ASSERT(live_codes_size == live_code_objects_size_);
ASSERT(live_cells_size == live_cell_objects_size_);
ASSERT(live_news_size == live_young_objects_size_);
// Flip from and to spaces
Heap::new_space()->Flip();
@ -1821,6 +2008,9 @@ void MarkCompactCollector::RelocateObjects() {
PagedSpaces spaces;
for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
space->MCCommitRelocationInfo();
Heap::CheckNewSpaceExpansionCriteria();
Heap::IncrementYoungSurvivorsCounter(live_news_size);
}
@ -1840,7 +2030,10 @@ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
Address old_addr = obj->address();
if (new_addr != old_addr) {
memmove(new_addr, old_addr, Map::kSize); // copy contents
// Move contents.
Heap::MoveBlock(reinterpret_cast<Object**>(new_addr),
reinterpret_cast<Object**>(old_addr),
Map::kSize);
}
#ifdef DEBUG
@ -1896,7 +2089,10 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
Address old_addr = obj->address();
if (new_addr != old_addr) {
memmove(new_addr, old_addr, obj_size); // Copy contents
// Move contents.
Heap::MoveBlock(reinterpret_cast<Object**>(new_addr),
reinterpret_cast<Object**>(old_addr),
obj_size);
}
ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());
@ -1940,7 +2136,10 @@ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
Address old_addr = obj->address();
if (new_addr != old_addr) {
memmove(new_addr, old_addr, obj_size); // Copy contents.
// Move contents.
Heap::MoveBlock(reinterpret_cast<Object**>(new_addr),
reinterpret_cast<Object**>(old_addr),
obj_size);
}
HeapObject* copied_to = HeapObject::FromAddress(new_addr);
@ -1976,8 +2175,8 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
#endif
// New and old addresses cannot overlap.
memcpy(reinterpret_cast<void*>(new_addr),
reinterpret_cast<void*>(old_addr),
Heap::CopyBlock(reinterpret_cast<Object**>(new_addr),
reinterpret_cast<Object**>(old_addr),
obj_size);
#ifdef DEBUG

src/mark-compact.h

@ -407,26 +407,26 @@ class MarkCompactCollector: public AllStatic {
// Counters used for debugging the marking phase of mark-compact or
// mark-sweep collection.
// Number of live objects in Heap::to_space_.
static int live_young_objects_;
// Size of live objects in Heap::to_space_.
static int live_young_objects_size_;
// Number of live objects in Heap::old_pointer_space_.
static int live_old_pointer_objects_;
// Size of live objects in Heap::old_pointer_space_.
static int live_old_pointer_objects_size_;
// Number of live objects in Heap::old_data_space_.
static int live_old_data_objects_;
// Size of live objects in Heap::old_data_space_.
static int live_old_data_objects_size_;
// Number of live objects in Heap::code_space_.
static int live_code_objects_;
// Size of live objects in Heap::code_space_.
static int live_code_objects_size_;
// Number of live objects in Heap::map_space_.
static int live_map_objects_;
// Size of live objects in Heap::map_space_.
static int live_map_objects_size_;
// Number of live objects in Heap::cell_space_.
static int live_cell_objects_;
// Size of live objects in Heap::cell_space_.
static int live_cell_objects_size_;
// Number of live objects in Heap::lo_space_.
static int live_lo_objects_;
// Size of live objects in Heap::lo_space_.
static int live_lo_objects_size_;
// Number of live bytes in this collection.
static int live_bytes_;

src/utils.h

@ -657,7 +657,7 @@ inline Dest BitCast(const Source& source) {
return dest;
}
} } // namespace v8::internal
#endif // V8_UTILS_H_