Record the addresses of pages that are unmapped to aid in post mortem
crash dump analysis.

Review URL: https://chromiumcodereview.appspot.com/9700061

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11071 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
erik.corry@gmail.com 2012-03-16 14:13:22 +00:00
parent 9104cfd959
commit e3774cf23f
4 changed files with 27 additions and 2 deletions

@@ -104,6 +104,7 @@ Heap::Heap()
      gc_post_processing_depth_(0),
      ms_count_(0),
      gc_count_(0),
      remembered_unmapped_pages_index_(0),
      unflattened_strings_length_(0),
#ifdef DEBUG
      allocation_allowed_(true),
@@ -6971,4 +6972,19 @@ void Heap::FreeQueuedChunks() {
  chunks_queued_for_free_ = NULL;
}


void Heap::RememberUnmappedPage(Address page, bool compacted) {
  uintptr_t p = reinterpret_cast<uintptr_t>(page);
  // Tag the page pointer to make it findable in the dump file.
  if (compacted) {
    p ^= 0xc1ead & (Page::kPageSize - 1);  // Cleared.
  } else {
    p ^= 0x1d1ed & (Page::kPageSize - 1);  // I died.
  }
  remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
      reinterpret_cast<Address>(p);
  remembered_unmapped_pages_index_++;
  remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}

} } // namespace v8::internal
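
A note on the tagging scheme above: page base addresses are aligned to Page::kPageSize, so their low offset bits are zero before tagging, and XOR-ing in 0xc1ead ("cleared") or 0x1d1ed ("I died") leaves the page base recoverable by masking those bits off again. The helper below is a sketch of how a post-mortem tool might decode an entry pulled out of remembered_unmapped_pages_ in a crash dump; it is not part of this patch, DescribeRememberedPage and kAssumedPageSize are hypothetical names, and it assumes the 1 MB Page::kPageSize in use at the time.

#include <cstdint>
#include <cstdio>

// Hypothetical dump-analysis helper, not V8 code.  The constants and the
// masking mirror Heap::RememberUnmappedPage above; adjust kAssumedPageSize
// if the build being analysed uses a different Page::kPageSize.
static const uintptr_t kAssumedPageSize = 1 << 20;
static const uintptr_t kOffsetMask = kAssumedPageSize - 1;

void DescribeRememberedPage(uintptr_t tagged) {
  uintptr_t page_base = tagged & ~kOffsetMask;  // the page was aligned, so the
  uintptr_t tag = tagged & kOffsetMask;         // tag sits entirely in the low bits
  if (tag == (0xc1ead & kOffsetMask)) {
    std::printf("%p: page unmapped after compaction (\"cleared\")\n",
                reinterpret_cast<void*>(page_base));
  } else if (tag == (0x1d1ed & kOffsetMask)) {
    std::printf("%p: page unmapped without compaction (\"I died\")\n",
                reinterpret_cast<void*>(page_base));
  } else {
    std::printf("%p: no recognizable unmap tag\n", reinterpret_cast<void*>(tagged));
  }
}

Since XOR with the same constant is its own inverse, tagged ^ (0xc1ead & kOffsetMask) would equally recover the exact original pointer for a compacted page.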

@@ -1583,6 +1583,9 @@ class Heap {
    set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
  }

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

 private:
  Heap();
@@ -1634,6 +1637,11 @@ class Heap {
  int ms_count_;  // how many mark-sweep collections happened
  unsigned int gc_count_;  // how many gc happened

  // For post mortem debugging.
  static const int kRememberedUnmappedPages = 128;
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  // Total length of the strings we failed to flatten since the last GC.
  int unflattened_strings_length_;
@@ -1781,7 +1789,6 @@
  inline void UpdateOldSpaceLimits();

  // Allocate an uninitialized object in map space. The behavior is identical
  // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
  // have to test the allocation space argument and (b) can reduce code size
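
The declarations added above form a small ring buffer: remembered_unmapped_pages_index_ always names the slot that will be written next, so once more than kRememberedUnmappedPages (128) pages have been unmapped, new entries overwrite the oldest ones and the buffer holds the 128 most recent unmappings. A dump-analysis tool could walk it starting at that index to see the surviving entries oldest-first. The sketch below is an illustration only (WalkRememberedPages is a hypothetical helper, not V8 code); slots that were never written simply hold whatever the Heap object started with and can be filtered out by checking for the tags.

#include <cstdint>

// Hypothetical dump-analysis helper, not V8 code: visit the remembered pages
// in chronological order, oldest surviving entry first.
void WalkRememberedPages(const uintptr_t pages[128],
                         int next_index,  // remembered_unmapped_pages_index_
                         void (*visit)(uintptr_t tagged)) {
  const int kCount = 128;  // mirrors kRememberedUnmappedPages
  for (int i = 0; i < kCount; i++) {
    visit(pages[(next_index + i) % kCount]);
  }
}

Feeding each entry to a decoder like the DescribeRememberedPage sketch earlier filters out slots without a recognizable tag.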

@@ -3427,7 +3427,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
      space->Free(p->area_start(), p->area_size());
      p->set_scan_on_scavenge(false);
      slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
      p->ClearEvacuationCandidate();
      p->ResetLiveBytes();
      space->ReleasePage(p);
    }

@@ -594,6 +594,9 @@ void MemoryAllocator::Free(MemoryChunk* chunk) {
    PerformAllocationCallback(space, kAllocationActionFree, chunk->size());
  }

  isolate_->heap()->RememberUnmappedPage(
      reinterpret_cast<Address>(chunk), chunk->IsEvacuationCandidate());

  delete chunk->slots_buffer();
  delete chunk->skip_list();
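
To make the call above concrete, here is a small self-contained example (hypothetical page address; same 1 MB page-size assumption as the sketches earlier) showing what ends up in remembered_unmapped_pages_ for a compacted and a non-compacted page, and how the page base is recovered.

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kAssumedPageSize = 1 << 20;  // assumed Page::kPageSize
  const uintptr_t page = 0x3e200000;           // hypothetical 1 MB-aligned page base

  // What Heap::RememberUnmappedPage records for this page:
  uintptr_t compacted_entry = page ^ (0xc1ead & (kAssumedPageSize - 1));
  uintptr_t uncompacted_entry = page ^ (0x1d1ed & (kAssumedPageSize - 1));

  assert(compacted_entry == 0x3e2c1ead);    // low bits spell "cleared"
  assert(uncompacted_entry == 0x3e21d1ed);  // low bits spell "I died"

  // Masking off the page-offset bits recovers the original page base.
  assert((compacted_entry & ~(kAssumedPageSize - 1)) == page);
  assert((uncompacted_entry & ~(kAssumedPageSize - 1)) == page);
  return 0;
}

The distinctive low-bit patterns are what make these entries easy to spot when scanning a crash dump by eye or with a simple pattern search.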