[heap] Improve size profiling for ArrayBuffer tracking

Eagerly account for retained sizes during ArrayBuffer tracking, separately for
new and old space. Building on this, we can now trigger a Scavenge when the
amount of external memory retained from new space grows too large.

BUG=chromium:621829
R=jochen@chromium.org,hpayer@chromium.org

Review-Url: https://codereview.chromium.org/2210263002
Cr-Commit-Position: refs/heads/master@{#38731}
Author: mlippautz (committed by Commit bot)
Date: 2016-08-18 13:45:18 -07:00
Commit: 28e13bd6a7 (parent: 255971d3ac)
9 changed files with 236 additions and 83 deletions
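
Before the per-file diffs, a brief orientation: the CL turns ArrayBufferTracker into a per-Heap object that keeps two atomic byte counters, retained_from_new_space_ and retained_from_old_space_, bumped on register/unregister and rebalanced when buffers are freed or promoted; Heap::ShouldDoScavengeForReducingExternalMemory() then compares per-space ratios of live object size to retained external bytes. A minimal standalone sketch of that bookkeeping and check (simplified names and types, not the V8 code; the zero-division guard is my addition, where the CL itself only leaves a TODO about a lower bound):

#include <atomic>
#include <cstddef>

// Simplified stand-in for the CL's retained-size accounting.
struct RetainedCounters {
  std::atomic<size_t> from_new_space{0};
  std::atomic<size_t> from_old_space{0};

  void OnRegister(size_t len, bool in_new_space) {
    if (in_new_space) {
      from_new_space += len;
    } else {
      from_old_space += len;
    }
  }
  // A buffer that survives evacuation into old space moves its bytes over.
  void OnPromote(size_t len) {
    from_new_space -= len;
    from_old_space += len;
  }
};

// Mirrors the ratio check in Heap::ShouldDoScavengeForReducingExternalMemory
// from the heap.cc hunk below; the early return is an added guard.
bool ShouldDoScavenge(size_t new_space_object_size,
                      size_t old_space_object_size, size_t external_total,
                      const RetainedCounters& counters) {
  const size_t retained_new = counters.from_new_space.load();
  const size_t retained_old = external_total - retained_new;
  if (retained_new == 0 || retained_old == 0) return false;
  const float new_space_ratio =
      static_cast<float>(new_space_object_size) / retained_new;
  const float old_space_ratio =
      static_cast<float>(old_space_object_size) / retained_old;
  return new_space_ratio > old_space_ratio;
}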

include/v8.h

@@ -7605,7 +7605,7 @@ class Internals {
       kExternalMemoryOffset + kApiInt64Size;
   static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
                                          kApiInt64Size + kApiInt64Size +
-                                         kApiPointerSize + kApiPointerSize;
+                                         kApiPointerSize;
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
   static const int kNullValueRootIndex = 6;

src/heap/array-buffer-tracker-inl.h

@@ -10,7 +10,7 @@
 namespace v8 {
 namespace internal {

-void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
+void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;

@@ -26,13 +26,18 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     tracker->Add(buffer, length);
   }
+  if (page->InNewSpace()) {
+    retained_from_new_space_.Increment(length);
+  } else {
+    retained_from_old_space_.Increment(length);
+  }
   // We may go over the limit of externally allocated memory here. We call the
   // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(heap->isolate())
+  reinterpret_cast<v8::Isolate*>(heap_->isolate())
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }

-void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
+void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;

@@ -44,7 +49,12 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     length = tracker->Remove(buffer);
   }
-  heap->update_external_memory(-static_cast<intptr_t>(length));
+  if (page->InNewSpace()) {
+    retained_from_new_space_.Decrement(length);
+  } else {
+    retained_from_old_space_.Decrement(length);
+  }
+  heap_->update_external_memory(-static_cast<int64_t>(length));
 }

 void LocalArrayBufferTracker::Add(Key key, const Value& value) {

src/heap/array-buffer-tracker.cc

@@ -14,7 +14,7 @@ LocalArrayBufferTracker::~LocalArrayBufferTracker() {
 }

 template <LocalArrayBufferTracker::FreeMode free_mode>
-void LocalArrayBufferTracker::Free() {
+LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
   size_t freed_memory = 0;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
@@ -30,60 +30,71 @@ void LocalArrayBufferTracker::Free() {
       ++it;
     }
   }
-  if (freed_memory > 0) {
-    heap_->update_external_memory_concurrently_freed(
-        static_cast<intptr_t>(freed_memory));
-  }
+  return ProcessResult(freed_memory, 0);
 }

 template <typename Callback>
-void LocalArrayBufferTracker::Process(Callback callback) {
+LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Process(
+    Callback callback) {
   JSArrayBuffer* new_buffer = nullptr;
   size_t freed_memory = 0;
+  size_t promoted_memory = 0;
+  size_t len = 0;
+  Page* target_page = nullptr;
+  LocalArrayBufferTracker* tracker = nullptr;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
-    const CallbackResult result = callback(it->first, &new_buffer);
-    if (result == kKeepEntry) {
-      ++it;
-    } else if (result == kUpdateEntry) {
-      DCHECK_NOT_NULL(new_buffer);
-      Page* target_page = Page::FromAddress(new_buffer->address());
-      // We need to lock the target page because we cannot guarantee
-      // exclusive access to new space pages.
-      if (target_page->InNewSpace()) target_page->mutex()->Lock();
-      LocalArrayBufferTracker* tracker = target_page->local_tracker();
-      if (tracker == nullptr) {
-        target_page->AllocateLocalTracker();
-        tracker = target_page->local_tracker();
-      }
-      DCHECK_NOT_NULL(tracker);
-      tracker->Add(new_buffer, it->second);
-      if (target_page->InNewSpace()) target_page->mutex()->Unlock();
-      it = array_buffers_.erase(it);
-    } else if (result == kRemoveEntry) {
-      const size_t len = it->second;
-      heap_->isolate()->array_buffer_allocator()->Free(
-          it->first->backing_store(), len);
-      freed_memory += len;
-      it = array_buffers_.erase(it);
-    } else {
-      UNREACHABLE();
+    switch (callback(it->first, &new_buffer)) {
+      case kKeepEntry:
+        ++it;
+        break;
+      case kUpdateEntry:
+        DCHECK_NOT_NULL(new_buffer);
+        target_page = Page::FromAddress(new_buffer->address());
+        // We need to lock the target page because we cannot guarantee
+        // exclusive access to new space pages.
+        if (target_page->InNewSpace()) target_page->mutex()->Lock();
+        tracker = target_page->local_tracker();
+        if (tracker == nullptr) {
+          target_page->AllocateLocalTracker();
+          tracker = target_page->local_tracker();
+        }
+        DCHECK_NOT_NULL(tracker);
+        len = it->second;
+        tracker->Add(new_buffer, len);
+        if (target_page->InNewSpace()) {
+          target_page->mutex()->Unlock();
+        } else {
+          promoted_memory += len;
+        }
+        it = array_buffers_.erase(it);
+        break;
+      case kRemoveEntry:
+        len = it->second;
+        heap_->isolate()->array_buffer_allocator()->Free(
+            it->first->backing_store(), len);
+        freed_memory += len;
+        it = array_buffers_.erase(it);
+        break;
     }
   }
-  if (freed_memory > 0) {
-    heap_->update_external_memory_concurrently_freed(
-        static_cast<intptr_t>(freed_memory));
-  }
+  return ProcessResult(freed_memory, promoted_memory);
 }

-void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
-  DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
-  for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),
-                                      heap->new_space()->FromSpaceEnd())) {
+void ArrayBufferTracker::AccountForConcurrentlyFreedMemory() {
+  heap_->update_external_memory(
+      -static_cast<int64_t>(concurrently_freed_.Value()));
+  concurrently_freed_.SetValue(0);
+}
+
+void ArrayBufferTracker::FreeDeadInNewSpace() {
+  DCHECK_EQ(heap_->gc_state(), Heap::HeapState::SCAVENGE);
+  for (Page* page : NewSpacePageRange(heap_->new_space()->FromSpaceStart(),
+                                      heap_->new_space()->FromSpaceEnd())) {
     bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
     CHECK(empty);
   }
-  heap->account_external_memory_concurrently_freed();
+  AccountForConcurrentlyFreedMemory();
 }

 void ArrayBufferTracker::FreeDead(Page* page) {
@@ -91,7 +102,13 @@ void ArrayBufferTracker::FreeDead(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
   DCHECK(!page->SweepingDone());
-  tracker->Free<LocalArrayBufferTracker::kFreeDead>();
+  LocalArrayBufferTracker::ProcessResult result =
+      tracker->Free<LocalArrayBufferTracker::kFreeDead>();
+  if (page->InNewSpace()) {
+    retained_from_new_space_.Decrement(result.freed);
+  } else {
+    retained_from_old_space_.Decrement(result.freed);
+  }
   if (tracker->IsEmpty()) {
     page->ReleaseLocalTracker();
   }

@@ -100,7 +117,14 @@ void ArrayBufferTracker::FreeDead(Page* page) {
 void ArrayBufferTracker::FreeAll(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
-  tracker->Free<LocalArrayBufferTracker::kFreeAll>();
+  LocalArrayBufferTracker::ProcessResult result =
+      tracker->Free<LocalArrayBufferTracker::kFreeAll>();
+  concurrently_freed_.Increment(result.freed);
+  if (page->InNewSpace()) {
+    retained_from_new_space_.Decrement(result.freed);
+  } else {
+    retained_from_old_space_.Decrement(result.freed);
+  }
   if (tracker->IsEmpty()) {
     page->ReleaseLocalTracker();
   }

@@ -111,7 +135,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
   if (tracker == nullptr) return true;
   DCHECK(page->SweepingDone());
-  tracker->Process(
+  LocalArrayBufferTracker::ProcessResult result = tracker->Process(
       [mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
         MapWord map_word = old_buffer->map_word();
         if (map_word.IsForwardingAddress()) {
@@ -122,6 +146,13 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
                    ? LocalArrayBufferTracker::kKeepEntry
                    : LocalArrayBufferTracker::kRemoveEntry;
       });
+  concurrently_freed_.Increment(result.freed);
+  if (page->InNewSpace()) {
+    retained_from_new_space_.Decrement(result.freed + result.promoted);
+  } else {
+    retained_from_old_space_.Decrement(result.freed);
+  }
+  retained_from_old_space_.Increment(result.promoted);
   return tracker->IsEmpty();
 }
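
A note on the accounting in ProcessBuffers above: freed and promoted bytes are both subtracted from the new-space counter, while only promoted bytes reappear in the old-space counter, so freed backing stores leave the totals entirely and are additionally queued in concurrently_freed_ for the next call to AccountForConcurrentlyFreedMemory(). A hypothetical walkthrough with made-up numbers (a new-space page retaining 300 bytes, of which a 100-byte buffer dies and a 200-byte buffer is promoted):

// Illustrative arithmetic only; the numbers are invented for the example.
size_t retained_new = 300, retained_old = 0;
const size_t freed = 100, promoted = 200;
retained_new -= freed + promoted;  // 0: both kinds of bytes leave new space
retained_old += promoted;          // 200: promoted bytes now count as old space
// The 100 freed bytes vanish from both counters and are later deducted from
// Heap::external_memory_ via AccountForConcurrentlyFreedMemory().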

src/heap/array-buffer-tracker.h

@@ -8,6 +8,7 @@
 #include <unordered_map>

 #include "src/allocation.h"
+#include "src/base/atomic-utils.h"
 #include "src/base/platform/mutex.h"
 #include "src/globals.h"
@@ -18,40 +19,61 @@ class Heap;
 class JSArrayBuffer;
 class Page;

-class ArrayBufferTracker : public AllStatic {
+class ArrayBufferTracker {
  public:
   enum ProcessingMode {
     kUpdateForwardedRemoveOthers,
     kUpdateForwardedKeepOthers,
   };

-  // Returns whether a buffer is currently tracked.
-  static bool IsTracked(JSArrayBuffer* buffer);
+  explicit ArrayBufferTracker(Heap* heap)
+      : heap_(heap),
+        concurrently_freed_(0),
+        retained_from_new_space_(0),
+        retained_from_old_space_(0) {}

   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.

   // Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
   // access to the tracker by taking the page lock for the corresponding page.
-  inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
-  inline static void Unregister(Heap* heap, JSArrayBuffer* buffer);
+  inline void RegisterNew(JSArrayBuffer* buffer);
+  inline void Unregister(JSArrayBuffer* buffer);

   // Frees all backing store pointers for dead JSArrayBuffers in new space.
   // Does not take any locks and can only be called during Scavenge.
-  static void FreeDeadInNewSpace(Heap* heap);
+  void FreeDeadInNewSpace();

   // Frees all backing store pointers for dead JSArrayBuffer on a given page.
   // Requires marking information to be present. Requires the page lock to be
   // taken by the caller.
-  static void FreeDead(Page* page);
+  void FreeDead(Page* page);

   // Frees all remaining, live or dead, array buffers on a page. Only useful
   // during tear down.
-  static void FreeAll(Page* page);
+  void FreeAll(Page* page);

   // Processes all array buffers on a given page. |mode| specifies the action
   // to perform on the buffers. Returns whether the tracker is empty or not.
-  static bool ProcessBuffers(Page* page, ProcessingMode mode);
+  bool ProcessBuffers(Page* page, ProcessingMode mode);

+  // Returns whether a buffer is currently tracked.
+  static bool IsTracked(JSArrayBuffer* buffer);
+
+  void AccountForConcurrentlyFreedMemory();
+
+  size_t retained_from_new_space() { return retained_from_new_space_.Value(); }
+  size_t retained_from_old_space() { return retained_from_old_space_.Value(); }
+
+ private:
+  Heap* heap_;
+  base::AtomicNumber<size_t> concurrently_freed_;
+  // Number of bytes retained from new space.
+  base::AtomicNumber<size_t> retained_from_new_space_;
+  // Number of bytes retained from old space.
+  base::AtomicNumber<size_t> retained_from_old_space_;
 };

 // LocalArrayBufferTracker tracks internalized array buffers.
@@ -65,23 +87,32 @@ class LocalArrayBufferTracker {
   enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
   enum FreeMode { kFreeDead, kFreeAll };

+  struct ProcessResult {
+    ProcessResult(size_t freed, size_t promoted)
+        : freed(freed), promoted(promoted) {}
+    size_t freed;
+    size_t promoted;
+  };
+
   explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~LocalArrayBufferTracker();

   inline void Add(Key key, const Value& value);
   inline Value Remove(Key key);

-  // Frees up array buffers determined by |free_mode|.
+  // Frees up array buffers determined by |free_mode|. Returns statistics in
+  // ProcessResult.
   template <FreeMode free_mode>
-  void Free();
+  ProcessResult Free();

   // Processes buffers one by one. The CallbackResult of the callback decides
-  // what action to take on the buffer.
+  // what action to take on the buffer. Returns statistics in ProcessResult.
   //
   // Callback should be of type:
   //   CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
   template <typename Callback>
-  void Process(Callback callback);
+  ProcessResult Process(Callback callback);

   bool IsEmpty() { return array_buffers_.empty(); }

src/heap/heap.cc

@@ -162,7 +162,8 @@ Heap::Heap()
       deserialization_complete_(false),
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
-      force_oom_(false) {
+      force_oom_(false),
+      array_buffer_tracker_(nullptr) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.

@@ -312,6 +313,21 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   return SCAVENGER;
 }

+size_t Heap::external_memory_retained_from_new_space() {
+  // This is just an approximation.
+  return array_buffer_tracker()->retained_from_new_space();
+}
+
+bool Heap::ShouldDoScavengeForReducingExternalMemory() {
+  size_t retained_new_space = external_memory_retained_from_new_space();
+  size_t retained_old_space = external_memory() - retained_new_space;
+  float new_space_ratio =
+      static_cast<float>(new_space_.SizeOfObjects()) / retained_new_space;
+  float old_space_ratio =
+      static_cast<float>(old_space_->SizeOfObjects()) / retained_old_space;
+  // TODO(mlippautz): Add some lower bound.
+  return new_space_ratio > old_space_ratio;
+}
+
 // TODO(1238405): Combine the infrastructure for --heap-stats and
 // --log-gc to avoid the complicated preprocessor and flag testing.

@@ -1738,7 +1754,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());

-  ArrayBufferTracker::FreeDeadInNewSpace(this);
+  array_buffer_tracker()->FreeDeadInNewSpace();

   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(

@@ -2024,12 +2040,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {

 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
-  ArrayBufferTracker::RegisterNew(this, buffer);
+  array_buffer_tracker()->RegisterNew(buffer);
 }

 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
-  ArrayBufferTracker::Unregister(this, buffer);
+  array_buffer_tracker()->Unregister(buffer);
 }

@@ -5383,6 +5399,8 @@ bool Heap::SetUp() {
       *this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
   new_space()->AddAllocationObserver(idle_scavenge_observer_);

+  array_buffer_tracker_ = new ArrayBufferTracker(this);
+
   return true;
 }

@@ -5591,6 +5609,9 @@ void Heap::TearDown() {
   delete store_buffer_;
   store_buffer_ = nullptr;

+  delete array_buffer_tracker_;
+  array_buffer_tracker_ = nullptr;
+
   delete memory_allocator_;
   memory_allocator_ = nullptr;
 }

src/heap/heap.h

@@ -844,14 +844,9 @@ class Heap {
   int64_t external_memory() { return external_memory_; }
   void update_external_memory(int64_t delta) { external_memory_ += delta; }

-  void update_external_memory_concurrently_freed(intptr_t freed) {
-    external_memory_concurrently_freed_.Increment(freed);
-  }
+  size_t external_memory_retained_from_new_space();

-  void account_external_memory_concurrently_freed() {
-    external_memory_ -= external_memory_concurrently_freed_.Value();
-    external_memory_concurrently_freed_.SetValue(0);
-  }
+  bool ShouldDoScavengeForReducingExternalMemory();

   void DeoptMarkedAllocationSites();

@@ -1406,6 +1401,8 @@
   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   void UnregisterArrayBuffer(JSArrayBuffer* buffer);

+  ArrayBufferTracker* array_buffer_tracker() { return array_buffer_tracker_; }
+
   // ===========================================================================
   // Allocation site tracking. =================================================
   // ===========================================================================

@@ -2046,9 +2043,6 @@
   // Caches the amount of external memory registered at the last MC.
   int64_t external_memory_at_last_mark_compact_;

-  // The amount of memory that has been freed concurrently.
-  base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
-
   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;

@@ -2290,6 +2284,9 @@
   // Used for testing purposes.
   bool force_oom_;

+  // Tracker for ArrayBuffers pointing to external memory.
+  ArrayBufferTracker* array_buffer_tracker_;
+
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class GCCallbacksScope;

src/heap/mark-compact.cc

@@ -833,7 +833,7 @@ void MarkCompactCollector::Prepare() {
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
-  heap()->account_external_memory_concurrently_freed();
+  heap()->array_buffer_tracker()->AccountForConcurrentlyFreedMemory();

 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {

@@ -3129,7 +3129,8 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
       case kObjectsNewToOld:
         success = collector_->VisitLiveObjects(page, &new_space_visitor_,
                                                kClearMarkbits);
-        ArrayBufferTracker::ProcessBuffers(
+        heap->array_buffer_tracker()->ProcessBuffers(
             page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
         DCHECK(success);
         break;

@@ -3156,14 +3157,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
           EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
           success =
               collector_->VisitLiveObjects(page, &record_visitor, kKeepMarking);
-          ArrayBufferTracker::ProcessBuffers(
+          heap->array_buffer_tracker()->ProcessBuffers(
               page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
           DCHECK(success);
           // We need to return failure here to indicate that we want this page
           // added to the sweeper.
           success = false;
         } else {
-          ArrayBufferTracker::ProcessBuffers(
+          heap->array_buffer_tracker()->ProcessBuffers(
               page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
         }
         break;

@@ -3367,7 +3368,7 @@ int MarkCompactCollector::Sweeper::RawSweep(

   // Before we sweep objects on the page, we free dead array buffers which
   // requires valid mark bits.
-  ArrayBufferTracker::FreeDead(p);
+  p->heap()->array_buffer_tracker()->FreeDead(p);

   // We also release the black area markers here.
   p->ReleaseBlackAreaEndMarkerMap();

@@ -3938,7 +3939,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
         PrintIsolate(isolate(), "sweeping: released page: %p",
                      static_cast<void*>(p));
       }
-      ArrayBufferTracker::FreeAll(p);
+      heap()->array_buffer_tracker()->FreeAll(p);
       space->ReleasePage(p);
       continue;
     }

src/heap/spaces.cc

@@ -1127,7 +1127,7 @@ bool PagedSpace::HasBeenSetUp() { return true; }
 void PagedSpace::TearDown() {
   for (auto it = begin(); it != end();) {
     Page* page = *(it++);  // Will be erased.
-    ArrayBufferTracker::FreeAll(page);
+    heap()->array_buffer_tracker()->FreeAll(page);
     heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
   }
   anchor_.set_next_page(&anchor_);

@@ -1627,6 +1627,8 @@ void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {

 bool NewSpace::AddFreshPage() {
+  if (heap()->ShouldDoScavengeForReducingExternalMemory()) return false;
+
   Address top = allocation_info_.top();
   DCHECK(!Page::IsAtObjectStart(top));
   if (!to_space_.AdvancePage()) {

@@ -1819,7 +1821,7 @@ void SemiSpace::TearDown() {
   // Properly uncommit memory to keep the allocator counters in sync.
   if (is_committed()) {
     for (Page* p : *this) {
-      ArrayBufferTracker::FreeAll(p);
+      heap()->array_buffer_tracker()->FreeAll(p);
     }
     Uncommit();
   }
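
The one added line in NewSpace::AddFreshPage above is where the heuristic takes effect: refusing a fresh page makes the new-space allocation fail, and the VM's usual retry-with-GC path then performs the Scavenge, which frees dead external buffers via FreeDeadInNewSpace(). A rough sketch of that control flow (all types and names here are illustrative stand-ins, not V8's real allocator interface):

// Illustrative stand-ins only; not V8's actual allocator API.
struct HeapStub {
  bool ShouldDoScavengeForReducingExternalMemory() { return true; }
  void Scavenge() { /* would free dead external buffers in new space */ }
};

// Mirrors the added early-out: refuse a page to force an allocation failure.
bool AddFreshPage(HeapStub* heap) {
  if (heap->ShouldDoScavengeForReducingExternalMemory()) return false;
  return true;  // ...otherwise advance to the next semi-space page.
}

// Simplified slow path: a refused page surfaces as a failed allocation and
// the collector runs; the real VM retries the allocation afterwards.
void AllocateSlowPath(HeapStub* heap) {
  if (!AddFreshPage(heap)) heap->Scavenge();
}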

test/cctest/heap/test-array-buffer-tracker.cc

@@ -314,5 +314,65 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
   }
 }

+TEST(ArrayBuffer_RetainedCounterPromotion) {
+  // The test checks that retained counters on ArrayBufferTracker are
+  // consistent with where the buffers are on the heap.
+  CcTest::InitializeVM();
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
+  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
+  {
+    v8::HandleScope handle_scope(isolate);
+    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
+    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
+    CHECK(heap->InNewSpace(*buf));
+    CHECK_EQ(tracker->retained_from_new_space(), 100u);
+    CHECK_EQ(tracker->retained_from_old_space(), 0u);
+    heap::GcAndSweep(heap, OLD_SPACE);
+    CHECK(heap->InNewSpace(*buf));
+    CHECK_EQ(tracker->retained_from_new_space(), 100u);
+    CHECK_EQ(tracker->retained_from_old_space(), 0u);
+    heap::GcAndSweep(heap, OLD_SPACE);
+    CHECK(heap->InOldSpace(*buf));
+    CHECK_EQ(tracker->retained_from_new_space(), 0u);
+    CHECK_EQ(tracker->retained_from_old_space(), 100u);
+  }
+}
+
+TEST(ArrayBuffer_RetainedCounterNewSpace) {
+  // The test checks that retained counters on ArrayBufferTracker are
+  // consistent with where the buffers are on the heap.
+  CcTest::InitializeVM();
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
+  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
+  {
+    v8::HandleScope handle_scope(isolate);
+    {
+      v8::HandleScope tmp_scope(isolate);
+      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
+      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
+      CHECK(heap->InNewSpace(*buf));
+    }
+    {
+      v8::HandleScope tmp_scope(isolate);
+      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 50);
+      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
+      CHECK(heap->InNewSpace(*buf));
+      CHECK_EQ(tracker->retained_from_new_space(), 150u);
+      CHECK_EQ(tracker->retained_from_old_space(), 0u);
+      heap::GcAndSweep(heap, NEW_SPACE);
+      CHECK(heap->InNewSpace(*buf));
+      CHECK_EQ(tracker->retained_from_new_space(), 50u);
+      CHECK_EQ(tracker->retained_from_old_space(), 0u);
+    }
+    heap::GcAndSweep(heap, NEW_SPACE);
+    CHECK_EQ(tracker->retained_from_new_space(), 0u);
+    CHECK_EQ(tracker->retained_from_old_space(), 0u);
+  }
+}
+
 }  // namespace internal
 }  // namespace v8