[heap] Fine-grained JSArrayBuffer tracking

Track based on JSArrayBuffer addresses instead of the attached backing store.
This way we can later iterate over the buffers of a single page. The reland
also switches to a page-based implementation where each page owns the set of
its (live and dead) buffers.

Details of tracking (a minimal sketch follows below):
- Scavenge: New space pages are processed in bulk on the main thread.
- MC: Unswept pages are processed in bulk in parallel. All other pages
  are processed by the sweeper concurrently.
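
A minimal sketch of the page-based tracking idea (hypothetical types, not the
code in this CL): each page owns a map from buffer to backing store, so a GC
phase can process all buffers of a page in one bulk pass.

#include <cstddef>
#include <cstdlib>
#include <map>
#include <utility>

struct JSArrayBuffer;  // stand-in; only used as a key in this sketch

class PageLocalTracker {
 public:
  void Add(JSArrayBuffer* buffer, void* backing_store, size_t length) {
    buffers_[buffer] = std::make_pair(backing_store, length);
  }
  // Bulk pass over the page: free the backing store of every buffer the
  // callback reports as dead.
  template <typename IsLiveFn>
  void FreeDead(IsLiveFn is_live) {
    for (auto it = buffers_.begin(); it != buffers_.end();) {
      if (is_live(it->first)) {
        ++it;
      } else {
        free(it->second.first);  // stand-in for the array buffer allocator
        it = buffers_.erase(it);
      }
    }
  }

 private:
  std::map<JSArrayBuffer*, std::pair<void*, size_t>> buffers_;
};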

BUG=chromium:611688
LOG=N
CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel

Review-Url: https://codereview.chromium.org/1964023002
Cr-Commit-Position: refs/heads/master@{#36437}
mlippautz 2016-05-23 04:19:55 -07:00 committed by Commit bot
parent dbd7d5a59f
commit b2d8bfc793
18 changed files with 514 additions and 169 deletions

View File

@@ -1170,6 +1170,7 @@ v8_source_set("v8_base") {
"src/handles.h",
"src/hashmap.h",
"src/heap-symbols.h",
"src/heap/array-buffer-tracker-inl.h",
"src/heap/array-buffer-tracker.cc",
"src/heap/array-buffer-tracker.h",
"src/heap/gc-idle-time-handler.cc",

View File

@@ -7392,7 +7392,7 @@ class Internals {
kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size;
static const int kIsolateRootsOffset =
kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size +
kApiPointerSize;
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;

View File

@@ -0,0 +1,74 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
size_t freed_memory = 0;
for (TrackingMap::iterator it = live_.begin(); it != live_.end();) {
switch (callback(it->first, &new_buffer)) {
case kKeepEntry:
it++;
break;
case kKeepAndUpdateEntry:
DCHECK_NOT_NULL(new_buffer);
Page::FromAddress(new_buffer->address())
->local_tracker()
->AddLive(new_buffer, it->second);
live_.erase(it++);
break;
case kRemoveEntry:
heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
it->second.second);
freed_memory += it->second.second;
live_.erase(it++);
break;
default:
UNREACHABLE();
}
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
not_yet_discovered_.clear();
started_ = false;
}
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void LocalArrayBufferTracker::ScanAndFreeDead() {
switch (liveness_indicator) {
case kForwardingPointer:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
MapWord map_word = old_buffer->map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kKeepAndUpdateEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
case kMarkBit:
Process([](JSArrayBuffer* old_buffer, JSArrayBuffer**) {
if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(old_buffer))) {
return LocalArrayBufferTracker::kKeepEntry;
}
return LocalArrayBufferTracker::kRemoveEntry;
});
break;
default:
UNREACHABLE();
}
}
} // namespace internal
} // namespace v8
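
The Process() template above drives all updates through a single loop: a
callback classifies each tracked buffer and may hand back a new (moved)
buffer. A self-contained sketch of that pattern, with hypothetical types
(Buffer, Entry) standing in for the tracker's key and value:

#include <cstddef>
#include <map>

enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };

struct Buffer;
struct Entry { void* data; size_t length; };

template <typename Callback>
void Process(std::map<Buffer*, Entry>* live, Callback callback) {
  Buffer* new_buffer = nullptr;
  for (auto it = live->begin(); it != live->end();) {
    switch (callback(it->first, &new_buffer)) {
      case kKeepEntry:           // buffer survives in place
        ++it;
        break;
      case kKeepAndUpdateEntry:  // buffer moved; re-register under new key
        (*live)[new_buffer] = it->second;  // the real code adds it to the
        it = live->erase(it);              // tracker of the target page
        break;
      case kRemoveEntry:         // buffer is dead
        it = live->erase(it);    // the real code also frees |data|
        break;
    }
  }
}

During a scavenge the callback checks the forwarding pointer
(kForwardingPointer); during sweeping it checks the mark bit (kMarkBit), as in
ScanAndFreeDead() above.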

View File

@@ -3,53 +3,98 @@
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects-inl.h"
#include "src/objects.h"
#include "src/v8.h"
namespace v8 {
namespace internal {
ArrayBufferTracker::~ArrayBufferTracker() {
Isolate* isolate = heap()->isolate();
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
size_t freed_memory = 0;
for (auto& buffer : live_array_buffers_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
for (auto& buffer : live_) {
heap_->isolate()->array_buffer_allocator()->Free(buffer.second.first,
buffer.second.second);
freed_memory += buffer.second.second;
}
for (auto& buffer : live_array_buffers_for_scavenge_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
}
live_array_buffers_.clear();
live_array_buffers_for_scavenge_.clear();
not_yet_discovered_array_buffers_.clear();
not_yet_discovered_array_buffers_for_scavenge_.clear();
if (freed_memory > 0) {
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(freed_memory));
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
live_.clear();
not_yet_discovered_.clear();
}
void LocalArrayBufferTracker::Add(Key key, const Value& value) {
live_[key] = value;
not_yet_discovered_[key] = value;
}
void LocalArrayBufferTracker::AddLive(Key key, const Value& value) {
DCHECK_EQ(not_yet_discovered_.count(key), 0);
live_[key] = value;
}
void LocalArrayBufferTracker::MarkLive(Key key) {
DCHECK_EQ(live_.count(key), 1);
not_yet_discovered_.erase(key);
}
LocalArrayBufferTracker::Value LocalArrayBufferTracker::Remove(Key key) {
DCHECK_EQ(live_.count(key), 1);
Value value = live_[key];
live_.erase(key);
not_yet_discovered_.erase(key);
return value;
}
void LocalArrayBufferTracker::FreeDead() {
size_t freed_memory = 0;
for (TrackingMap::iterator it = not_yet_discovered_.begin();
it != not_yet_discovered_.end();) {
heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
it->second.second);
freed_memory += it->second.second;
live_.erase(it->first);
not_yet_discovered_.erase(it++);
}
if (freed_memory > 0) {
heap_->update_amount_of_external_allocated_freed_memory(
static_cast<intptr_t>(freed_memory));
}
started_ = false;
}
void LocalArrayBufferTracker::Reset() {
if (!started_) {
not_yet_discovered_ = live_;
started_ = true;
}
}
bool LocalArrayBufferTracker::IsEmpty() {
return live_.empty() && not_yet_discovered_.empty();
}
void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
ArrayBufferTracker::~ArrayBufferTracker() {}
void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer, bool track_live) {
void* data = buffer->backing_store();
if (!data) return;
bool in_new_space = heap()->InNewSpace(buffer);
size_t length = NumberToSize(heap()->isolate(), buffer->byte_length());
if (in_new_space) {
live_array_buffers_for_scavenge_[data] = length;
size_t length = NumberToSize(heap_->isolate(), buffer->byte_length());
Page* page = Page::FromAddress(buffer->address());
if (track_live) {
page->local_tracker()->AddLive(buffer, std::make_pair(data, length));
} else {
live_array_buffers_[data] = length;
page->local_tracker()->Add(buffer, std::make_pair(data, length));
}
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap()->isolate())
reinterpret_cast<v8::Isolate*>(heap_->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
@@ -58,84 +103,49 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;
bool in_new_space = heap()->InNewSpace(buffer);
std::map<void*, size_t>* live_buffers =
in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
std::map<void*, size_t>* not_yet_discovered_buffers =
in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
: &not_yet_discovered_array_buffers_;
DCHECK(live_buffers->count(data) > 0);
size_t length = (*live_buffers)[data];
live_buffers->erase(data);
not_yet_discovered_buffers->erase(data);
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(length));
size_t length = Page::FromAddress(buffer->address())
->local_tracker()
->Remove(buffer)
.second;
heap_->update_amount_of_external_allocated_memory(
-static_cast<intptr_t>(length));
}
void ArrayBufferTracker::FreeDeadInNewSpace() {
NewSpacePageIterator from_it(heap_->new_space()->FromSpaceStart(),
heap_->new_space()->FromSpaceEnd());
while (from_it.has_next()) {
Page* p = from_it.next();
p->ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::kForwardingPointer>();
}
heap_->account_amount_of_external_allocated_freed_memory();
}
void ArrayBufferTracker::ResetTrackersInOldSpace() {
heap_->old_space()->ForAllPages([](Page* p) { p->ResetTracker(); });
}
#define UPDATE_GUARD(buffer, data) \
if (buffer->is_external()) return; \
data = buffer->backing_store(); \
if (data == nullptr) return; \
if (data == heap_->undefined_value()) return;
void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
base::LockGuard<base::Mutex> guard(&mutex_);
void* data = buffer->backing_store();
void* data = nullptr;
UPDATE_GUARD(buffer, data);
// ArrayBuffer might be in the middle of being constructed.
if (data == heap()->undefined_value()) return;
if (heap()->InNewSpace(buffer)) {
not_yet_discovered_array_buffers_for_scavenge_.erase(data);
LocalArrayBufferTracker* tracker =
Page::FromAddress(buffer->address())->local_tracker();
if (tracker->IsTracked(buffer)) {
tracker->MarkLive(buffer);
} else {
not_yet_discovered_array_buffers_.erase(data);
heap_->RegisterNewArrayBuffer(buffer);
}
}
void ArrayBufferTracker::FreeDead(bool from_scavenge) {
size_t freed_memory = 0;
Isolate* isolate = heap()->isolate();
for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
live_array_buffers_for_scavenge_.erase(buffer.first);
}
if (!from_scavenge) {
for (auto& buffer : not_yet_discovered_array_buffers_) {
isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
freed_memory += buffer.second;
live_array_buffers_.erase(buffer.first);
}
}
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
// Do not call through the api as this code is triggered while doing a GC.
heap()->update_amount_of_external_allocated_memory(
-static_cast<int64_t>(freed_memory));
}
void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
not_yet_discovered_array_buffers_for_scavenge_ =
live_array_buffers_for_scavenge_;
}
void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
base::LockGuard<base::Mutex> guard(&mutex_);
if (buffer->is_external()) return;
void* data = buffer->backing_store();
if (!data) return;
// ArrayBuffer might be in the middle of being constructed.
if (data == heap()->undefined_value()) return;
DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
live_array_buffers_for_scavenge_.erase(data);
not_yet_discovered_array_buffers_for_scavenge_.erase(data);
}
#undef UPDATE_GUARD
} // namespace internal
} // namespace v8
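
The Reset()/MarkLive()/FreeDead() trio above implements the two-set discovery
round. A minimal sketch of one round, assuming a hypothetical Buffer* key and
a (data, length) value:

#include <cstddef>
#include <cstdlib>
#include <map>
#include <utility>

struct Buffer;
using TrackingMap = std::map<Buffer*, std::pair<void*, size_t>>;

TrackingMap live;
TrackingMap not_yet_discovered;

void Reset() {  // start of a GC round
  not_yet_discovered = live;
}

void MarkLive(Buffer* b) {  // the GC proved the buffer alive
  not_yet_discovered.erase(b);
}

void FreeDead() {  // end of the round: whatever is left is dead
  for (auto& entry : not_yet_discovered) {
    free(entry.second.first);  // stand-in for the embedder's allocator
    live.erase(entry.first);
  }
  not_yet_discovered.clear();
}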

View File

@@ -15,61 +15,105 @@ namespace internal {
// Forward declarations.
class Heap;
class Page;
class JSArrayBuffer;
// LocalArrayBufferTracker is a tracker for live and dead JSArrayBuffer
// objects.
//
// It consists of two sets: a live set and a not-yet-discovered set of buffers.
// Upon registration (in the ArrayBufferTracker) a buffer is added to both
// sets. When a buffer is encountered as live (or added as live) it is removed
// from the not-yet-discovered set. Finally, after each round (sometime during
// GC) the leftover not-yet-discovered buffers are cleaned up. Upon starting
// a new round the not-yet-discovered set is initialized from the live set.
//
// Caveats:
// - Between two cleanups of the buffers using |FreeDead| we always need a
//   |Reset| and thus another marking phase.
class LocalArrayBufferTracker {
public:
typedef std::pair<void*, size_t> Value;
typedef JSArrayBuffer* Key;
enum LivenessIndicator { kForwardingPointer, kMarkBit };
enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };
explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap), started_(false) {}
~LocalArrayBufferTracker();
void Add(Key key, const Value& value);
void AddLive(Key key, const Value& value);
Value Remove(Key key);
void MarkLive(Key key);
bool IsEmpty();
// Resets the tracking set, i.e., not yet discovered buffers are initialized
// from the remaining live set of buffers.
void Reset();
// Frees up any dead backing stores of not yet discovered array buffers.
// Requires that the buffers have been properly marked using MarkLive.
void FreeDead();
// Scans the whole tracker and decides based on liveness_indicator whether
// a JSArrayBuffer is still considered live.
template <LivenessIndicator liveness_indicator>
inline void ScanAndFreeDead();
bool IsTracked(Key key) { return live_.find(key) != live_.end(); }
private:
// TODO(mlippautz): Switch to unordered_map once it is supported on all
// platforms.
typedef std::map<Key, Value> TrackingMap;
// Processes buffers one by one. The CallbackResult decides whether the buffer
// will be dropped or not.
//
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer*, JSArrayBuffer**);
template <typename Callback>
inline void Process(Callback callback);
Heap* heap_;
// |live_| maps tracked JSArrayBuffers to the internally allocated backing
// store and length. For each GC round |not_yet_discovered_| is initialized
// as a copy of |live_|. Upon finding a JSArrayBuffer during GC, the buffer
// is removed from |not_yet_discovered_|. At the end of a GC, we free up the
// remaining JSArrayBuffers in |not_yet_discovered_|.
TrackingMap live_;
TrackingMap not_yet_discovered_;
bool started_;
};
class ArrayBufferTracker {
public:
explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
~ArrayBufferTracker();
inline Heap* heap() { return heap_; }
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.
// A new ArrayBuffer was created with |data| as backing store.
void RegisterNew(JSArrayBuffer* buffer);
// The backing store |data| is no longer owned by V8.
// Register/unregister a new JSArrayBuffer |buffer| for tracking.
// |track_live| indicates that |buffer| is already marked and will not be
// visited by the marker again, so it is added as live right away; otherwise
// marking it as live is delayed until the marker discovers the buffer.
void RegisterNew(JSArrayBuffer* buffer, bool track_live);
void Unregister(JSArrayBuffer* buffer);
// A live ArrayBuffer was discovered during marking/scavenge.
// Frees all backing store pointers for dead JSArrayBuffers in new space.
void FreeDeadInNewSpace();
void ResetTrackersInOldSpace();
// A live JSArrayBuffer was discovered during marking.
void MarkLive(JSArrayBuffer* buffer);
// Frees all backing store pointers that weren't discovered in the previous
// marking or scavenge phase.
void FreeDead(bool from_scavenge);
// Prepare for a new scavenge phase. A new marking phase is implicitly
// prepared by finishing the previous one.
void PrepareDiscoveryInNewSpace();
// An ArrayBuffer moved from new space to old space.
void Promote(JSArrayBuffer* buffer);
private:
base::Mutex mutex_;
Heap* heap_;
// |live_array_buffers_| maps externally allocated memory used as backing
// store for ArrayBuffers to the length of the respective memory blocks.
//
// At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
// a copy of |live_array_buffers_| and we remove pointers as we discover live
// ArrayBuffer objects during marking. At the end of mark/compact, the
// remaining memory blocks can be freed.
std::map<void*, size_t> live_array_buffers_;
std::map<void*, size_t> not_yet_discovered_array_buffers_;
// To be able to free memory held by ArrayBuffers during scavenge as well, we
// have a separate list of allocated memory held by ArrayBuffers in new space.
//
// Since mark/compact also evacuates the new space, all pointers in the
// |live_array_buffers_for_scavenge_| list are also in the
// |live_array_buffers_| list.
std::map<void*, size_t> live_array_buffers_for_scavenge_;
std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_H_

View File

@@ -1626,8 +1626,6 @@ void Heap::Scavenge() {
scavenge_collector_->SelectScavengingVisitorsTable();
array_buffer_tracker()->PrepareDiscoveryInNewSpace();
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_.Flip();
@@ -1744,7 +1742,7 @@ void Heap::Scavenge() {
// Set age mark.
new_space_.set_age_mark(new_space_.top());
array_buffer_tracker()->FreeDead(true);
array_buffer_tracker()->FreeDeadInNewSpace();
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
@@ -2024,7 +2022,8 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
return array_buffer_tracker()->RegisterNew(buffer);
const bool track_live = Marking::IsBlack(Marking::MarkBitFrom(buffer));
return array_buffer_tracker()->RegisterNew(buffer, track_live);
}
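
The track_live decision above can be read as follows (a sketch with
hypothetical types, not the V8 API): a buffer that is already marked black
will not be visited by the marker again, so it has to enter the tracker as
live immediately; any other buffer will be discovered by marking, which calls
MarkLive() on it.

#include <set>

struct Buffer;

struct Tracker {
  std::set<Buffer*> live, not_yet_discovered;
  void Add(Buffer* b) { live.insert(b); not_yet_discovered.insert(b); }
  void AddLive(Buffer* b) { live.insert(b); }
};

void RegisterNew(Tracker* tracker, Buffer* buffer, bool already_black) {
  if (already_black) {
    tracker->AddLive(buffer);  // the marker will not see it again
  } else {
    tracker->Add(buffer);      // the marker will prove liveness later
  }
}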

View File

@@ -815,6 +815,16 @@ class Heap {
amount_of_external_allocated_memory_ += delta;
}
void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
amount_of_external_allocated_memory_freed_.Increment(freed);
}
void account_amount_of_external_allocated_freed_memory() {
amount_of_external_allocated_memory_ -=
amount_of_external_allocated_memory_freed_.Value();
amount_of_external_allocated_memory_freed_.SetValue(0);
}
void DeoptMarkedAllocationSites();
bool DeoptMaybeTenuredAllocationSites() {
@@ -1982,6 +1992,8 @@ class Heap {
// Caches the amount of external memory registered at the last global gc.
int64_t amount_of_external_allocated_memory_at_last_global_gc_;
base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
// This can be calculated directly from a pointer to the heap; however, it is
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_;
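
The freed-memory counter above splits external memory accounting in two:
concurrent sweepers only bump an atomic counter, and the main thread folds it
into the main (non-atomic) counter at a safe point. A sketch under those
assumptions, with illustrative names:

#include <atomic>
#include <cstdint>

class ExternalMemoryAccounting {
 public:
  // Called from concurrent sweeper threads.
  void UpdateFreed(intptr_t freed) {
    freed_.fetch_add(freed, std::memory_order_relaxed);
  }
  // Called on the main thread, e.g. when a GC starts.
  void AccountFreed() {
    total_ -= freed_.exchange(0, std::memory_order_relaxed);
  }
  int64_t total() const { return total_; }

 private:
  int64_t total_ = 0;  // main-thread only; mirrors the main counter
  std::atomic<intptr_t> freed_{0};
};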

View File

@@ -547,6 +547,7 @@ void IncrementalMarking::StartMarking() {
MarkCompactCollector::kMaxMarkingDequeSize);
ActivateIncrementalWriteBarrier();
heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
// Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP

View File

@@ -14,7 +14,7 @@
#include "src/frames-inl.h"
#include "src/gdb-jit.h"
#include "src/global-handles.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
@@ -872,6 +872,10 @@ void MarkCompactCollector::Prepare() {
space = spaces.next()) {
space->PrepareForMarkCompact();
}
if (!was_marked_incrementally_) {
heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
}
heap()->account_amount_of_external_allocated_freed_memory();
#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1727,20 +1731,12 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
if (heap_->ShouldBePromoted(object->address(), size) &&
TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
&target_object)) {
// If we end up needing more special cases, we should factor this out.
if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(target_object));
}
promoted_size_ += size;
return true;
}
HeapObject* target = nullptr;
AllocationSpace space = AllocateTargetObject(object, &target);
MigrateObject(HeapObject::cast(target), object, size, space);
if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
}
semispace_copied_size_ += size;
return true;
}
@@ -1865,10 +1861,6 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
}
inline bool Visit(HeapObject* object) {
if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
object->GetHeap()->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(object));
}
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBodyFast(&visitor);
promoted_size_ += object->Size();
@@ -1909,6 +1901,9 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
inline bool Visit(HeapObject* object) {
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBody(&visitor);
if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
return true;
}
@@ -3126,21 +3121,27 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
switch (ComputeEvacuationMode(page)) {
case kObjectsNewToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
page->ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::kForwardingPointer>();
DCHECK(result);
USE(result);
break;
case kPageNewToOld:
result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
// ArrayBufferTracker will be updated during sweeping.
DCHECK(result);
USE(result);
break;
case kObjectsOldToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
page->ScanAndFreeDeadArrayBuffers<
LocalArrayBufferTracker::kForwardingPointer>();
if (!result) {
// Aborted compaction page. We can record slots here to have them
// processed in parallel later on.
EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
page->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>();
DCHECK(result);
USE(result);
// We need to return failure here to indicate that we want this page
@@ -3383,6 +3384,7 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
freed_bytes = space->UnaccountedFree(free_start, size);
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
}
p->FreeDeadArrayBuffers();
p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
}
@@ -3526,11 +3528,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
// EvacuateNewSpaceAndCandidates iterates over new space objects and for
// ArrayBuffers either re-registers them as live or promotes them. This is
// needed to properly free them.
heap()->array_buffer_tracker()->FreeDead(false);
// Deallocate evacuated candidate pages.
ReleaseEvacuationCandidates();
}
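
EvacuatePage() above picks the liveness signal per evacuation mode:
successfully evacuated pages use forwarding pointers, an aborted compaction
page falls back to mark bits (its objects did not move), and a page moved
wholesale to old space defers to the sweeper. A sketch of that dispatch, with
hypothetical enum names mirroring the code:

#include <optional>

enum class LivenessIndicator { kForwardingPointer, kMarkBit };
enum class EvacuationMode { kObjectsNewToOld, kPageNewToOld, kObjectsOldToOld };

// Returns the indicator for ScanAndFreeDeadArrayBuffers(), or nullopt if the
// tracker is instead updated later during sweeping.
std::optional<LivenessIndicator> IndicatorFor(EvacuationMode mode,
                                              bool evacuation_succeeded) {
  switch (mode) {
    case EvacuationMode::kObjectsNewToOld:
      return LivenessIndicator::kForwardingPointer;
    case EvacuationMode::kObjectsOldToOld:
      // An aborted compaction page keeps its objects in place; only the
      // mark bits tell us which buffers survived.
      return evacuation_succeeded ? LivenessIndicator::kForwardingPointer
                                  : LivenessIndicator::kMarkBit;
    case EvacuationMode::kPageNewToOld:
      return std::nullopt;  // handled by the sweeper via FreeDeadArrayBuffers()
  }
  return std::nullopt;  // unreachable with a well-formed mode
}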

View File

@@ -105,11 +105,6 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
Map* map, HeapObject* object) {
typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
JSArrayBufferBodyVisitor;
if (!JSArrayBuffer::cast(object)->is_external()) {
Heap* heap = map->GetHeap();
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
return JSArrayBufferBodyVisitor::Visit(map, object);
}

View File

@@ -289,14 +289,6 @@ class ScavengingVisitor : public StaticVisitorBase {
static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
HeapObject* object) {
ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
Heap* heap = map->GetHeap();
MapWord map_word = object->map_word();
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (!heap->InNewSpace(target)) {
heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
}
}

View File

@@ -518,6 +518,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
Bitmap::Clear(chunk);
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -1039,6 +1040,11 @@ void MemoryChunk::ReleaseAllocatedMemory() {
if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
if (local_tracker_ != nullptr) {
delete local_tracker_;
local_tracker_ = nullptr;
}
}
static SlotSet* AllocateSlotSet(size_t size, Address page_start) {

View File

@@ -14,6 +14,7 @@
#include "src/base/platform/mutex.h"
#include "src/flags.h"
#include "src/hashmap.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/list.h"
#include "src/objects.h"
#include "src/utils.h"
@@ -523,7 +524,8 @@ class MemoryChunk {
+ kPointerSize // AtomicValue next_chunk_
+ kPointerSize // AtomicValue prev_chunk_
// FreeListCategory categories_[kNumberOfCategories]
+ FreeListCategory::kSize * kNumberOfCategories;
+ FreeListCategory::kSize * kNumberOfCategories +
kPointerSize; // LocalArrayBufferTracker* local_tracker_
// We add some more space to the computed header size to amount for missing
// alignment requirements in our computation.
@@ -824,6 +826,8 @@ class MemoryChunk {
FreeListCategory categories_[kNumberOfCategories];
LocalArrayBufferTracker* local_tracker_;
private:
void InitializeReservedMemory() { reservation_.Reset(); }
@@ -954,6 +958,40 @@ class Page : public MemoryChunk {
available_in_free_list_.Increment(available);
}
LocalArrayBufferTracker* local_tracker() {
if (local_tracker_ == nullptr) {
local_tracker_ = new LocalArrayBufferTracker(heap_);
}
return local_tracker_;
}
void FreeDeadArrayBuffers() {
if (local_tracker_ != nullptr) {
local_tracker_->FreeDead();
if (local_tracker_->IsEmpty()) {
delete local_tracker_;
local_tracker_ = nullptr;
}
}
}
template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
void ScanAndFreeDeadArrayBuffers() {
if (local_tracker_ != nullptr) {
local_tracker_->ScanAndFreeDead<liveness_indicator>();
if (local_tracker_->IsEmpty()) {
delete local_tracker_;
local_tracker_ = nullptr;
}
}
}
void ResetTracker() {
if (local_tracker_ != nullptr) {
local_tracker_->Reset();
}
}
#ifdef DEBUG
void Print();
#endif // DEBUG
@@ -2295,6 +2333,16 @@ class PagedSpace : public Space {
inline void UnlinkFreeListCategories(Page* page);
inline intptr_t RelinkFreeListCategories(Page* page);
// Callback signature:
// void Callback(Page*);
template <typename Callback>
void ForAllPages(Callback callback) {
PageIterator it(this);
while (it.has_next()) {
callback(it.next());
}
}
protected:
// PagedSpaces that should be included in snapshots have different, i.e.,
// smaller, initial pages.
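
Two details of the Page integration above are worth spelling out: the tracker
is allocated lazily on first use and deleted again once it is empty, so pages
without array buffers pay nothing; and ForAllPages() is just a callback loop
over a space's pages. A sketch with hypothetical Page/Tracker/Space types:

#include <vector>

struct Tracker {
  bool IsEmpty() const { return true; }  // trivially empty in this sketch
  void FreeDead() {}
};

struct Page {
  Tracker* local_tracker_ = nullptr;
  Tracker* local_tracker() {  // lazily allocated on first use
    if (local_tracker_ == nullptr) local_tracker_ = new Tracker();
    return local_tracker_;
  }
  void FreeDeadArrayBuffers() {
    if (local_tracker_ == nullptr) return;
    local_tracker_->FreeDead();
    if (local_tracker_->IsEmpty()) {  // drop the tracker once it is empty
      delete local_tracker_;
      local_tracker_ = nullptr;
    }
  }
};

struct Space {
  std::vector<Page*> pages_;
  template <typename Callback>
  void ForAllPages(Callback callback) {
    for (Page* p : pages_) callback(p);
  }
};

// Usage, mirroring ArrayBufferTracker::ResetTrackersInOldSpace():
//   space.ForAllPages([](Page* p) { p->FreeDeadArrayBuffers(); });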

View File

@@ -830,6 +830,7 @@
'handles.h',
'hashmap.h',
'heap-symbols.h',
'heap/array-buffer-tracker-inl.h',
'heap/array-buffer-tracker.cc',
'heap/array-buffer-tracker.h',
'heap/memory-reducer.cc',

View File

@@ -102,6 +102,7 @@
'heap/heap-utils.cc',
'heap/heap-utils.h',
'heap/test-alloc.cc',
'heap/test-array-buffer-tracker.cc',
'heap/test-compaction.cc',
'heap/test-heap.cc',
'heap/test-incremental-marking.cc',

View File

@@ -141,6 +141,21 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
space->ClearStats();
}
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
space->EmptyAllocationInfo();
PageIterator pit(space);
while (pit.has_next()) {
pit.next()->MarkNeverAllocateForTesting();
}
}
void GcAndSweep(Heap* heap, AllocationSpace space) {
heap->CollectGarbage(space);
if (heap->mark_compact_collector()->sweeping_in_progress()) {
heap->mark_compact_collector()->EnsureSweepingCompleted();
}
}
} // namespace heap
} // namespace internal
} // namespace v8

View File

@@ -40,6 +40,10 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
// Helper function that simulates a full old-space in the heap.
void SimulateFullSpace(v8::internal::PagedSpace* space);
void AbandonCurrentlyFreeMemory(PagedSpace* space);
void GcAndSweep(Heap* heap, AllocationSpace space);
} // namespace heap
} // namespace internal
} // namespace v8

View File

@@ -0,0 +1,145 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/array-buffer-tracker.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"
namespace {
typedef i::LocalArrayBufferTracker LocalTracker;
void VerifyTrackedInNewSpace(i::JSArrayBuffer* buf) {
CHECK(i::Page::FromAddress(buf->address())->InNewSpace());
CHECK(i::Page::FromAddress(buf->address())->local_tracker()->IsTracked(buf));
}
void VerifyTrackedInOldSpace(i::JSArrayBuffer* buf) {
CHECK(!i::Page::FromAddress(buf->address())->InNewSpace());
CHECK(i::Page::FromAddress(buf->address())->local_tracker()->IsTracked(buf));
}
void VerifyUntracked(i::JSArrayBuffer* buf) {
CHECK(!i::Page::FromAddress(buf->address())->local_tracker()->IsTracked(buf));
}
} // namespace
namespace v8 {
namespace internal {
// The following tests make sure that JSArrayBuffer tracking works as
// expected when moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_OnlyScavenge) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_ScavengeAndMC) {
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer* raw_ab = nullptr;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInNewSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyTrackedInOldSpace(*buf);
heap::GcAndSweep(heap, NEW_SPACE);
VerifyTrackedInOldSpace(*buf);
raw_ab = *buf;
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
VerifyUntracked(raw_ab);
}
TEST(ArrayBuffer_Compaction) {
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
heap::AbandonCurrentlyFreeMemory(heap->old_space());
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
VerifyTrackedInNewSpace(*buf1);
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
Page* page_before_gc = Page::FromAddress(buf1->address());
page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
VerifyTrackedInOldSpace(*buf1);
heap->CollectAllGarbage();
Page* page_after_gc = Page::FromAddress(buf1->address());
VerifyTrackedInOldSpace(*buf1);
CHECK_NE(page_before_gc, page_after_gc);
}
} // namespace internal
} // namespace v8