From 93fe04afa9428f4f68dc5dd302cda83908be84a7 Mon Sep 17 00:00:00 2001 From: mlippautz Date: Wed, 25 May 2016 02:37:49 -0700 Subject: [PATCH] Revert of "[heap] Fine-grained JSArrayBuffer tracking" (patchset #6 id:220001 of https://codereview.chromium.org/2005723005/ ) Reason for revert: Breaking TSAN https://uberchromegw.corp.google.com/i/client.v8/builders/V8%20Linux64%20TSAN/builds/9741/steps/Ignition%20-%20turbofan%20%28flakes%29/logs/FixedUint32Array Original issue's description: > Reland of "[heap] Fine-grained JSArrayBuffer tracking" > > Track based on JSArrayBuffer addresses on pages instead of the attached > backing store. > > Details of tracking: > - Scavenge: New space pages are processes in bulk on the main thread > - MC: Unswept pages are processed in bulk in parallel. All other pages > are processed by the sweeper concurrently. > > This reverts commit d2dff0320b75bf6054e46c379b192619d4884caf. > > R=hpayer@chromium.org > BUG=chromium:611688 > LOG=N > CQ_EXTRA_TRYBOTS=tryserver.v8:v8_linux_arm64_gc_stress_dbg,v8_linux_gc_stress_dbg,v8_mac_gc_stress_dbg,v8_linux64_tsan_rel,v8_mac64_asan_rel > > Committed: https://crrev.com/b5704bc1e6ba74c9ca975f5d58f5b2dc60261457 > Cr-Commit-Position: refs/heads/master@{#36495} TBR=hpayer@chromium.org # Skipping CQ checks because original CL landed less than 1 days ago. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true BUG=chromium:611688 Review-Url: https://codereview.chromium.org/2011863002 Cr-Commit-Position: refs/heads/master@{#36504} --- BUILD.gn | 1 - include/v8.h | 2 +- src/heap/array-buffer-tracker-inl.h | 74 ------ src/heap/array-buffer-tracker.cc | 238 +++++++----------- src/heap/array-buffer-tracker.h | 126 +++------- src/heap/heap.cc | 4 +- src/heap/heap.h | 12 - src/heap/incremental-marking.cc | 1 - src/heap/mark-compact.cc | 38 ++- src/heap/objects-visiting-inl.h | 5 + src/heap/scavenger.cc | 8 + src/heap/spaces-inl.h | 1 + src/heap/spaces.cc | 11 +- src/heap/spaces.h | 28 +-- src/v8.gyp | 1 - test/cctest/cctest.gyp | 1 - test/cctest/heap/heap-utils.cc | 15 -- test/cctest/heap/heap-utils.h | 4 - test/cctest/heap/test-array-buffer-tracker.cc | 206 --------------- 19 files changed, 170 insertions(+), 606 deletions(-) delete mode 100644 src/heap/array-buffer-tracker-inl.h delete mode 100644 test/cctest/heap/test-array-buffer-tracker.cc diff --git a/BUILD.gn b/BUILD.gn index 6c5cc03bb6..13943c9078 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -1159,7 +1159,6 @@ v8_source_set("v8_base") { "src/handles.h", "src/hashmap.h", "src/heap-symbols.h", - "src/heap/array-buffer-tracker-inl.h", "src/heap/array-buffer-tracker.cc", "src/heap/array-buffer-tracker.h", "src/heap/gc-idle-time-handler.cc", diff --git a/include/v8.h b/include/v8.h index 62bb7f18dc..b5db5fe429 100644 --- a/include/v8.h +++ b/include/v8.h @@ -7392,7 +7392,7 @@ class Internals { kAmountOfExternalAllocatedMemoryOffset + kApiInt64Size; static const int kIsolateRootsOffset = kAmountOfExternalAllocatedMemoryAtLastGlobalGCOffset + kApiInt64Size + - kApiPointerSize + kApiPointerSize; + kApiPointerSize; static const int kUndefinedValueRootIndex = 4; static const int kTheHoleValueRootIndex = 5; static const int kNullValueRootIndex = 6; diff --git a/src/heap/array-buffer-tracker-inl.h b/src/heap/array-buffer-tracker-inl.h deleted file mode 100644 index 5a5be4938e..0000000000 --- a/src/heap/array-buffer-tracker-inl.h +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2016 the V8 project authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/heap/array-buffer-tracker.h"
-#include "src/heap/heap.h"
-#include "src/heap/mark-compact.h"
-#include "src/isolate.h"
-
-namespace v8 {
-namespace internal {
-
-template <typename Callback>
-void LocalArrayBufferTracker::Process(Callback callback) {
-  JSArrayBuffer* new_buffer = nullptr;
-  size_t freed_memory = 0;
-  for (TrackingMap::iterator it = live_.begin(); it != live_.end();) {
-    switch (callback(it->first, &new_buffer)) {
-      case kKeepEntry:
-        it++;
-        break;
-      case kKeepAndUpdateEntry:
-        DCHECK_NOT_NULL(new_buffer);
-        Page::FromAddress(new_buffer->address())
-            ->local_tracker<Page::kCreateIfNotPresent>()
-            ->AddLive(new_buffer, it->second);
-        live_.erase(it++);
-        break;
-      case kRemoveEntry:
-        heap_->isolate()->array_buffer_allocator()->Free(it->second.first,
-                                                         it->second.second);
-        freed_memory += it->second.second;
-        live_.erase(it++);
-        break;
-      default:
-        UNREACHABLE();
-    }
-  }
-  if (freed_memory > 0) {
-    heap_->update_amount_of_external_allocated_freed_memory(
-        static_cast<intptr_t>(freed_memory));
-  }
-  not_yet_discovered_.clear();
-  started_ = false;
-}
-
-template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
-void LocalArrayBufferTracker::ScanAndFreeDead() {
-  switch (liveness_indicator) {
-    case kForwardingPointer:
-      Process([](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
-        MapWord map_word = old_buffer->map_word();
-        if (map_word.IsForwardingAddress()) {
-          *new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
-          return LocalArrayBufferTracker::kKeepAndUpdateEntry;
-        }
-        return LocalArrayBufferTracker::kRemoveEntry;
-      });
-      break;
-    case kMarkBit:
-      Process([](JSArrayBuffer* old_buffer, JSArrayBuffer**) {
-        if (Marking::IsBlackOrGrey(Marking::MarkBitFrom(old_buffer))) {
-          return LocalArrayBufferTracker::kKeepEntry;
-        }
-        return LocalArrayBufferTracker::kRemoveEntry;
-      });
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-}  // namespace internal
-}  // namespace v8
diff --git a/src/heap/array-buffer-tracker.cc b/src/heap/array-buffer-tracker.cc
index 3130f31209..6e389c1cbf 100644
--- a/src/heap/array-buffer-tracker.cc
+++ b/src/heap/array-buffer-tracker.cc
@@ -3,104 +3,53 @@
 // found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h" -#include "src/heap/array-buffer-tracker-inl.h" #include "src/heap/heap.h" #include "src/isolate.h" -#include "src/objects-inl.h" #include "src/objects.h" +#include "src/objects-inl.h" #include "src/v8.h" namespace v8 { namespace internal { -LocalArrayBufferTracker::~LocalArrayBufferTracker() { +ArrayBufferTracker::~ArrayBufferTracker() { + Isolate* isolate = heap()->isolate(); size_t freed_memory = 0; - for (auto& buffer : live_) { - heap_->isolate()->array_buffer_allocator()->Free(buffer.second.first, - buffer.second.second); - freed_memory += buffer.second.second; + for (auto& buffer : live_array_buffers_) { + isolate->array_buffer_allocator()->Free(buffer.first, buffer.second); + freed_memory += buffer.second; } + for (auto& buffer : live_array_buffers_for_scavenge_) { + isolate->array_buffer_allocator()->Free(buffer.first, buffer.second); + freed_memory += buffer.second; + } + live_array_buffers_.clear(); + live_array_buffers_for_scavenge_.clear(); + not_yet_discovered_array_buffers_.clear(); + not_yet_discovered_array_buffers_for_scavenge_.clear(); + if (freed_memory > 0) { - heap_->update_amount_of_external_allocated_freed_memory( - static_cast(freed_memory)); - } - live_.clear(); - not_yet_discovered_.clear(); -} - -void LocalArrayBufferTracker::Add(Key key, const Value& value) { - live_[key] = value; - not_yet_discovered_[key] = value; -} - -void LocalArrayBufferTracker::AddLive(Key key, const Value& value) { - DCHECK_EQ(not_yet_discovered_.count(key), 0); - live_[key] = value; -} - -void LocalArrayBufferTracker::MarkLive(Key key) { - DCHECK_EQ(live_.count(key), 1); - not_yet_discovered_.erase(key); -} - -LocalArrayBufferTracker::Value LocalArrayBufferTracker::Remove(Key key) { - DCHECK_EQ(live_.count(key), 1); - Value value = live_[key]; - live_.erase(key); - not_yet_discovered_.erase(key); - return value; -} - -void LocalArrayBufferTracker::FreeDead() { - size_t freed_memory = 0; - for (TrackingMap::iterator it = not_yet_discovered_.begin(); - it != not_yet_discovered_.end();) { - heap_->isolate()->array_buffer_allocator()->Free(it->second.first, - it->second.second); - freed_memory += it->second.second; - live_.erase(it->first); - not_yet_discovered_.erase(it++); - } - if (freed_memory > 0) { - heap_->update_amount_of_external_allocated_freed_memory( - static_cast(freed_memory)); - } - started_ = false; -} - -void LocalArrayBufferTracker::Reset() { - if (!started_) { - not_yet_discovered_ = live_; - started_ = true; + heap()->update_amount_of_external_allocated_memory( + -static_cast(freed_memory)); } } -bool LocalArrayBufferTracker::IsEmpty() { - return live_.empty() && not_yet_discovered_.empty(); -} - -ArrayBufferTracker::~ArrayBufferTracker() {} void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) { void* data = buffer->backing_store(); if (!data) return; - size_t length = NumberToSize(heap_->isolate(), buffer->byte_length()); - Page* page = Page::FromAddress(buffer->address()); - LocalArrayBufferTracker* tracker = - page->local_tracker(); - DCHECK_NOT_NULL(tracker); - { - base::LockGuard guard(tracker->mutex()); - if (Marking::IsBlack(Marking::MarkBitFrom(buffer))) { - tracker->AddLive(buffer, std::make_pair(data, length)); - } else { - tracker->Add(buffer, std::make_pair(data, length)); - } + bool in_new_space = heap()->InNewSpace(buffer); + size_t length = NumberToSize(heap()->isolate(), buffer->byte_length()); + if (in_new_space) { + live_array_buffers_for_scavenge_[data] = length; + } else { + live_array_buffers_[data] 
= length; } + // We may go over the limit of externally allocated memory here. We call the // api function to trigger a GC in this case. - reinterpret_cast(heap_->isolate()) + reinterpret_cast(heap()->isolate()) ->AdjustAmountOfExternalAllocatedMemory(length); } @@ -109,79 +58,84 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) { void* data = buffer->backing_store(); if (!data) return; - LocalArrayBufferTracker* tracker = - Page::FromAddress(buffer->address())->local_tracker(); - DCHECK_NOT_NULL(tracker); - size_t length = 0; - { - base::LockGuard guard(tracker->mutex()); - length = tracker->Remove(buffer).second; - } - heap_->update_amount_of_external_allocated_memory( - -static_cast(length)); + bool in_new_space = heap()->InNewSpace(buffer); + std::map* live_buffers = + in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_; + std::map* not_yet_discovered_buffers = + in_new_space ? ¬_yet_discovered_array_buffers_for_scavenge_ + : ¬_yet_discovered_array_buffers_; + + DCHECK(live_buffers->count(data) > 0); + + size_t length = (*live_buffers)[data]; + live_buffers->erase(data); + not_yet_discovered_buffers->erase(data); + + heap()->update_amount_of_external_allocated_memory( + -static_cast(length)); } -void ArrayBufferTracker::FreeDeadInNewSpace() { - NewSpacePageIterator from_it(heap_->new_space()->FromSpaceStart(), - heap_->new_space()->FromSpaceEnd()); - while (from_it.has_next()) { - ScanAndFreeDeadArrayBuffers( - from_it.next()); - } - heap_->account_amount_of_external_allocated_freed_memory(); -} - -void ArrayBufferTracker::ResetTrackersInOldSpace() { - heap_->old_space()->ForAllPages([](Page* p) { - LocalArrayBufferTracker* tracker = p->local_tracker(); - if (tracker != nullptr) { - tracker->Reset(); - if (tracker->IsEmpty()) { - p->ReleaseLocalTracker(); - } - } - }); -} void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) { + base::LockGuard guard(&mutex_); + void* data = buffer->backing_store(); + + // ArrayBuffer might be in the middle of being constructed. + if (data == heap()->undefined_value()) return; + if (heap()->InNewSpace(buffer)) { + not_yet_discovered_array_buffers_for_scavenge_.erase(data); + } else { + not_yet_discovered_array_buffers_.erase(data); + } +} + + +void ArrayBufferTracker::FreeDead(bool from_scavenge) { + size_t freed_memory = 0; + Isolate* isolate = heap()->isolate(); + for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) { + isolate->array_buffer_allocator()->Free(buffer.first, buffer.second); + freed_memory += buffer.second; + live_array_buffers_for_scavenge_.erase(buffer.first); + } + + if (!from_scavenge) { + for (auto& buffer : not_yet_discovered_array_buffers_) { + isolate->array_buffer_allocator()->Free(buffer.first, buffer.second); + freed_memory += buffer.second; + live_array_buffers_.erase(buffer.first); + } + } + + not_yet_discovered_array_buffers_for_scavenge_ = + live_array_buffers_for_scavenge_; + if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_; + + // Do not call through the api as this code is triggered while doing a GC. 
+  heap()->update_amount_of_external_allocated_memory(
+      -static_cast<int64_t>(freed_memory));
+}
+
+
+void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+}
+
+
+void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+
+  if (buffer->is_external()) return;
   void* data = buffer->backing_store();
-  if (data == nullptr) return;
-  if (data == heap_->undefined_value()) return;
-
-  Page* page = Page::FromAddress(buffer->address());
-  LocalArrayBufferTracker* tracker =
-      page->local_tracker<Page::kCreateIfNotPresent>();
-  DCHECK_NOT_NULL(tracker);
-  if (tracker->IsTracked(buffer)) {
-    base::LockGuard<base::Mutex> guard(page->mutex());
-    tracker->MarkLive((buffer));
-  } else {
-    RegisterNew(buffer);
-  }
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == heap()->undefined_value()) return;
+  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
+  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
+  live_array_buffers_for_scavenge_.erase(data);
+  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
 }
 
-void ArrayBufferTracker::FreeDead(Page* page) {
-  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
-  if (tracker != nullptr) {
-    base::LockGuard<base::Mutex> guard(tracker->mutex());
-    tracker->FreeDead();
-  }
-}
-
-template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
-void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers(Page* page) {
-  LocalArrayBufferTracker* tracker = page->local_tracker<Page::kDontCreate>();
-  if (tracker != nullptr) {
-    base::LockGuard<base::Mutex> guard(tracker->mutex());
-    tracker->ScanAndFreeDead<liveness_indicator>();
-  }
-}
-
-template void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
-    LocalArrayBufferTracker::LivenessIndicator::kForwardingPointer>(Page* page);
-template void ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
-    LocalArrayBufferTracker::LivenessIndicator::kMarkBit>(Page* page);
-
 }  // namespace internal
 }  // namespace v8
diff --git a/src/heap/array-buffer-tracker.h b/src/heap/array-buffer-tracker.h
index 45dae269cf..6130003d15 100644
--- a/src/heap/array-buffer-tracker.h
+++ b/src/heap/array-buffer-tracker.h
@@ -15,115 +15,61 @@ namespace internal {
 
 // Forward declarations.
 class Heap;
-class Page;
 class JSArrayBuffer;
 
-// LocalArrayBufferTracker is a tracker for live and dead JSArrayBuffer
-// objects.
-//
-// It consists of two sets, a live, and a not yet discovered set of buffers.
-// Upon registration (in the ArrayBufferTracker) the buffers are added to both
-// sets. When a buffer is encountered as live (or added as live) it is removed
-// from the not yet discovered set. Finally, after each round (sometime during
-// GC) the leftover not yet discovered buffers are cleaned up. Upon starting
-// a new round the not yet discovered buffers are initialized from the live
-// set.
-//
-// Caveats:
-// - Between cleaning up the buffers using |Free| we always need a |Reset| and
-//   thus another marking phase.
-// - LocalArrayBufferTracker is completely unlocked. Calls need to ensure
-//   exclusive access.
-class LocalArrayBufferTracker {
- public:
-  typedef std::pair<void*, size_t> Value;
-  typedef JSArrayBuffer* Key;
-
-  enum LivenessIndicator { kForwardingPointer, kMarkBit };
-  enum CallbackResult { kKeepEntry, kKeepAndUpdateEntry, kRemoveEntry };
-
-  explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap), started_(false) {}
-  ~LocalArrayBufferTracker();
-
-  void Add(Key key, const Value& value);
-  void AddLive(Key key, const Value& value);
-  Value Remove(Key key);
-  void MarkLive(Key key);
-  bool IsEmpty();
-
-  // Resets the tracking set, i.e., not yet discovered buffers are initialized
-  // from the remaining live set of buffers.
-  void Reset();
-
-  // Frees up any dead backing stores of not yet discovered array buffers.
-  // Requires that the buffers have been properly marked using MarkLive.
-  void FreeDead();
-
-  // Scans the whole tracker and decides based on liveness_indicator whether
-  // a JSArrayBuffer is still considered live.
-  template <LivenessIndicator liveness_indicator>
-  inline void ScanAndFreeDead();
-
-  bool IsTracked(Key key) { return live_.find(key) != live_.end(); }
-
-  base::Mutex* mutex() { return &mutex_; }
-
- private:
-  // TODO(mlippautz): Switch to unordered_map once it is supported on all
-  // platforms.
-  typedef std::map<Key, Value> TrackingMap;
-
-  // Processes buffers one by one. The CallbackResult decides whether the
-  // buffer will be dropped or not.
-  //
-  // Callback should be of type:
-  //   CallbackResult fn(JSArrayBuffer*, JSArrayBuffer**);
-  template <typename Callback>
-  inline void Process(Callback callback);
-
-  Heap* heap_;
-  base::Mutex mutex_;
-
-  // |live_| maps tracked JSArrayBuffers to the internally allocated backing
-  // store and length. For each GC round |not_yet_discovered_| is initialized
-  // as a copy of |live_|. Upon finding a JSArrayBuffer during GC, the buffer
-  // is removed from |not_yet_discovered_|. At the end of a GC, we free up the
-  // remaining JSArrayBuffers in |not_yet_discovered_|.
-  TrackingMap live_;
-  TrackingMap not_yet_discovered_;
-
-  bool started_;
-};
-
 class ArrayBufferTracker {
  public:
   explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~ArrayBufferTracker();
 
+  inline Heap* heap() { return heap_; }
+
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
 
-  // Register/unregister a new JSArrayBuffer |buffer| for tracking.
+  // A new ArrayBuffer was created with |data| as backing store.
   void RegisterNew(JSArrayBuffer* buffer);
+
+  // The backing store |data| is no longer owned by V8.
   void Unregister(JSArrayBuffer* buffer);
 
-  // Frees all backing store pointers for dead JSArrayBuffers in new space.
-  void FreeDeadInNewSpace();
-
-  void FreeDead(Page* page);
-
-  template <LocalArrayBufferTracker::LivenessIndicator liveness_indicator>
-  void ScanAndFreeDeadArrayBuffers(Page* page);
-
-  // A live JSArrayBuffer was discovered during marking.
+  // A live ArrayBuffer was discovered during marking/scavenge.
   void MarkLive(JSArrayBuffer* buffer);
 
-  // Resets all trackers in old space. Is required to be called from the main
-  // thread.
-  void ResetTrackersInOldSpace();
+  // Frees all backing store pointers that weren't discovered in the previous
+  // marking or scavenge phase.
+  void FreeDead(bool from_scavenge);
+
+  // Prepare for a new scavenge phase. A new marking phase is implicitly
+  // prepared by finishing the previous one.
+  void PrepareDiscoveryInNewSpace();
+
+  // An ArrayBuffer moved from new space to old space.
+  void Promote(JSArrayBuffer* buffer);
 
  private:
+  base::Mutex mutex_;
   Heap* heap_;
-};
 
+  // |live_array_buffers_| maps externally allocated memory used as backing
+  // store for ArrayBuffers to the length of the respective memory blocks.
+  //
+  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
+  // a copy of |live_array_buffers_| and we remove pointers as we discover live
+  // ArrayBuffer objects during marking. At the end of mark/compact, the
+  // remaining memory blocks can be freed.
+  std::map<void*, size_t> live_array_buffers_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_;
+
+  // To be able to free memory held by ArrayBuffers during scavenge as well, we
+  // have a separate list of allocated memory held by ArrayBuffers in new space.
+  //
+  // Since mark/compact also evacuates the new space, all pointers in the
+  // |live_array_buffers_for_scavenge_| list are also in the
+  // |live_array_buffers_| list.
+  std::map<void*, size_t> live_array_buffers_for_scavenge_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
+};
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index b710c5606e..3c21f777e5 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1626,6 +1626,8 @@ void Heap::Scavenge() {
 
   scavenge_collector_->SelectScavengingVisitorsTable();
 
+  array_buffer_tracker()->PrepareDiscoveryInNewSpace();
+
   // Flip the semispaces.  After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1745,7 +1747,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
-  array_buffer_tracker()->FreeDeadInNewSpace();
+  array_buffer_tracker()->FreeDead(true);
 
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 150647fbc7..82642266af 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -818,16 +818,6 @@ class Heap {
     amount_of_external_allocated_memory_ += delta;
   }
 
-  void update_amount_of_external_allocated_freed_memory(intptr_t freed) {
-    amount_of_external_allocated_memory_freed_.Increment(freed);
-  }
-
-  void account_amount_of_external_allocated_freed_memory() {
-    amount_of_external_allocated_memory_ -=
-        amount_of_external_allocated_memory_freed_.Value();
-    amount_of_external_allocated_memory_freed_.SetValue(0);
-  }
-
   void DeoptMarkedAllocationSites();
 
   bool DeoptMaybeTenuredAllocationSites() {
@@ -1995,8 +1985,6 @@ class Heap {
   // Caches the amount of external memory registered at the last global gc.
   int64_t amount_of_external_allocated_memory_at_last_global_gc_;
 
-  base::AtomicNumber<intptr_t> amount_of_external_allocated_memory_freed_;
-
   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index 4c57ecc833..c250b90b1c 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -547,7 +547,6 @@ void IncrementalMarking::StartMarking() {
       MarkCompactCollector::kMaxMarkingDequeSize);
 
   ActivateIncrementalWriteBarrier();
-  heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
 
   // Marking bits are cleared by the sweeper.
#ifdef VERIFY_HEAP diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc index 713ce8d3ac..7a4e2f2d03 100644 --- a/src/heap/mark-compact.cc +++ b/src/heap/mark-compact.cc @@ -14,7 +14,7 @@ #include "src/frames-inl.h" #include "src/gdb-jit.h" #include "src/global-handles.h" -#include "src/heap/array-buffer-tracker-inl.h" +#include "src/heap/array-buffer-tracker.h" #include "src/heap/gc-tracer.h" #include "src/heap/incremental-marking.h" #include "src/heap/mark-compact-inl.h" @@ -872,10 +872,6 @@ void MarkCompactCollector::Prepare() { space = spaces.next()) { space->PrepareForMarkCompact(); } - if (!was_marked_incrementally_) { - heap_->array_buffer_tracker()->ResetTrackersInOldSpace(); - } - heap()->account_amount_of_external_allocated_freed_memory(); #ifdef VERIFY_HEAP if (!was_marked_incrementally_ && FLAG_verify_heap) { @@ -1731,12 +1727,20 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final if (heap_->ShouldBePromoted(object->address(), size) && TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object, &target_object)) { + // If we end up needing more special cases, we should factor this out. + if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) { + heap_->array_buffer_tracker()->Promote( + JSArrayBuffer::cast(target_object)); + } promoted_size_ += size; return true; } HeapObject* target = nullptr; AllocationSpace space = AllocateTargetObject(object, &target); MigrateObject(HeapObject::cast(target), object, size, space); + if (V8_UNLIKELY(target->IsJSArrayBuffer())) { + heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target)); + } semispace_copied_size_ += size; return true; } @@ -1861,6 +1865,10 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final } inline bool Visit(HeapObject* object) { + if (V8_UNLIKELY(object->IsJSArrayBuffer())) { + object->GetHeap()->array_buffer_tracker()->Promote( + JSArrayBuffer::cast(object)); + } RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); object->IterateBodyFast(&visitor); promoted_size_ += object->Size(); @@ -1901,9 +1909,6 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final inline bool Visit(HeapObject* object) { RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector()); object->IterateBody(&visitor); - if (V8_UNLIKELY(object->IsJSArrayBuffer())) { - heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object)); - } return true; } @@ -3119,35 +3124,24 @@ bool MarkCompactCollector::Evacuator::EvacuateSinglePage(Page* p, bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) { bool result = false; DCHECK(page->SweepingDone()); - Heap* heap = page->heap(); switch (ComputeEvacuationMode(page)) { case kObjectsNewToOld: result = EvacuateSinglePage(page, &new_space_visitor_); - heap->array_buffer_tracker() - ->ScanAndFreeDeadArrayBuffers< - LocalArrayBufferTracker::kForwardingPointer>(page); DCHECK(result); USE(result); break; case kPageNewToOld: result = EvacuateSinglePage(page, &new_space_page_visitor); - // ArrayBufferTracker will be updated during sweeping. DCHECK(result); USE(result); break; case kObjectsOldToOld: result = EvacuateSinglePage(page, &old_space_visitor_); - heap->array_buffer_tracker() - ->ScanAndFreeDeadArrayBuffers< - LocalArrayBufferTracker::kForwardingPointer>(page); if (!result) { // Aborted compaction page. We can record slots here to have them // processed in parallel later on. 
       EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
       result = EvacuateSinglePage(page, &record_visitor);
-      heap->array_buffer_tracker()
-          ->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>(
-              page);
       DCHECK(result);
       USE(result);
       // We need to return failure here to indicate that we want this page
@@ -3390,7 +3384,6 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
     freed_bytes = space->UnaccountedFree(free_start, size);
     max_freed_bytes = Max(freed_bytes, max_freed_bytes);
   }
-  p->heap()->array_buffer_tracker()->FreeDead(p);
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }
@@ -3534,6 +3527,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     }
   }
 
+  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
+  // ArrayBuffers either re-registers them as live or promotes them. This is
+  // needed to properly free them.
+  heap()->array_buffer_tracker()->FreeDead(false);
+
   // Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
 }
diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h
index 2316649560..01dd379775 100644
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -105,6 +105,11 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
     Map* map, HeapObject* object) {
   typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
       JSArrayBufferBodyVisitor;
+
+  if (!JSArrayBuffer::cast(object)->is_external()) {
+    Heap* heap = map->GetHeap();
+    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+  }
   return JSArrayBufferBodyVisitor::Visit(map, object);
 }
 
diff --git a/src/heap/scavenger.cc b/src/heap/scavenger.cc
index 4fa30e2650..2b0234dd0b 100644
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -295,6 +295,14 @@ class ScavengingVisitor : public StaticVisitorBase {
                                          PromotionMode promotion_mode) {
     ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object,
                                                     promotion_mode);
+
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) {
+      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+    }
   }
 
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h
index f5e3bde1a4..7ccfcd648f 100644
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -270,6 +270,7 @@ template <Page::InitializationMode mode>
 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk,
                        Executability executable, PagedSpace* owner) {
   Page* page = reinterpret_cast<Page*>(chunk);
+  page->mutex_ = new base::Mutex();
   DCHECK(page->area_size() <= kAllocatableMemory);
   DCHECK(chunk->owner() == owner);
 
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 08406fc085..0e0a3100f9 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -511,14 +511,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
   chunk->progress_bar_ = 0;
   chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
   chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
-  chunk->mutex_ = new base::Mutex();
+  chunk->mutex_ = nullptr;
   chunk->available_in_free_list_ = 0;
   chunk->wasted_memory_ = 0;
   chunk->ResetLiveBytes();
   Bitmap::Clear(chunk);
   chunk->set_next_chunk(nullptr);
   chunk->set_prev_chunk(nullptr);
-  chunk->local_tracker_ = nullptr;
 
   DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
   DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
@@ -1040,8 +1039,6 @@ void MemoryChunk::ReleaseAllocatedMemory() {
   if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
   if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
   if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
-
-  if (local_tracker_ != nullptr) ReleaseLocalTracker();
 }
 
 static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
@@ -1093,12 +1090,6 @@ void MemoryChunk::ReleaseTypedOldToOldSlots() {
   delete typed_old_to_old_slots_;
   typed_old_to_old_slots_ = nullptr;
 }
-
-void MemoryChunk::ReleaseLocalTracker() {
-  delete local_tracker_;
-  local_tracker_ = nullptr;
-}
-
 // -----------------------------------------------------------------------------
 // PagedSpace implementation
 
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index 89e0c9bc37..aa30e7da3b 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -14,7 +14,6 @@
 #include "src/base/platform/mutex.h"
 #include "src/flags.h"
 #include "src/hashmap.h"
-#include "src/heap/array-buffer-tracker.h"
 #include "src/list.h"
 #include "src/objects.h"
 #include "src/utils.h"
@@ -469,8 +468,6 @@ class MemoryChunk {
     kSweepingInProgress,
   };
 
-  enum ArrayBufferTrackerAccessMode { kDontCreate, kCreateIfNotPresent };
-
   // Every n write barrier invocations we go to runtime even though
   // we could have handled it in generated code. This lets us check
   // whether we have hit the limit and should do some more marking.
@@ -526,8 +523,7 @@ class MemoryChunk {
       + kPointerSize  // AtomicValue next_chunk_
       + kPointerSize  // AtomicValue prev_chunk_
       // FreeListCategory categories_[kNumberOfCategories]
-      + FreeListCategory::kSize * kNumberOfCategories +
-      kPointerSize;  // LocalArrayBufferTracker tracker_
+      + FreeListCategory::kSize * kNumberOfCategories;
 
   // We add some more space to the computed header size to account for missing
   // alignment requirements in our computation.
@@ -646,16 +642,6 @@ class MemoryChunk {
   void AllocateTypedOldToOldSlots();
   void ReleaseTypedOldToOldSlots();
 
-  template <ArrayBufferTrackerAccessMode tracker_access>
-  inline LocalArrayBufferTracker* local_tracker() {
-    if (local_tracker_ == nullptr && tracker_access == kCreateIfNotPresent) {
-      local_tracker_ = new LocalArrayBufferTracker(heap_);
-    }
-    return local_tracker_;
-  }
-
-  void ReleaseLocalTracker();
-
   Address area_start() { return area_start_; }
   Address area_end() { return area_end_; }
   int area_size() { return static_cast<int>(area_end() - area_start()); }
@@ -838,8 +824,6 @@ class MemoryChunk {
 
   FreeListCategory categories_[kNumberOfCategories];
 
-  LocalArrayBufferTracker* local_tracker_;
-
  private:
   void InitializeReservedMemory() { reservation_.Reset(); }
 
@@ -2311,16 +2295,6 @@ class PagedSpace : public Space {
   inline void UnlinkFreeListCategories(Page* page);
   inline intptr_t RelinkFreeListCategories(Page* page);
 
-  // Callback signature:
-  //   void Callback(Page*);
-  template <typename Callback>
-  void ForAllPages(Callback callback) {
-    PageIterator it(this);
-    while (it.has_next()) {
-      callback(it.next());
-    }
-  }
-
  protected:
   // PagedSpaces that should be included in snapshots have different, i.e.,
   // smaller, initial pages.
diff --git a/src/v8.gyp b/src/v8.gyp
index 3f85bcc139..c5e214acdf 100644
--- a/src/v8.gyp
+++ b/src/v8.gyp
@@ -830,7 +830,6 @@
         'handles.h',
         'hashmap.h',
         'heap-symbols.h',
-        'heap/array-buffer-tracker-inl.h',
        'heap/array-buffer-tracker.cc',
        'heap/array-buffer-tracker.h',
        'heap/memory-reducer.cc',
diff --git a/test/cctest/cctest.gyp b/test/cctest/cctest.gyp
index 51f9437ac1..c6c44d3637 100644
--- a/test/cctest/cctest.gyp
+++ b/test/cctest/cctest.gyp
@@ -102,7 +102,6 @@
         'heap/heap-utils.cc',
         'heap/heap-utils.h',
         'heap/test-alloc.cc',
-        'heap/test-array-buffer-tracker.cc',
        'heap/test-compaction.cc',
        'heap/test-heap.cc',
        'heap/test-incremental-marking.cc',
diff --git a/test/cctest/heap/heap-utils.cc b/test/cctest/heap/heap-utils.cc
index 8abc5c3b5c..e6daa45441 100644
--- a/test/cctest/heap/heap-utils.cc
+++ b/test/cctest/heap/heap-utils.cc
@@ -141,21 +141,6 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
   space->ClearStats();
 }
 
-void AbandonCurrentlyFreeMemory(PagedSpace* space) {
-  space->EmptyAllocationInfo();
-  PageIterator pit(space);
-  while (pit.has_next()) {
-    pit.next()->MarkNeverAllocateForTesting();
-  }
-}
-
-void GcAndSweep(Heap* heap, AllocationSpace space) {
-  heap->CollectGarbage(space);
-  if (heap->mark_compact_collector()->sweeping_in_progress()) {
-    heap->mark_compact_collector()->EnsureSweepingCompleted();
-  }
-}
-
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
diff --git a/test/cctest/heap/heap-utils.h b/test/cctest/heap/heap-utils.h
index e03e6fa6e0..f7739d4b0e 100644
--- a/test/cctest/heap/heap-utils.h
+++ b/test/cctest/heap/heap-utils.h
@@ -40,10 +40,6 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion = true);
 // Helper function that simulates a full old-space in the heap.
 void SimulateFullSpace(v8::internal::PagedSpace* space);
 
-void AbandonCurrentlyFreeMemory(PagedSpace* space);
-
-void GcAndSweep(Heap* heap, AllocationSpace space);
-
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
diff --git a/test/cctest/heap/test-array-buffer-tracker.cc b/test/cctest/heap/test-array-buffer-tracker.cc
deleted file mode 100644
index 0e8b65cd2b..0000000000
--- a/test/cctest/heap/test-array-buffer-tracker.cc
+++ /dev/null
@@ -1,206 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/heap/array-buffer-tracker-inl.h"
-#include "src/heap/array-buffer-tracker.h"
-#include "test/cctest/cctest.h"
-#include "test/cctest/heap/heap-utils.h"
-
-namespace {
-
-typedef i::LocalArrayBufferTracker LocalTracker;
-
-void VerifyTrackedInNewSpace(i::JSArrayBuffer* buf) {
-  CHECK(i::Page::FromAddress(buf->address())->InNewSpace());
-  CHECK(i::Page::FromAddress(buf->address())
-            ->local_tracker<i::Page::kCreateIfNotPresent>()
-            ->IsTracked(buf));
-}
-
-void VerifyTrackedInOldSpace(i::JSArrayBuffer* buf) {
-  CHECK(!i::Page::FromAddress(buf->address())->InNewSpace());
-  CHECK(i::Page::FromAddress(buf->address())
-            ->local_tracker<i::Page::kCreateIfNotPresent>()
-            ->IsTracked(buf));
-}
-
-void VerifyUntracked(i::JSArrayBuffer* buf) {
-  CHECK(!i::Page::FromAddress(buf->address())
-             ->local_tracker<i::Page::kCreateIfNotPresent>()
-             ->IsTracked(buf));
-}
-
-}  // namespace
-
-namespace v8 {
-namespace internal {
-
-// The following tests make sure that JSArrayBuffer tracking works as expected
-// when moving the objects through various spaces during GC phases.
-
-TEST(ArrayBuffer_OnlyMC) {
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-
-  JSArrayBuffer* raw_ab = nullptr;
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    raw_ab = *buf;
-    // Prohibit page from being released.
-    Page::FromAddress(buf->address())->MarkNeverEvacuate();
-  }
-  // 2 GCs are needed because we promote to old space as live, meaning that
-  // we will survive one GC.
-  heap::GcAndSweep(heap, OLD_SPACE);
-  heap::GcAndSweep(heap, OLD_SPACE);
-  VerifyUntracked(raw_ab);
-}
-
-TEST(ArrayBuffer_OnlyScavenge) {
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-
-  JSArrayBuffer* raw_ab = nullptr;
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    raw_ab = *buf;
-    // Prohibit page from being released.
-    Page::FromAddress(buf->address())->MarkNeverEvacuate();
-  }
-  // 2 GCs are needed because we promote to old space as live, meaning that
-  // we will survive one GC.
-  heap::GcAndSweep(heap, OLD_SPACE);
-  heap::GcAndSweep(heap, OLD_SPACE);
-  VerifyUntracked(raw_ab);
-}
-
-TEST(ArrayBuffer_ScavengeAndMC) {
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-
-  JSArrayBuffer* raw_ab = nullptr;
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInNewSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    heap::GcAndSweep(heap, NEW_SPACE);
-    VerifyTrackedInOldSpace(*buf);
-    raw_ab = *buf;
-    // Prohibit page from being released.
-    Page::FromAddress(buf->address())->MarkNeverEvacuate();
-  }
-  // 2 GCs are needed because we promote to old space as live, meaning that
-  // we will survive one GC.
-  heap::GcAndSweep(heap, OLD_SPACE);
-  heap::GcAndSweep(heap, OLD_SPACE);
-  VerifyUntracked(raw_ab);
-}
-
-TEST(ArrayBuffer_Compaction) {
-  FLAG_manual_evacuation_candidates_selection = true;
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  heap::AbandonCurrentlyFreeMemory(heap->old_space());
-
-  v8::HandleScope handle_scope(isolate);
-  Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
-  Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
-  VerifyTrackedInNewSpace(*buf1);
-  heap::GcAndSweep(heap, NEW_SPACE);
-  heap::GcAndSweep(heap, NEW_SPACE);
-
-  Page* page_before_gc = Page::FromAddress(buf1->address());
-  page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
-  VerifyTrackedInOldSpace(*buf1);
-
-  heap->CollectAllGarbage();
-
-  Page* page_after_gc = Page::FromAddress(buf1->address());
-  VerifyTrackedInOldSpace(*buf1);
-
-  CHECK_NE(page_before_gc, page_after_gc);
-}
-
-TEST(ArrayBuffer_UnregisterDuringSweep) {
-// Regular pages in old space (without compaction) are processed concurrently
-// in the sweeper. If we happen to unregister a buffer (either explicitly, or
-// implicitly through e.g. |Externalize|) we need to sync with the sweeper
-// task.
-//
-// Note: This test will only fail on TSAN configurations.
-
-// Disable verify-heap since it forces sweeping to be completed in the
-// epilogue of the GC.
-#ifdef VERIFY_HEAP
-  i::FLAG_verify_heap = false;
-#endif  // VERIFY_HEAP
-
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-
-    {
-      v8::HandleScope handle_scope(isolate);
-      // Allocate another buffer on the same page to force processing a
-      // non-empty set of buffers in the last GC.
-      Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
-      Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
-      VerifyTrackedInNewSpace(*buf);
-      VerifyTrackedInNewSpace(*buf2);
-      heap::GcAndSweep(heap, NEW_SPACE);
-      VerifyTrackedInNewSpace(*buf);
-      VerifyTrackedInNewSpace(*buf2);
-      heap::GcAndSweep(heap, NEW_SPACE);
-      VerifyTrackedInOldSpace(*buf);
-      VerifyTrackedInOldSpace(*buf2);
-    }
-
-    heap->CollectGarbage(OLD_SPACE);
-    // |Externalize| will cause the buffer to be |Unregister|ed. Without
-    // barriers and proper synchronization this will trigger a data race on
-    // TSAN.
-    v8::ArrayBuffer::Contents contents = ab->Externalize();
-    heap->isolate()->array_buffer_allocator()->Free(contents.Data(),
-                                                    contents.ByteLength());
-  }
-}
-
-}  // namespace internal
-}  // namespace v8
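
For reference, the heap-wide bookkeeping this revert restores boils down to a
pair of maps: a "live" map from backing store pointer to length, and a "not yet
discovered" map that is reset to a copy of the live map at the start of each GC
round and drained as marking (or scavenging) encounters buffers; whatever is
left afterwards is dead and can be freed. The sketch below is a minimal,
standalone illustration of that scheme — BackingStoreTracker and every name in
it are invented for the example, and it deliberately omits the mutex, the
separate new-space maps for the scavenger, and the external-memory accounting
that V8's real ArrayBufferTracker performs.

// Minimal sketch of the two-map live / not-yet-discovered scheme.
#include <cstdlib>
#include <map>

class BackingStoreTracker {
 public:
  // A new ArrayBuffer was created with |data| as backing store.
  void RegisterNew(void* data, size_t length) { live_[data] = length; }

  // The backing store is no longer owned by the GC (e.g. externalized).
  void Unregister(void* data) {
    live_.erase(data);
    not_yet_discovered_.erase(data);
  }

  // At the start of a GC round, every live buffer counts as "not yet
  // discovered" until marking proves otherwise.
  void PrepareDiscovery() { not_yet_discovered_ = live_; }

  // Marking visited a live ArrayBuffer: it is no longer a free candidate.
  void MarkLive(void* data) { not_yet_discovered_.erase(data); }

  // After marking, everything that was never discovered is dead; release
  // its backing store. Returns the number of bytes freed so the caller can
  // adjust its external-memory counters.
  size_t FreeDead() {
    size_t freed = 0;
    for (auto& entry : not_yet_discovered_) {
      std::free(entry.first);  // stands in for array_buffer_allocator()->Free
      freed += entry.second;
      live_.erase(entry.first);
    }
    not_yet_discovered_.clear();
    return freed;
  }

 private:
  std::map<void*, size_t> live_;
  std::map<void*, size_t> not_yet_discovered_;
};

The CL being reverted replaced these global maps with one such tracker per
page (LocalArrayBufferTracker), so that the sweeper could free dead backing
stores page-locally and in parallel; the revert returns to the single pair of
maps guarded by one mutex until the TSAN failure is resolved.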