[heap] Remove ArrayBufferTracker

ArrayBufferTracker was superseded by ArrayBufferList and
ArrayBufferSweeper. Now that ArrayBufferSweeper is used in production,
we can remove the unused ArrayBufferTracker mechanism.

Bug: v8:10064
Change-Id: I479169c76b6c5c634672024f77e689bb64a36504
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2339105
Reviewed-by: Simon Zünd <szuend@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69351}
Authored by Dominik Inführ on 2020-08-10 10:55:41 +02:00; committed by Commit Bot
parent 01a96a90e9
commit 3c0fb324fa
34 changed files with 309 additions and 1383 deletions
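
For context, the mechanism that survives this cleanup works roughly as follows: each JSArrayBuffer owns an ArrayBufferExtension holding the std::shared_ptr to its BackingStore, and extensions are chained into per-generation ArrayBufferLists that the ArrayBufferSweeper walks after marking, freeing the stores of dead buffers. A simplified sketch with hypothetical stand-in types (the real ones live in src/heap/array-buffer-sweeper.h):

#include <cstddef>
#include <memory>

// Hypothetical stand-ins; not V8's real types.
struct BackingStore {};  // frees the external allocation in its destructor

struct ArrayBufferExtension {
  std::shared_ptr<BackingStore> backing_store;
  std::size_t accounting_length = 0;
  bool marked = false;                   // set by the marker for live buffers
  ArrayBufferExtension* next = nullptr;  // intrusive ArrayBufferList link
};

struct ArrayBufferList {
  ArrayBufferExtension* head = nullptr;
  void Append(ArrayBufferExtension* extension) {
    extension->next = head;
    head = extension;
  }
};

// After marking, the sweeper walks the list: marked extensions survive,
// unmarked ones are deleted, dropping the last shared_ptr to the store.
inline void Sweep(ArrayBufferList* list) {
  ArrayBufferExtension** link = &list->head;
  while (ArrayBufferExtension* extension = *link) {
    if (extension->marked) {
      extension->marked = false;
      link = &extension->next;
    } else {
      *link = extension->next;
      delete extension;
    }
  }
}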

View File

@ -130,9 +130,6 @@ declare_args() {
# Sets -dV8_CONCURRENT_MARKING
v8_enable_concurrent_marking = true
# Sets -dV8_ARRAY_BUFFER_EXTENSION
v8_enable_array_buffer_extension = true
# Runs mksnapshot with --turbo-profiling. After building in this
# configuration, any subsequent run of d8 will output information about usage
# of basic blocks in builtins.
@ -593,9 +590,6 @@ config("features") {
if (v8_enable_concurrent_marking) {
defines += [ "V8_CONCURRENT_MARKING" ]
}
if (v8_enable_array_buffer_extension) {
defines += [ "V8_ARRAY_BUFFER_EXTENSION" ]
}
if (v8_enable_lazy_source_positions) {
defines += [ "V8_ENABLE_LAZY_SOURCE_POSITIONS" ]
}
@ -2510,13 +2504,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/allocation-observer.cc",
"src/heap/allocation-observer.h",
"src/heap/allocation-stats.h",
"src/heap/array-buffer-collector.cc",
"src/heap/array-buffer-collector.h",
"src/heap/array-buffer-sweeper.cc",
"src/heap/array-buffer-sweeper.h",
"src/heap/array-buffer-tracker-inl.h",
"src/heap/array-buffer-tracker.cc",
"src/heap/array-buffer-tracker.h",
"src/heap/barrier.h",
"src/heap/base-space.cc",
"src/heap/base-space.h",

View File

@ -68,10 +68,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
StoreJSArrayBufferBackingStore(
buffer,
EncodeExternalPointer(ReinterpretCast<RawPtrT>(IntPtrConstant(0))));
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
IntPtrConstant(0));
}
for (int offset = JSArrayBuffer::kHeaderSize;
offset < JSArrayBuffer::kSizeWithEmbedderFields; offset += kTaggedSize) {
StoreObjectFieldNoWriteBarrier(buffer, offset, SmiConstant(0));

View File

@ -387,8 +387,8 @@ DEFINE_BOOL(assert_types, false,
DEFINE_BOOL(allocation_site_pretenuring, true,
"pretenure with allocation sites")
DEFINE_BOOL(page_promotion, true, "promote pages based on utilization")
DEFINE_BOOL(always_promote_young_mc, true,
"always promote young objects during mark-compact")
DEFINE_BOOL_READONLY(always_promote_young_mc, true,
"always promote young objects during mark-compact")
DEFINE_INT(page_promotion_threshold, 70,
"min percentage of live bytes on a page to enable fast evacuation")
DEFINE_BOOL(trace_pretenuring, false,
@ -945,20 +945,11 @@ DEFINE_BOOL(write_protect_code_memory, true, "write protect code memory")
#endif
DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING_BOOL,
"use concurrent marking")
#ifdef V8_ARRAY_BUFFER_EXTENSION
#define V8_ARRAY_BUFFER_EXTENSION_BOOL true
#else
#define V8_ARRAY_BUFFER_EXTENSION_BOOL false
#endif
DEFINE_BOOL_READONLY(array_buffer_extension, V8_ARRAY_BUFFER_EXTENSION_BOOL,
"enable array buffer tracking using extension objects")
DEFINE_IMPLICATION(array_buffer_extension, always_promote_young_mc)
DEFINE_BOOL(concurrent_array_buffer_sweeping, true,
"concurrently sweep array buffers")
DEFINE_BOOL(concurrent_allocation, false, "concurrently allocate in old space")
DEFINE_BOOL(local_heaps, false, "allow heap access from background tasks")
DEFINE_IMPLICATION(concurrent_inlining, local_heaps)
DEFINE_NEG_NEG_IMPLICATION(array_buffer_extension, local_heaps)
DEFINE_BOOL(stress_concurrent_allocation, false,
"start background threads that allocate memory")
DEFINE_IMPLICATION(stress_concurrent_allocation, concurrent_allocation)
@ -1814,7 +1805,6 @@ DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_store_buffer)
#ifdef ENABLE_MINOR_MC
DEFINE_NEG_IMPLICATION(single_threaded_gc, minor_mc_parallel_marking)
#endif // ENABLE_MINOR_MC
DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_freeing)
DEFINE_NEG_IMPLICATION(single_threaded_gc, concurrent_array_buffer_sweeping)
#undef FLAG
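
Note the switch from DEFINE_BOOL to DEFINE_BOOL_READONLY for always_promote_young_mc above: the sweeper relies on young objects always being promoted whole during mark-compact (previously enforced through the now-deleted implication from array_buffer_extension), so the flag becomes a constant that command-line parsing cannot flip. Roughly, and only as an illustrative expansion (the real macros in src/flags/flag-definitions.h are mode-dependent and more involved):

// Illustrative expansion only, not V8's actual macro definitions.
#define DEFINE_BOOL(nam, def, cmt) bool FLAG_##nam = def;  // mutable at runtime
#define DEFINE_BOOL_READONLY(nam, def, cmt) \
  static constexpr bool FLAG_##nam = def;  // constant; --nam cannot change it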

View File

@ -1,58 +0,0 @@
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-collector.h"
#include "src/base/template-utils.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/tasks/cancelable-task.h"
#include "src/tasks/task-utils.h"
namespace v8 {
namespace internal {
void ArrayBufferCollector::QueueOrFreeGarbageAllocations(
std::vector<std::shared_ptr<BackingStore>> backing_stores) {
if (heap_->ShouldReduceMemory()) {
// Destruct the vector, which destructs the std::shared_ptrs, freeing
// the backing stores.
backing_stores.clear();
} else {
base::MutexGuard guard(&allocations_mutex_);
allocations_.push_back(std::move(backing_stores));
}
}
void ArrayBufferCollector::PerformFreeAllocations() {
base::MutexGuard guard(&allocations_mutex_);
// Destruct the vector, which destructs the vector of std::shared_ptrs,
// freeing the backing stores if their refcount drops to zero.
allocations_.clear();
}
void ArrayBufferCollector::FreeAllocations() {
// TODO(wez): Remove backing-store from external memory accounting.
heap_->account_external_memory_concurrently_freed();
if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() &&
FLAG_concurrent_array_buffer_freeing) {
V8::GetCurrentPlatform()->CallOnWorkerThread(
MakeCancelableTask(heap_->isolate(), [this] {
TRACE_BACKGROUND_GC(
heap_->tracer(),
GCTracer::BackgroundScope::BACKGROUND_ARRAY_BUFFER_FREE);
PerformFreeAllocations();
}));
} else {
// Fallback for when concurrency is disabled/restricted. This is e.g. the
// case when the GC should reduce memory. For such GCs the
// QueueOrFreeGarbageAllocations() call would immediately free the
// allocations and this call would free already queued ones.
PerformFreeAllocations();
}
}
} // namespace internal
} // namespace v8
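
The deleted collector above implements a hand-off pattern: garbage backing stores are queued under a mutex and released later on a worker thread, unless the GC should reduce memory, in which case they are freed on the spot. A self-contained sketch of the same pattern, using std::thread in place of V8's cancelable-task framework:

#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

struct BackingStore {};  // hypothetical stand-in

class Collector {
 public:
  // Free immediately under memory pressure, otherwise queue for later.
  void QueueOrFree(std::vector<std::shared_ptr<BackingStore>> stores,
                   bool should_reduce_memory) {
    if (should_reduce_memory) return;  // vector dies here, freeing the stores
    std::lock_guard<std::mutex> guard(mutex_);
    queued_.push_back(std::move(stores));
  }

  // Hand queued work to a background thread (V8 uses a cancelable task).
  void FreeOnWorkerThread() {
    worker_ = std::thread([this] { PerformFree(); });
  }

  ~Collector() {
    if (worker_.joinable()) worker_.join();
    PerformFree();  // free anything still queued at tear-down
  }

 private:
  void PerformFree() {
    std::lock_guard<std::mutex> guard(mutex_);
    queued_.clear();  // drops the shared_ptrs; the last owner frees the store
  }

  std::mutex mutex_;
  std::thread worker_;
  std::vector<std::vector<std::shared_ptr<BackingStore>>> queued_;
};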

View File

@ -1,54 +0,0 @@
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_
#define V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_
#include <vector>
#include "src/base/platform/mutex.h"
#include "src/objects/js-array-buffer.h"
namespace v8 {
namespace internal {
class Heap;
// To support background processing of array buffer backing stores, we process
// array buffers using the ArrayBufferTracker class. The ArrayBufferCollector
// keeps track of garbage backing stores so that they can be freed on a
// background thread.
class ArrayBufferCollector {
public:
explicit ArrayBufferCollector(Heap* heap) : heap_(heap) {}
~ArrayBufferCollector() { PerformFreeAllocations(); }
// These allocations will be either
// - freed immediately when under memory pressure, or
// - queued for freeing in FreeAllocations() or during tear down.
//
// FreeAllocations() potentially triggers a background task for processing.
void QueueOrFreeGarbageAllocations(
std::vector<std::shared_ptr<BackingStore>> allocations);
// Calls FreeAllocations() on a background thread.
void FreeAllocations();
private:
class FreeingTask;
// Begin freeing the allocations added through QueueOrFreeGarbageAllocations.
// Also called by TearDown.
void PerformFreeAllocations();
Heap* const heap_;
base::Mutex allocations_mutex_;
std::vector<std::vector<std::shared_ptr<BackingStore>>> allocations_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_COLLECTOR_H_

View File

@ -67,7 +67,6 @@ size_t ArrayBufferList::BytesSlow() {
void ArrayBufferSweeper::EnsureFinished() {
if (!sweeping_in_progress_) return;
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
TryAbortResult abort_result =
heap_->isolate()->cancelable_task_manager()->TryAbort(job_.id);
@ -138,8 +137,6 @@ void ArrayBufferSweeper::RequestSweep(SweepingScope scope) {
if (young_.IsEmpty() && (old_.IsEmpty() || scope == SweepingScope::Young))
return;
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() &&
FLAG_concurrent_array_buffer_sweeping) {
Prepare(scope);
@ -208,7 +205,6 @@ void ArrayBufferSweeper::ReleaseAll(ArrayBufferList* list) {
void ArrayBufferSweeper::Append(JSArrayBuffer object,
ArrayBufferExtension* extension) {
CHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
size_t bytes = extension->accounting_length();
if (Heap::InYoungGeneration(object)) {

View File

@ -1,185 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
#define V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces-inl.h"
#include "src/numbers/conversions-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/objects.h"
#define TRACE_BS(...) \
do { \
if (FLAG_trace_backing_store) PrintF(__VA_ARGS__); \
} while (false)
namespace v8 {
namespace internal {
void ArrayBufferTracker::RegisterNew(
Heap* heap, JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
if (!backing_store) return;
// If {buffer_start} is {nullptr}, we don't have to track and free it.
if (!backing_store->buffer_start()) return;
// ArrayBuffer tracking works only for small objects.
DCHECK(!heap->IsLargeObject(buffer));
DCHECK_EQ(backing_store->buffer_start(), buffer.backing_store());
const size_t length = backing_store->PerIsolateAccountingLength();
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) {
page->AllocateLocalTracker();
tracker = page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
TRACE_BS("ABT:reg bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
backing_store->byte_length(), backing_store.use_count());
tracker->Add(buffer, std::move(backing_store));
}
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
Heap* heap, JSArrayBuffer buffer) {
std::shared_ptr<BackingStore> backing_store;
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
DCHECK_NOT_NULL(tracker);
backing_store = tracker->Remove(buffer);
}
// TODO(wez): Remove backing-store from external memory accounting.
const size_t length = backing_store->PerIsolateAccountingLength();
heap->update_external_memory(-static_cast<intptr_t>(length));
return backing_store;
}
std::shared_ptr<BackingStore> ArrayBufferTracker::Lookup(Heap* heap,
JSArrayBuffer buffer) {
if (buffer.backing_store() == nullptr) return {};
Page* page = Page::FromHeapObject(buffer);
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
DCHECK_NOT_NULL(tracker);
return tracker->Lookup(buffer);
}
template <typename Callback>
void LocalArrayBufferTracker::Free(Callback should_free) {
size_t freed_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
// Unchecked cast because the map might already be dead at this point.
JSArrayBuffer buffer = JSArrayBuffer::unchecked_cast(it->first);
const size_t length = it->second->PerIsolateAccountingLength();
if (should_free(buffer)) {
// Destroy the shared pointer, (perhaps) freeing the backing store.
TRACE_BS("ABT:die bs=%p mem=%p (length=%zu) cnt=%ld\n",
it->second.get(), it->second->buffer_start(),
it->second->byte_length(), it->second.use_count());
it = array_buffers_.erase(it);
freed_memory += length;
} else {
++it;
}
}
if (freed_memory > 0) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(freed_memory);
}
}
template <typename MarkingState>
void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
// Callers must hold the page lock.
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
tracker->Free([marking_state](JSArrayBuffer buffer) {
return marking_state->IsWhite(buffer);
});
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
}
void LocalArrayBufferTracker::Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
auto length = backing_store->PerIsolateAccountingLength();
page_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
AddInternal(buffer, std::move(backing_store));
}
void LocalArrayBufferTracker::AddInternal(
JSArrayBuffer buffer, std::shared_ptr<BackingStore> backing_store) {
auto ret = array_buffers_.insert({buffer, std::move(backing_store)});
USE(ret);
// Check that we indeed inserted a new value and did not overwrite an existing
// one (which would be a bug).
DCHECK(ret.second);
}
std::shared_ptr<BackingStore> LocalArrayBufferTracker::Remove(
JSArrayBuffer buffer) {
TrackingData::iterator it = array_buffers_.find(buffer);
// Check that we indeed found a key to remove.
DCHECK(it != array_buffers_.end());
// Steal the underlying shared pointer before erasing the entry.
std::shared_ptr<BackingStore> backing_store = std::move(it->second);
TRACE_BS("ABT:rm bs=%p mem=%p (length=%zu) cnt=%ld\n", backing_store.get(),
backing_store->buffer_start(), backing_store->byte_length(),
backing_store.use_count());
// Erase the entry.
array_buffers_.erase(it);
// Update accounting.
auto length = backing_store->PerIsolateAccountingLength();
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
return backing_store;
}
std::shared_ptr<BackingStore> LocalArrayBufferTracker::Lookup(
JSArrayBuffer buffer) {
TrackingData::iterator it = array_buffers_.find(buffer);
if (it != array_buffers_.end()) {
return it->second;
}
return {};
}
#undef TRACE_BS
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_INL_H_
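
The inline header above is the heart of the old design: bookkeeping is per page, every mutation takes the page mutex, and external-memory counters are adjusted on each add and remove. Reduced to its data structure, the tracker is a map from buffer to std::shared_ptr<BackingStore> plus byte accounting; a minimal sketch with hypothetical names:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <unordered_map>
#include <utility>

struct BackingStore {
  std::size_t length = 0;  // what PerIsolateAccountingLength() would report
};
using BufferId = std::uintptr_t;  // stands in for the JSArrayBuffer pointer

class LocalTracker {
 public:
  void Add(BufferId buffer, std::shared_ptr<BackingStore> store,
           std::size_t* external_bytes) {
    *external_bytes += store->length;  // bump the page's accounting first
    bool inserted = buffers_.emplace(buffer, std::move(store)).second;
    assert(inserted && "registering the same buffer twice is a bug");
    (void)inserted;
  }

  std::shared_ptr<BackingStore> Remove(BufferId buffer,
                                       std::size_t* external_bytes) {
    auto it = buffers_.find(buffer);
    assert(it != buffers_.end());
    std::shared_ptr<BackingStore> store = std::move(it->second);
    buffers_.erase(it);
    *external_bytes -= store->length;
    return store;  // caller decides whether the store lives on
  }

 private:
  std::unordered_map<BufferId, std::shared_ptr<BackingStore>> buffers_;
};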

View File

@ -1,163 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/array-buffer-tracker.h"
#include <vector>
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/spaces.h"
#define TRACE_BS(...) \
do { \
if (FLAG_trace_backing_store) PrintF(__VA_ARGS__); \
} while (false)
namespace v8 {
namespace internal {
LocalArrayBufferTracker::~LocalArrayBufferTracker() {
CHECK(array_buffers_.empty());
}
template <typename Callback>
void LocalArrayBufferTracker::Process(Callback callback) {
std::vector<std::shared_ptr<BackingStore>> backing_stores_to_free;
TrackingData kept_array_buffers;
JSArrayBuffer new_buffer;
JSArrayBuffer old_buffer;
size_t freed_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end(); ++it) {
old_buffer = it->first;
DCHECK_EQ(page_, Page::FromHeapObject(old_buffer));
const CallbackResult result = callback(old_buffer, &new_buffer);
if (result == kKeepEntry) {
kept_array_buffers.insert(std::move(*it));
} else if (result == kUpdateEntry) {
DCHECK_EQ(old_buffer.byte_length(), new_buffer.byte_length());
DCHECK(!new_buffer.is_null());
Page* target_page = Page::FromHeapObject(new_buffer);
{
base::MutexGuard guard(target_page->mutex());
LocalArrayBufferTracker* tracker = target_page->local_tracker();
if (tracker == nullptr) {
target_page->AllocateLocalTracker();
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
const size_t length = it->second->PerIsolateAccountingLength();
// We should decrement before adding to avoid potential overflows in
// the external memory counters.
tracker->AddInternal(new_buffer, std::move(it->second));
MemoryChunk::MoveExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer,
static_cast<MemoryChunk*>(page_),
static_cast<MemoryChunk*>(target_page), length);
}
} else if (result == kRemoveEntry) {
auto backing_store = std::move(it->second);
freed_memory += backing_store->PerIsolateAccountingLength();
TRACE_BS("ABT:queue bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
backing_store->byte_length(), backing_store.use_count());
if (!backing_store->is_shared()) {
// Only retain non-shared backing stores. For shared backing stores,
// drop the shared_ptr right away; that is cheap, since it only
// decrements a refcount, except for the last reference, which
// destructs the store, and that case is rare.
backing_stores_to_free.push_back(backing_store);
}
} else {
UNREACHABLE();
}
}
if (freed_memory) {
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, freed_memory);
// TODO(wez): Remove backing-store from external memory accounting.
page_->heap()->update_external_memory_concurrently_freed(freed_memory);
}
array_buffers_.swap(kept_array_buffers);
// Pass the backing stores that need to be freed to the main thread for
// potential later distribution.
page_->heap()->array_buffer_collector()->QueueOrFreeGarbageAllocations(
std::move(backing_stores_to_free));
}
void ArrayBufferTracker::PrepareToFreeDeadInNewSpace(Heap* heap) {
DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
for (Page* page :
PageRange(heap->new_space()->from_space().first_page(), nullptr)) {
bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
CHECK(empty);
}
}
void ArrayBufferTracker::FreeAll(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
tracker->Free([](JSArrayBuffer buffer) { return true; });
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
}
bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return true;
DCHECK(page->SweepingDone());
tracker->Process([mode](JSArrayBuffer old_buffer, JSArrayBuffer* new_buffer) {
MapWord map_word = old_buffer.map_word();
if (map_word.IsForwardingAddress()) {
*new_buffer = JSArrayBuffer::cast(map_word.ToForwardingAddress());
return LocalArrayBufferTracker::kUpdateEntry;
}
return mode == kUpdateForwardedKeepOthers
? LocalArrayBufferTracker::kKeepEntry
: LocalArrayBufferTracker::kRemoveEntry;
});
return tracker->IsEmpty();
}
bool ArrayBufferTracker::IsTracked(JSArrayBuffer buffer) {
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return false;
return tracker->IsTracked(buffer);
}
}
void ArrayBufferTracker::TearDown(Heap* heap) {
// ArrayBuffers can only be found in NEW_SPACE and OLD_SPACE.
for (Page* p : *heap->old_space()) {
FreeAll(p);
}
NewSpace* new_space = heap->new_space();
if (new_space->to_space().is_committed()) {
for (Page* p : new_space->to_space()) {
FreeAll(p);
}
}
#ifdef DEBUG
if (new_space->from_space().is_committed()) {
for (Page* p : new_space->from_space()) {
DCHECK(!p->contains_array_buffers());
}
}
#endif // DEBUG
}
} // namespace internal
} // namespace v8
#undef TRACE_BS
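
LocalArrayBufferTracker::Process() above is the evacuation hook: for each tracked buffer the callback reports whether the object stayed put (kKeepEntry), was forwarded to another page (kUpdateEntry), or is garbage (kRemoveEntry). The core of that loop, stripped to a generic keep/update/remove rebuild over a map (a hedged sketch with hypothetical types, not V8's code):

#include <map>
#include <utility>

enum class CallbackResult { kKeep, kUpdate, kRemove };

// Rebuilds the map: kept entries survive unchanged, updated entries are
// re-keyed to the forwarded object, removed entries are dropped (destroying
// a shared_ptr value would release its backing store).
template <typename K, typename V, typename Callback>
void Process(std::map<K, V>* entries, Callback callback) {
  std::map<K, V> kept;
  for (auto& entry : *entries) {
    K new_key = entry.first;
    switch (callback(entry.first, &new_key)) {
      case CallbackResult::kKeep:
        kept.emplace(entry.first, std::move(entry.second));
        break;
      case CallbackResult::kUpdate:
        kept.emplace(new_key, std::move(entry.second));
        break;
      case CallbackResult::kRemove:
        break;
    }
  }
  entries->swap(kept);
}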

View File

@ -1,131 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_ARRAY_BUFFER_TRACKER_H_
#define V8_HEAP_ARRAY_BUFFER_TRACKER_H_
#include <unordered_map>
#include "src/base/platform/mutex.h"
#include "src/common/globals.h"
#include "src/objects/backing-store.h"
#include "src/objects/js-array-buffer.h"
#include "src/utils/allocation.h"
namespace v8 {
namespace internal {
class MarkingState;
class Page;
class Space;
class ArrayBufferTracker : public AllStatic {
public:
enum ProcessingMode {
kUpdateForwardedRemoveOthers,
kUpdateForwardedKeepOthers,
};
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.
// Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
// access to the tracker by taking the page lock for the corresponding page.
inline static void RegisterNew(Heap* heap, JSArrayBuffer buffer,
std::shared_ptr<BackingStore>);
inline static std::shared_ptr<BackingStore> Unregister(Heap* heap,
JSArrayBuffer buffer);
inline static std::shared_ptr<BackingStore> Lookup(Heap* heap,
JSArrayBuffer buffer);
// Identifies all backing store pointers for dead JSArrayBuffers in new space.
// Does not take any locks and can only be called during Scavenge.
static void PrepareToFreeDeadInNewSpace(Heap* heap);
// Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be
// taken by the caller.
template <typename MarkingState>
static void FreeDead(Page* page, MarkingState* marking_state);
// Frees all remaining, live or dead, array buffers on a page. Only useful
// during tear down.
static void FreeAll(Page* page);
// Processes all array buffers on a given page. |mode| specifies the action
// to perform on the buffers. Returns whether the tracker is empty or not.
static bool ProcessBuffers(Page* page, ProcessingMode mode);
// Returns whether a buffer is currently tracked.
V8_EXPORT_PRIVATE static bool IsTracked(JSArrayBuffer buffer);
// Tears down the tracker and frees up all registered array buffers.
static void TearDown(Heap* heap);
};
// LocalArrayBufferTracker tracks internalized array buffers.
//
// Never use directly but instead always call through |ArrayBufferTracker|.
class LocalArrayBufferTracker {
public:
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };
explicit LocalArrayBufferTracker(Page* page) : page_(page) {}
~LocalArrayBufferTracker();
inline void Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
inline std::shared_ptr<BackingStore> Remove(JSArrayBuffer buffer);
inline std::shared_ptr<BackingStore> Lookup(JSArrayBuffer buffer);
// Frees up array buffers.
//
// Sample usage:
// Free([](HeapObject array_buffer) {
// if (should_free_internal(array_buffer)) return true;
// return false;
// });
template <typename Callback>
void Free(Callback should_free);
// Processes buffers one by one. The CallbackResult of the callback decides
// what action to take on the buffer.
//
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer buffer, JSArrayBuffer* new_buffer);
template <typename Callback>
void Process(Callback callback);
bool IsEmpty() const { return array_buffers_.empty(); }
bool IsTracked(JSArrayBuffer buffer) const {
return array_buffers_.find(buffer) != array_buffers_.end();
}
private:
class Hasher {
public:
size_t operator()(JSArrayBuffer buffer) const {
return static_cast<size_t>(buffer.ptr() >> 3);
}
};
using TrackingData =
std::unordered_map<JSArrayBuffer, std::shared_ptr<BackingStore>, Hasher>;
// Internal version of add that does not update counters. Requires separate
// logic for updating external memory counters.
inline void AddInternal(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
Page* page_;
// The set contains raw heap pointers which are removed by the GC upon
// processing the tracker through its owning page.
TrackingData array_buffers_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
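
One detail worth noting in the header above: Hasher shifts the buffer address right by three bits. Heap object pointers are aligned (and tagged), so their low bits carry almost no entropy; hashing them unshifted would cluster entries into a few buckets. Illustrative only:

#include <cstddef>
#include <cstdint>

// The low bits of an aligned pointer are (nearly) constant, so shift them
// out before using the address as a hash value.
std::size_t HashAlignedPointer(std::uintptr_t address) {
  return static_cast<std::size_t>(address >> 3);
}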

View File

@ -27,7 +27,6 @@ class Isolate;
class LargeObjectSpace;
class LargePage;
class LinearAllocationArea;
class LocalArrayBufferTracker;
class Page;
class PagedSpace;
class SemiSpace;

View File

@ -28,9 +28,7 @@
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/barrier.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/code-stats.h"
@ -2888,20 +2886,6 @@ void* Heap::AllocateExternalBackingStore(
return allocate(byte_length);
}
void Heap::RegisterBackingStore(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
ArrayBufferTracker::RegisterNew(this, buffer, std::move(backing_store));
}
std::shared_ptr<BackingStore> Heap::UnregisterBackingStore(
JSArrayBuffer buffer) {
return ArrayBufferTracker::Unregister(this, buffer);
}
std::shared_ptr<BackingStore> Heap::LookupBackingStore(JSArrayBuffer buffer) {
return ArrayBufferTracker::Lookup(this, buffer);
}
void Heap::ConfigureInitialOldGenerationSize() {
if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
const size_t minimum_growing_step =
@ -3900,20 +3884,7 @@ void Heap::MemoryPressureNotification(MemoryPressureLevel level,
}
void Heap::EagerlyFreeExternalMemory() {
if (FLAG_array_buffer_extension) {
array_buffer_sweeper()->EnsureFinished();
} else {
CHECK(!FLAG_local_heaps);
for (Page* page : *old_space()) {
if (!page->SweepingDone()) {
base::MutexGuard guard(page->mutex());
if (!page->SweepingDone()) {
ArrayBufferTracker::FreeDead(
page, mark_compact_collector()->non_atomic_marking_state());
}
}
}
}
memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
@ -5334,7 +5305,6 @@ void Heap::SetUpSpaces() {
#else
minor_mark_compact_collector_ = nullptr;
#endif // ENABLE_MINOR_MC
array_buffer_collector_.reset(new ArrayBufferCollector(this));
array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
gc_idle_time_handler_.reset(new GCIdleTimeHandler());
memory_measurement_.reset(new MemoryMeasurement(isolate()));
@ -5578,7 +5548,6 @@ void Heap::TearDown() {
#endif // ENABLE_MINOR_MC
scavenger_collector_.reset();
array_buffer_collector_.reset();
array_buffer_sweeper_.reset();
incremental_marking_.reset();
concurrent_marking_.reset();
@ -5599,11 +5568,6 @@ void Heap::TearDown() {
external_string_table_.TearDown();
// Tear down all ArrayBuffers before tearing down the heap since their
// byte_length may be a HeapNumber which is required for freeing the backing
// store.
ArrayBufferTracker::TearDown(this);
tracer_.reset();
for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) {
@ -6182,12 +6146,10 @@ void Heap::RememberUnmappedPage(Address page, bool compacted) {
}
size_t Heap::YoungArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->YoungBytes();
}
size_t Heap::OldArrayBufferBytes() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
return array_buffer_sweeper()->OldBytes();
}

View File

@ -788,10 +788,6 @@ class Heap {
return minor_mark_compact_collector_;
}
ArrayBufferCollector* array_buffer_collector() {
return array_buffer_collector_.get();
}
ArrayBufferSweeper* array_buffer_sweeper() {
return array_buffer_sweeper_.get();
}
@ -1348,14 +1344,6 @@ class Heap {
V8_EXPORT_PRIVATE void* AllocateExternalBackingStore(
const std::function<void*(size_t)>& allocate, size_t byte_length);
// ===========================================================================
// ArrayBuffer tracking. =====================================================
// ===========================================================================
void RegisterBackingStore(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store);
std::shared_ptr<BackingStore> UnregisterBackingStore(JSArrayBuffer buffer);
std::shared_ptr<BackingStore> LookupBackingStore(JSArrayBuffer buffer);
// ===========================================================================
// Allocation site tracking. =================================================
// ===========================================================================
@ -2140,7 +2128,6 @@ class Heap {
std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
MinorMarkCompactCollector* minor_mark_compact_collector_ = nullptr;
std::unique_ptr<ScavengerCollector> scavenger_collector_;
std::unique_ptr<ArrayBufferCollector> array_buffer_collector_;
std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
std::unique_ptr<MemoryAllocator> memory_allocator_;

View File

@ -13,9 +13,7 @@
#include "src/execution/frames-inl.h"
#include "src/execution/vm-state-inl.h"
#include "src/handles/global-handles.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking-inl.h"
@ -3050,7 +3048,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::VisitBlackObjectsNoFail(
chunk, marking_state, &new_space_visitor_,
LiveObjectVisitor::kClearMarkbits);
// ArrayBufferTracker will be updated during pointers updating.
break;
case kPageNewToOld:
LiveObjectVisitor::VisitBlackObjectsNoFail(
@ -3058,7 +3055,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::kKeepMarking);
new_to_old_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
// ArrayBufferTracker will be updated during sweeping.
break;
case kPageNewToNew:
LiveObjectVisitor::VisitBlackObjectsNoFail(
@ -3066,7 +3062,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
LiveObjectVisitor::kKeepMarking);
new_to_new_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
// ArrayBufferTracker will be updated during sweeping.
break;
case kObjectsOldToOld: {
const bool success = LiveObjectVisitor::VisitBlackObjects(
@ -3076,8 +3071,6 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) {
// Aborted compaction page. Actual processing happens on the main
// thread for simplicity reasons.
collector_->ReportAbortedEvacuationCandidate(failed_object, chunk);
} else {
// ArrayBufferTracker will be updated during pointers updating.
}
break;
}
@ -3188,7 +3181,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
for (Page* page : new_space_evacuation_pages_) {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
if (live_bytes_on_page == 0) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page,
FLAG_always_promote_young_mc)) {
@ -3715,40 +3708,6 @@ UpdatingItem* MarkCompactCollector::CreateRememberedSetUpdatingItem(
heap(), non_atomic_marking_state(), chunk, updating_mode);
}
// Update array buffers on a page that has been evacuated by copying objects.
// Target page exclusivity in old space is guaranteed by the fact that
// evacuation tasks either (a) retrieved a fresh page, or (b) retrieved all
// free list items of a given page. For new space the tracker will update
// using a lock.
class ArrayBufferTrackerUpdatingItem : public UpdatingItem {
public:
enum EvacuationState { kRegular, kAborted };
explicit ArrayBufferTrackerUpdatingItem(Page* page, EvacuationState state)
: page_(page), state_(state) {}
~ArrayBufferTrackerUpdatingItem() override = default;
void Process() override {
TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"ArrayBufferTrackerUpdatingItem::Process", "EvacuationState",
state_);
switch (state_) {
case EvacuationState::kRegular:
ArrayBufferTracker::ProcessBuffers(
page_, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
break;
case EvacuationState::kAborted:
ArrayBufferTracker::ProcessBuffers(
page_, ArrayBufferTracker::kUpdateForwardedKeepOthers);
break;
}
}
private:
Page* const page_;
const EvacuationState state_;
};
int MarkCompactCollectorBase::CollectToSpaceUpdatingItems(
ItemParallelJob* job) {
// Seed to space pages.
@ -3799,45 +3758,6 @@ int MarkCompactCollectorBase::CollectRememberedSetUpdatingItems(
return pages;
}
int MarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
return pages;
}
int MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : old_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsOldToOld &&
p->IsEvacuationCandidate()) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
for (auto object_and_page : aborted_evacuation_candidates_) {
Page* p = object_and_page.second;
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kAborted));
}
return pages;
}
class EphemeronTableUpdatingItem : public UpdatingItem {
public:
enum EvacuationState { kRegular, kAborted };
@ -3947,8 +3867,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
&page_parallel_job_semaphore_);
int array_buffer_pages = 0;
array_buffer_pages += CollectNewSpaceArrayBufferTrackerItems(&updating_job);
array_buffer_pages += CollectOldSpaceArrayBufferTrackerItems(&updating_job);
int remembered_set_pages = 0;
remembered_set_pages += CollectRememberedSetUpdatingItems(
@ -3966,7 +3884,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
}
updating_job.Run();
heap()->array_buffer_collector()->FreeAllocations();
}
}
@ -4082,7 +3999,6 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
PrintIsolate(isolate(), "sweeping: released page: %p",
static_cast<void*>(p));
}
ArrayBufferTracker::FreeAll(p);
space->memory_chunk_list().Remove(p);
space->ReleasePage(p);
continue;
@ -4449,7 +4365,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
ItemParallelJob updating_job(isolate()->cancelable_task_manager(),
&page_parallel_job_semaphore_);
CollectNewSpaceArrayBufferTrackerItems(&updating_job);
// Create batches of global handles.
const int to_space_tasks = CollectToSpaceUpdatingItems(&updating_job);
int remembered_set_pages = 0;
@ -4491,7 +4406,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS);
updating_job.Run();
heap()->array_buffer_collector()->FreeAllocations();
}
{
@ -5151,7 +5065,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
LiveObjectVisitor::VisitGreyObjectsNoFail(
chunk, marking_state, &new_space_visitor_,
LiveObjectVisitor::kClearMarkbits);
// ArrayBufferTracker will be updated during pointers updating.
break;
case kPageNewToOld:
LiveObjectVisitor::VisitGreyObjectsNoFail(
@ -5160,9 +5073,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
new_to_old_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
if (!chunk->IsLargePage()) {
// TODO(mlippautz): If cleaning array buffers is too slow here we can
// delay it until the next GC.
ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk),
MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
@ -5183,9 +5093,6 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk,
new_to_new_page_visitor_.account_moved_bytes(
marking_state->live_bytes(chunk));
DCHECK(!chunk->IsLargePage());
// TODO(mlippautz): If cleaning array buffers is too slow here we can
// delay it until the next GC.
ArrayBufferTracker::FreeDead(static_cast<Page*>(chunk), marking_state);
if (heap()->ShouldZapGarbage()) {
collector_->MakeIterable(static_cast<Page*>(chunk),
MarkingTreatmentMode::KEEP, ZAP_FREE_SPACE);
@ -5211,7 +5118,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
for (Page* page : new_space_evacuation_pages_) {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
if (live_bytes_on_page == 0) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page, false)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
@ -5244,21 +5151,6 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
this, &evacuation_job, &observer, live_bytes);
}
int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
ItemParallelJob* job) {
int pages = 0;
for (Page* p : new_space_evacuation_pages_) {
if (Evacuator::ComputeEvacuationMode(p) == Evacuator::kObjectsNewToOld) {
if (p->local_tracker() == nullptr) continue;
pages++;
job->AddItem(new ArrayBufferTrackerUpdatingItem(
p, ArrayBufferTrackerUpdatingItem::kRegular));
}
}
return pages;
}
#endif // ENABLE_MINOR_MC
} // namespace internal

View File

@ -717,9 +717,6 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int CollectOldSpaceArrayBufferTrackerItems(ItemParallelJob* job);
void ReleaseEvacuationCandidates();
void PostProcessEvacuationCandidates();
void ReportAbortedEvacuationCandidate(HeapObject failed_object,
@ -861,8 +858,6 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
UpdatingItem* CreateRememberedSetUpdatingItem(
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) override;
int CollectNewSpaceArrayBufferTrackerItems(ItemParallelJob* job);
int NumberOfParallelMarkingTasks(int pages);
void SweepArrayBufferExtensions();

View File

@ -16,7 +16,6 @@ class Bitmap;
class CodeObjectRegistry;
class FreeListCategory;
class Heap;
class LocalArrayBuferTracker;
class TypedSlotsSet;
class SlotSet;
@ -58,7 +57,6 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
FIELD(std::atomic<size_t>[kNumTypes], ExternalBackingStoreBytes),
FIELD(heap::ListNode<MemoryChunk>, ListNode),
FIELD(FreeListCategory**, Categories),
FIELD(LocalArrayBuferTracker*, LocalTracker),
FIELD(std::atomic<intptr_t>, YoungGenerationLiveByteCount),
FIELD(Bitmap*, YoungGenerationBitmap),
FIELD(CodeObjectRegistry*, CodeObjectRegistry),

View File

@ -5,7 +5,6 @@
#include "src/heap/memory-chunk.h"
#include "src/base/platform/platform.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/memory-allocator.h"
#include "src/heap/memory-chunk-inl.h"
@ -123,7 +122,6 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
chunk->write_unprotect_counter_ = 0;
chunk->mutex_ = new base::Mutex();
chunk->young_generation_bitmap_ = nullptr;
chunk->local_tracker_ = nullptr;
chunk->external_backing_store_bytes_[ExternalBackingStoreType::kArrayBuffer] =
0;
@ -218,7 +216,6 @@ void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseInvalidatedSlots<OLD_TO_NEW>();
ReleaseInvalidatedSlots<OLD_TO_OLD>();
if (local_tracker_ != nullptr) ReleaseLocalTracker();
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
if (!IsLargePage()) {
@ -375,12 +372,6 @@ bool MemoryChunk::RegisteredObjectWithInvalidatedSlots(HeapObject object) {
invalidated_slots<type>()->end();
}
void MemoryChunk::ReleaseLocalTracker() {
DCHECK_NOT_NULL(local_tracker_);
delete local_tracker_;
local_tracker_ = nullptr;
}
void MemoryChunk::AllocateYoungGenerationBitmap() {
DCHECK_NULL(young_generation_bitmap_);
young_generation_bitmap_ = static_cast<Bitmap*>(calloc(1, Bitmap::kSize));
@ -429,9 +420,6 @@ void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) {
MemoryChunkLayout::kListNodeOffset);
DCHECK_EQ(reinterpret_cast<Address>(&chunk->categories_) - chunk->address(),
MemoryChunkLayout::kCategoriesOffset);
DCHECK_EQ(
reinterpret_cast<Address>(&chunk->local_tracker_) - chunk->address(),
MemoryChunkLayout::kLocalTrackerOffset);
DCHECK_EQ(
reinterpret_cast<Address>(&chunk->young_generation_live_byte_count_) -
chunk->address(),

View File

@ -23,7 +23,6 @@ namespace internal {
class CodeObjectRegistry;
class FreeListCategory;
class LocalArrayBufferTracker;
// MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always
@ -156,8 +155,6 @@ class MemoryChunk : public BasicMemoryChunk {
return invalidated_slots_[type];
}
void ReleaseLocalTracker();
void AllocateYoungGenerationBitmap();
void ReleaseYoungGenerationBitmap();
@ -297,8 +294,6 @@ class MemoryChunk : public BasicMemoryChunk {
FreeListCategory** categories_;
LocalArrayBufferTracker* local_tracker_;
std::atomic<intptr_t> young_generation_live_byte_count_;
Bitmap* young_generation_bitmap_;

View File

@ -5,7 +5,6 @@
#include "src/heap/new-spaces.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
@ -21,7 +20,6 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) {
chunk->SetFlag(in_to_space ? MemoryChunk::TO_PAGE : MemoryChunk::FROM_PAGE);
Page* page = static_cast<Page*>(chunk);
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->AllocateLocalTracker();
page->list_node().Initialize();
#ifdef ENABLE_MINOR_MC
if (FLAG_minor_mc) {
@ -659,13 +657,6 @@ void NewSpace::Verify(Isolate* isolate) {
ExternalString external_string = ExternalString::cast(object);
size_t size = external_string.ExternalPayloadSize();
external_space_bytes[ExternalBackingStoreType::kExternalString] += size;
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = ArrayBufferTracker::Lookup(heap(), array_buffer)
->PerIsolateAccountingLength();
external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
current += size;
@ -677,18 +668,14 @@ void NewSpace::Verify(Isolate* isolate) {
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
if (i == ExternalBackingStoreType::kArrayBuffer) continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
size_t bytes = heap()->array_buffer_sweeper()->young().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));
}
// Check semi-spaces.
CHECK_EQ(from_space_.id(), kFromSpace);

View File

@ -303,8 +303,7 @@ class V8_EXPORT_PRIVATE NewSpace
}
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
if (type == ExternalBackingStoreType::kArrayBuffer)
return heap()->YoungArrayBufferBytes();
DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes(type));
return to_space_.ExternalBackingStoreBytes(type);

View File

@ -7,7 +7,6 @@
#include "src/heap/objects-visiting.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/mark-compact.h"
#include "src/objects/free-space-inl.h"

View File

@ -9,7 +9,6 @@
#include "src/execution/isolate.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/memory-allocator.h"
@ -755,14 +754,6 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
ExternalString external_string = ExternalString::cast(object);
size_t size = external_string.ExternalPayloadSize();
external_page_bytes[ExternalBackingStoreType::kExternalString] += size;
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size =
ArrayBufferTracker::Lookup(isolate->heap(), array_buffer)
->PerIsolateAccountingLength();
external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
}
for (int i = 0; i < kNumTypes; i++) {
@ -772,15 +763,13 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
}
}
for (int i = 0; i < kNumTypes; i++) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
i == ExternalBackingStoreType::kArrayBuffer)
continue;
if (i == ExternalBackingStoreType::kArrayBuffer) continue;
ExternalBackingStoreType t = static_cast<ExternalBackingStoreType>(i);
CHECK_EQ(external_space_bytes[t], ExternalBackingStoreBytes(t));
}
CHECK(allocation_pointer_found_in_space);
if (identity() == OLD_SPACE && V8_ARRAY_BUFFER_EXTENSION_BOOL) {
if (identity() == OLD_SPACE) {
size_t bytes = heap()->array_buffer_sweeper()->old().BytesSlow();
CHECK_EQ(bytes,
ExternalBackingStoreBytes(ExternalBackingStoreType::kArrayBuffer));

View File

@ -478,8 +478,7 @@ class OldSpace : public PagedSpace {
}
size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL &&
type == ExternalBackingStoreType::kArrayBuffer)
if (type == ExternalBackingStoreType::kArrayBuffer)
return heap()->OldArrayBufferBytes();
return external_backing_store_bytes_[type];
}

View File

@ -4,7 +4,6 @@
#include "src/heap/scavenger.h"
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/barrier.h"
#include "src/heap/gc-tracer.h"
@ -390,12 +389,6 @@ void ScavengerCollector::CollectGarbage() {
// Set age mark.
heap_->new_space_->set_age_mark(heap_->new_space()->top());
{
TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS);
ArrayBufferTracker::PrepareToFreeDeadInNewSpace(heap_);
}
heap_->array_buffer_collector()->FreeAllocations();
// Since we promote all surviving large objects immediately, all remaining
// large objects must be dead.
// TODO(hpayer): Don't free all as soon as we have an intermediate generation.

View File

@ -12,7 +12,6 @@
#include "src/base/bounded-page-allocator.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
#include "src/heap/gc-tracer.h"
@ -116,15 +115,6 @@ void Page::MergeOldToNewRememberedSets() {
sweeping_slot_set_ = nullptr;
}
void Page::AllocateLocalTracker() {
DCHECK_NULL(local_tracker_);
local_tracker_ = new LocalArrayBufferTracker(this);
}
bool Page::contains_array_buffers() {
return local_tracker_ != nullptr && !local_tracker_->IsEmpty();
}
size_t Page::AvailableInFreeList() {
size_t sum = 0;
ForAllFreeListCategories([&sum](FreeListCategory* category) {

View File

@ -38,7 +38,6 @@ class Isolate;
class LargeObjectSpace;
class LargePage;
class LinearAllocationArea;
class LocalArrayBufferTracker;
class Page;
class PagedSpace;
class SemiSpace;
@ -267,10 +266,6 @@ class Page : public MemoryChunk {
}
}
void AllocateLocalTracker();
inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
bool contains_array_buffers();
size_t AvailableInFreeList();
size_t AvailableInFreeListFromAllocatedBytes() {

View File

@ -5,7 +5,6 @@
#include "src/heap/sweeper.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/free-list-inl.h"
#include "src/heap/gc-tracer.h"
@ -354,10 +353,6 @@ int Sweeper::RawSweep(
// Phase 1: Prepare the page for sweeping.
// Before we sweep objects on the page, we free dead array buffers which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p, marking_state_);
// Set the allocated_bytes_ counter to area_size and clear the wasted_memory_
// counter. The free operations below will decrease allocated_bytes_ to actual
// live bytes and keep track of wasted_memory_.

View File

@ -57,7 +57,6 @@ void JSArrayBuffer::SetBackingStoreRefForSerialization(uint32_t ref) {
}
ArrayBufferExtension* JSArrayBuffer::extension() const {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
// With pointer compression the extension-field might not be
// pointer-aligned. However on ARM64 this field needs to be aligned to
@ -81,13 +80,9 @@ ArrayBufferExtension* JSArrayBuffer::extension() const {
#else
return base::AsAtomicPointer::Acquire_Load(extension_location());
#endif
} else {
return nullptr;
}
}
void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
#if V8_COMPRESS_POINTERS
if (extension != nullptr) {
uintptr_t address = reinterpret_cast<uintptr_t>(extension);
@ -104,9 +99,6 @@ void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
WriteBarrier::Marking(*this, extension);
} else {
CHECK_EQ(extension, nullptr);
}
}
ArrayBufferExtension** JSArrayBuffer::extension_location() const {

View File

@ -65,16 +65,12 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
set_byte_length(backing_store->byte_length());
if (backing_store->is_wasm_memory()) set_is_detachable(false);
if (!backing_store->free_on_destruct()) set_is_external(true);
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
Heap* heap = isolate->heap();
ArrayBufferExtension* extension = EnsureExtension();
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
heap->AppendArrayBufferExtension(*this, extension);
} else {
isolate->heap()->RegisterBackingStore(*this, std::move(backing_store));
}
Heap* heap = isolate->heap();
ArrayBufferExtension* extension = EnsureExtension();
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
heap->AppendArrayBufferExtension(*this, extension);
}
void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
@ -90,11 +86,7 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
Isolate* const isolate = GetIsolate();
if (backing_store()) {
std::shared_ptr<BackingStore> backing_store;
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
backing_store = RemoveExtension();
} else {
backing_store = isolate->heap()->UnregisterBackingStore(*this);
}
CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
}
@ -110,16 +102,11 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
}
std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
if (!extension()) return nullptr;
return extension()->backing_store();
} else {
return GetIsolate()->heap()->LookupBackingStore(*this);
}
}
ArrayBufferExtension* JSArrayBuffer::EnsureExtension() {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
ArrayBufferExtension* extension = this->extension();
if (extension != nullptr) return extension;

View File

@ -14,8 +14,7 @@ bitfield struct JSArrayBufferFlags extends uint32 {
extern class JSArrayBuffer extends JSObject {
byte_length: uintptr;
backing_store: ExternalPointer;
@if(V8_ARRAY_BUFFER_EXTENSION_BOOL) extension: RawPtr;
@ifnot(V8_ARRAY_BUFFER_EXTENSION_BOOL) extension: void;
extension: RawPtr;
bit_field: JSArrayBufferFlags;
// Pads header size to be a multiple of kTaggedSize.
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32;

View File

@ -48,8 +48,6 @@ class BuildFlags : public ContextualClass<BuildFlags> {
build_flags_["V8_SFI_HAS_UNIQUE_ID"] = V8_SFI_HAS_UNIQUE_ID;
build_flags_["TAGGED_SIZE_8_BYTES"] = TAGGED_SIZE_8_BYTES;
build_flags_["V8_DOUBLE_FIELDS_UNBOXING"] = V8_DOUBLE_FIELDS_UNBOXING;
build_flags_["V8_ARRAY_BUFFER_EXTENSION_BOOL"] =
V8_ARRAY_BUFFER_EXTENSION_BOOL;
build_flags_["TRUE_FOR_TESTING"] = true;
build_flags_["FALSE_FOR_TESTING"] = false;
}

View File

@ -5,7 +5,6 @@
#include "src/api/api-inl.h"
#include "src/execution/isolate.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"
#include "src/objects/js-array-buffer-inl.h"
@ -15,12 +14,6 @@
namespace {
using LocalTracker = i::LocalArrayBufferTracker;
bool IsTracked(i::JSArrayBuffer buf) {
return i::ArrayBufferTracker::IsTracked(buf);
}
bool IsTrackedYoung(i::Heap* heap, i::ArrayBufferExtension* extension) {
bool in_young = heap->array_buffer_sweeper()->young().Contains(extension);
bool in_old = heap->array_buffer_sweeper()->old().Contains(extension);
@ -43,8 +36,7 @@ bool IsTracked(i::Heap* heap, i::ArrayBufferExtension* extension) {
}
bool IsTracked(i::Heap* heap, i::JSArrayBuffer buffer) {
return V8_ARRAY_BUFFER_EXTENSION_BOOL ? IsTracked(heap, buffer.extension())
: IsTracked(buffer);
return IsTracked(heap, buffer.extension());
}
} // namespace
@ -57,36 +49,6 @@ namespace heap {
// moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer raw_ab;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromHeapObject(*buf)->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_OnlyMC_Extension) {
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
@ -113,38 +75,6 @@ TEST(ArrayBuffer_OnlyMC_Extension) {
}
TEST(ArrayBuffer_OnlyScavenge) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer raw_ab;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromHeapObject(*buf)->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_OnlyScavenge_Extension) {
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (FLAG_single_generation) return;
FLAG_concurrent_array_buffer_sweeping = false;
@ -173,40 +103,7 @@ TEST(ArrayBuffer_OnlyScavenge_Extension) {
}
TEST(ArrayBuffer_ScavengeAndMC) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
JSArrayBuffer raw_ab;
{
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(*buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
raw_ab = *buf;
// Prohibit page from being released.
Page::FromHeapObject(*buf)->MarkNeverEvacuate();
}
// 2 GCs are needed because we promote to old space as live, meaning that
// we will survive one GC.
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
}
TEST(ArrayBuffer_ScavengeAndMC_Extension) {
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL || FLAG_single_generation) return;
if (FLAG_single_generation) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
@ -236,9 +133,10 @@ TEST(ArrayBuffer_ScavengeAndMC_Extension) {
}
TEST(ArrayBuffer_Compaction) {
if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (FLAG_never_compact) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
FLAG_concurrent_array_buffer_sweeping = false;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
@ -248,24 +146,23 @@ TEST(ArrayBuffer_Compaction) {
v8::HandleScope handle_scope(isolate);
Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
CHECK(IsTracked(*buf1));
CHECK(IsTracked(heap, *buf1));
heap::GcAndSweep(heap, NEW_SPACE);
heap::GcAndSweep(heap, NEW_SPACE);
Page* page_before_gc = Page::FromHeapObject(*buf1);
heap::ForceEvacuationCandidate(page_before_gc);
CHECK(IsTracked(*buf1));
CHECK(IsTracked(heap, *buf1));
CcTest::CollectAllGarbage();
Page* page_after_gc = Page::FromHeapObject(*buf1);
CHECK(IsTracked(*buf1));
CHECK(IsTracked(heap, *buf1));
CHECK_NE(page_before_gc, page_after_gc);
}
TEST(ArrayBuffer_UnregisterDuringSweep) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
// Regular pages in old space (without compaction) are processed concurrently
// in the sweeper. If we happen to unregister a buffer (either explicitly, or
// implicitly through e.g. |Detach|) we need to sync with the sweeper
@ -279,6 +176,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
i::FLAG_verify_heap = false;
#endif // VERIFY_HEAP
ManualGCScope manual_gc_scope;
i::FLAG_concurrent_array_buffer_sweeping = false;
CcTest::InitializeVM();
LocalContext env;
@ -295,14 +193,12 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
// non-empty set of buffers in the last GC.
Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
CHECK(IsTracked(*buf));
CHECK(IsTracked(*buf));
CHECK(IsTracked(heap, *buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
CHECK(IsTracked(*buf));
CHECK(IsTracked(heap, *buf));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(*buf));
CHECK(IsTracked(*buf2));
CHECK(IsTracked(heap, *buf));
CHECK(IsTracked(heap, *buf2));
}
CcTest::CollectGarbage(OLD_SPACE);
@ -314,7 +210,8 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
}
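The sweeper-synchronization concern that motivates
ArrayBuffer_UnregisterDuringSweep can be shown in miniature. A hypothetical
sketch (not part of the commit; it assumes v8::ArrayBuffer::Detach() is
available and that API-created buffers are detachable):

// Hypothetical sketch of the detach-while-sweeping hazard described above.
TEST(ArrayBuffer_DetachAfterGC_Sketch) {
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  v8::HandleScope handle_scope(isolate);
  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
  heap::GcAndSweep(heap, OLD_SPACE);
  // The concurrent sweeper may still be processing this buffer's page, so
  // detaching (which frees the backing store) must synchronize with it.
  ab->Detach();
}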
TEST(ArrayBuffer_NonLivePromotion) {
if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (!FLAG_incremental_marking) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
@ -335,23 +232,24 @@ TEST(ArrayBuffer_NonLivePromotion) {
root->set(0, *buf); // Buffer that should not be promoted as live.
}
heap::SimulateIncrementalMarking(heap, false);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
raw_ab = JSArrayBuffer::cast(root->get(0));
root->set(0, ReadOnlyRoots(heap).undefined_value());
heap::SimulateIncrementalMarking(heap, true);
// Prohibit page from being released.
Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(!IsTracked(raw_ab));
CHECK(!IsTracked(heap, raw_ab));
}
}
TEST(ArrayBuffer_LivePromotion) {
if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (!FLAG_incremental_marking) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
@ -372,22 +270,23 @@ TEST(ArrayBuffer_LivePromotion) {
root->set(0, *buf); // Buffer that should be promoted as live.
}
heap::SimulateIncrementalMarking(heap, true);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
raw_ab = JSArrayBuffer::cast(root->get(0));
root->set(0, ReadOnlyRoots(heap).undefined_value());
// Prohibit page from being released.
Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(raw_ab));
CHECK(IsTracked(heap, raw_ab));
}
}
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
if (!i::FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
if (!i::FLAG_incremental_marking) return;
FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved across semispace
// copy.
@ -413,19 +312,16 @@ TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
// processed in the sweeper (relying on marking information) instead of
// processing during newspace evacuation.
heap::FillCurrentPage(heap->new_space(), &handles);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
heap::GcAndSweep(heap, NEW_SPACE);
heap::SimulateIncrementalMarking(heap, true);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
CHECK(IsTracked(heap, JSArrayBuffer::cast(root->get(0))));
}
}
TEST(ArrayBuffer_PagePromotion_Extension) {
if (!i::FLAG_incremental_marking || !V8_ARRAY_BUFFER_EXTENSION_BOOL ||
i::FLAG_single_generation)
return;
i::FLAG_always_promote_young_mc = true;
TEST(ArrayBuffer_PagePromotion) {
if (!i::FLAG_incremental_marking || i::FLAG_single_generation) return;
i::FLAG_concurrent_array_buffer_sweeping = false;
ManualGCScope manual_gc_scope;
@ -492,7 +388,6 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
}
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();


@ -3,7 +3,6 @@
// found in the LICENSE file.
#include "src/execution/isolate.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/factory.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/objects-inl.h"
@ -129,100 +128,6 @@ UNINITIALIZED_TEST(PagePromotion_NewToNew) {
isolate->Dispose();
}
UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc ||
i::FLAG_single_generation)
return;
// Test makes sure JSArrayBuffer backing stores are still tracked after
// new-to-new promotion.
v8::Isolate* isolate = NewIsolateForPagePromotion();
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Context::New(isolate)->Enter();
Heap* heap = i_isolate->heap();
// Fill the current page which potentially contains the age mark.
heap::FillCurrentPage(heap->new_space());
// Allocate a buffer we would like to check against.
Handle<JSArrayBuffer> buffer =
i_isolate->factory()
->NewJSArrayBufferAndBackingStore(100,
InitializedFlag::kZeroInitialized)
.ToHandleChecked();
std::vector<Handle<FixedArray>> handles;
// Simulate a full space, filling the interesting page with live objects.
heap::SimulateFullSpace(heap->new_space(), &handles);
CHECK_GT(handles.size(), 0u);
// First object in handles should be on the same page as the allocated
// JSArrayBuffer.
Handle<FixedArray> first_object = handles.front();
Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}
UNINITIALIZED_TEST(PagePromotion_NewToOldJSArrayBuffer) {
if (i::FLAG_single_generation) return;
if (!i::FLAG_page_promotion) return;
// Test makes sure JSArrayBuffer backing stores are still tracked after
// new-to-old promotion.
v8::Isolate* isolate = NewIsolateForPagePromotion();
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
{
v8::Isolate::Scope isolate_scope(isolate);
v8::HandleScope handle_scope(isolate);
v8::Context::New(isolate)->Enter();
Heap* heap = i_isolate->heap();
// Fill the current page which potentially contains the age mark.
heap::FillCurrentPage(heap->new_space());
// Allocate a buffer we would like to check against.
Handle<JSArrayBuffer> buffer =
i_isolate->factory()
->NewJSArrayBufferAndBackingStore(100,
InitializedFlag::kZeroInitialized)
.ToHandleChecked();
std::vector<Handle<FixedArray>> handles;
// Simulate a full space, filling the interesting page with live objects.
heap::SimulateFullSpace(heap->new_space(), &handles);
CHECK_GT(handles.size(), 0u);
// First object in handles should be on the same page as the allocated
// JSArrayBuffer.
Handle<FixedArray> first_object = handles.front();
Page* to_be_promoted_page = Page::FromHeapObject(*first_object);
CHECK(!to_be_promoted_page->Contains(heap->new_space()->age_mark()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
heap::GcAndSweep(heap, OLD_SPACE);
heap::GcAndSweep(heap, OLD_SPACE);
CHECK(heap->old_space()->ContainsSlow(first_object->address()));
CHECK(heap->old_space()->ContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}
UNINITIALIZED_HEAP_TEST(Regress658718) {
if (!i::FLAG_page_promotion || FLAG_always_promote_young_mc) return;


@ -887,12 +887,18 @@ TEST(ReadOnlySpaceMetrics_AlignedAllocations) {
faked_space->AllocateRaw(object_size, kDoubleAligned).ToObjectChecked();
CHECK_EQ(object.address() % alignment, 0);
// Calculate size of allocations based on area_start.
Address area_start = faked_space->pages().back()->GetAreaStart();
Address top = RoundUp(area_start, alignment) + object_size;
top = RoundUp(top, alignment) + object_size;
size_t expected_size = top - area_start;
faked_space->ShrinkPages();
faked_space->Seal(ReadOnlySpace::SealMode::kDoNotDetachFromHeap);
// Allocated objects size may contain 4 bytes of padding on 32-bit or
// with pointer compression.
CHECK_EQ(faked_space->Size(), object_size + RoundUp(object_size, alignment));
CHECK_EQ(faked_space->Size(), expected_size);
size_t committed_memory = RoundUp(
MemoryChunkLayout::ObjectStartOffsetInDataPage() + faked_space->Size(),
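To make the new expected_size arithmetic concrete, here is a hypothetical
walk-through with made-up numbers (8-byte double alignment, 16-byte objects;
none of these values come from the test itself):

// Hypothetical values: area_start = 0x1004, alignment = 8, object_size = 16.
//   top = RoundUp(0x1004, 8) + 16 = 0x1008 + 16 = 0x1018
//   top = RoundUp(0x1018, 8) + 16 = 0x1018 + 16 = 0x1028
//   expected_size = 0x1028 - 0x1004 = 36 bytes
// The 4 bytes of padding in front of the first object are counted directly,
// rather than approximated by the removed platform-dependent CHECK.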


@ -205,266 +205,266 @@ INSTANCE_TYPES = {
# List of known V8 maps.
KNOWN_MAPS = {
("read_only_space", 0x0211d): (167, "FreeSpaceMap"),
("read_only_space", 0x02145): (170, "MetaMap"),
("read_only_space", 0x02189): (67, "NullMap"),
("read_only_space", 0x021c1): (162, "DescriptorArrayMap"),
("read_only_space", 0x021f1): (156, "WeakFixedArrayMap"),
("read_only_space", 0x02219): (166, "OnePointerFillerMap"),
("read_only_space", 0x02241): (166, "TwoPointerFillerMap"),
("read_only_space", 0x02285): (67, "UninitializedMap"),
("read_only_space", 0x022c9): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x02325): (67, "UndefinedMap"),
("read_only_space", 0x02359): (66, "HeapNumberMap"),
("read_only_space", 0x0239d): (67, "TheHoleMap"),
("read_only_space", 0x023fd): (67, "BooleanMap"),
("read_only_space", 0x02485): (131, "ByteArrayMap"),
("read_only_space", 0x024ad): (117, "FixedArrayMap"),
("read_only_space", 0x024d5): (117, "FixedCOWArrayMap"),
("read_only_space", 0x024fd): (118, "HashTableMap"),
("read_only_space", 0x02525): (64, "SymbolMap"),
("read_only_space", 0x0254d): (40, "OneByteStringMap"),
("read_only_space", 0x02575): (129, "ScopeInfoMap"),
("read_only_space", 0x0259d): (175, "SharedFunctionInfoMap"),
("read_only_space", 0x025c5): (159, "CodeMap"),
("read_only_space", 0x025ed): (158, "CellMap"),
("read_only_space", 0x02615): (174, "GlobalPropertyCellMap"),
("read_only_space", 0x0263d): (70, "ForeignMap"),
("read_only_space", 0x02665): (157, "TransitionArrayMap"),
("read_only_space", 0x0268d): (45, "ThinOneByteStringMap"),
("read_only_space", 0x026b5): (165, "FeedbackVectorMap"),
("read_only_space", 0x02709): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x02769): (67, "ExceptionMap"),
("read_only_space", 0x027c5): (67, "TerminationExceptionMap"),
("read_only_space", 0x0282d): (67, "OptimizedOutMap"),
("read_only_space", 0x0288d): (67, "StaleRegisterMap"),
("read_only_space", 0x028d1): (130, "ScriptContextTableMap"),
("read_only_space", 0x028f9): (127, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x02921): (164, "FeedbackMetadataArrayMap"),
("read_only_space", 0x02949): (117, "ArrayListMap"),
("read_only_space", 0x02971): (65, "BigIntMap"),
("read_only_space", 0x02999): (128, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x029c1): (132, "BytecodeArrayMap"),
("read_only_space", 0x029e9): (160, "CodeDataContainerMap"),
("read_only_space", 0x02a11): (161, "CoverageInfoMap"),
("read_only_space", 0x02a39): (133, "FixedDoubleArrayMap"),
("read_only_space", 0x02a61): (120, "GlobalDictionaryMap"),
("read_only_space", 0x02a89): (97, "ManyClosuresCellMap"),
("read_only_space", 0x02ab1): (117, "ModuleInfoMap"),
("read_only_space", 0x02ad9): (121, "NameDictionaryMap"),
("read_only_space", 0x02b01): (97, "NoClosuresCellMap"),
("read_only_space", 0x02b29): (122, "NumberDictionaryMap"),
("read_only_space", 0x02b51): (97, "OneClosureCellMap"),
("read_only_space", 0x02b79): (123, "OrderedHashMapMap"),
("read_only_space", 0x02ba1): (124, "OrderedHashSetMap"),
("read_only_space", 0x02bc9): (125, "OrderedNameDictionaryMap"),
("read_only_space", 0x02bf1): (172, "PreparseDataMap"),
("read_only_space", 0x02c19): (173, "PropertyArrayMap"),
("read_only_space", 0x02c41): (93, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x02c69): (93, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02c91): (93, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02cb9): (126, "SimpleNumberDictionaryMap"),
("read_only_space", 0x02ce1): (149, "SmallOrderedHashMapMap"),
("read_only_space", 0x02d09): (150, "SmallOrderedHashSetMap"),
("read_only_space", 0x02d31): (151, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x02d59): (152, "SourceTextModuleMap"),
("read_only_space", 0x02d81): (153, "SyntheticModuleMap"),
("read_only_space", 0x02da9): (155, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x02dd1): (154, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x02df9): (71, "WasmTypeInfoMap"),
("read_only_space", 0x02e21): (181, "WeakArrayListMap"),
("read_only_space", 0x02e49): (119, "EphemeronHashTableMap"),
("read_only_space", 0x02e71): (163, "EmbedderDataArrayMap"),
("read_only_space", 0x02e99): (182, "WeakCellMap"),
("read_only_space", 0x02ec1): (32, "StringMap"),
("read_only_space", 0x02ee9): (41, "ConsOneByteStringMap"),
("read_only_space", 0x02f11): (33, "ConsStringMap"),
("read_only_space", 0x02f39): (37, "ThinStringMap"),
("read_only_space", 0x02f61): (35, "SlicedStringMap"),
("read_only_space", 0x02f89): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x02fb1): (34, "ExternalStringMap"),
("read_only_space", 0x02fd9): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x03001): (50, "UncachedExternalStringMap"),
("read_only_space", 0x03029): (0, "InternalizedStringMap"),
("read_only_space", 0x03051): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x03079): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x030a1): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x030c9): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x030f1): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x03119): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x03141): (67, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x03175): (96, "EnumCacheMap"),
("read_only_space", 0x031c5): (87, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x032b1): (99, "InterceptorInfoMap"),
("read_only_space", 0x053a1): (72, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x053c9): (73, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x053f1): (74, "CallableTaskMap"),
("read_only_space", 0x05419): (75, "CallbackTaskMap"),
("read_only_space", 0x05441): (76, "PromiseResolveThenableJobTaskMap"),
("read_only_space", 0x05469): (79, "FunctionTemplateInfoMap"),
("read_only_space", 0x05491): (80, "ObjectTemplateInfoMap"),
("read_only_space", 0x054b9): (81, "AccessCheckInfoMap"),
("read_only_space", 0x054e1): (82, "AccessorInfoMap"),
("read_only_space", 0x05509): (83, "AccessorPairMap"),
("read_only_space", 0x05531): (84, "AliasedArgumentsEntryMap"),
("read_only_space", 0x05559): (85, "AllocationMementoMap"),
("read_only_space", 0x05581): (88, "AsmWasmDataMap"),
("read_only_space", 0x055a9): (89, "AsyncGeneratorRequestMap"),
("read_only_space", 0x055d1): (90, "BreakPointMap"),
("read_only_space", 0x055f9): (91, "BreakPointInfoMap"),
("read_only_space", 0x05621): (92, "CachedTemplateObjectMap"),
("read_only_space", 0x05649): (94, "ClassPositionsMap"),
("read_only_space", 0x05671): (95, "DebugInfoMap"),
("read_only_space", 0x05699): (98, "FunctionTemplateRareDataMap"),
("read_only_space", 0x056c1): (100, "InterpreterDataMap"),
("read_only_space", 0x056e9): (101, "PromiseCapabilityMap"),
("read_only_space", 0x05711): (102, "PromiseReactionMap"),
("read_only_space", 0x05739): (103, "PropertyDescriptorObjectMap"),
("read_only_space", 0x05761): (104, "PrototypeInfoMap"),
("read_only_space", 0x05789): (105, "ScriptMap"),
("read_only_space", 0x057b1): (106, "SourceTextModuleInfoEntryMap"),
("read_only_space", 0x057d9): (107, "StackFrameInfoMap"),
("read_only_space", 0x05801): (108, "StackTraceFrameMap"),
("read_only_space", 0x05829): (109, "TemplateObjectDescriptionMap"),
("read_only_space", 0x05851): (110, "Tuple2Map"),
("read_only_space", 0x05879): (111, "WasmCapiFunctionDataMap"),
("read_only_space", 0x058a1): (112, "WasmExceptionTagMap"),
("read_only_space", 0x058c9): (113, "WasmExportedFunctionDataMap"),
("read_only_space", 0x058f1): (114, "WasmIndirectFunctionTableMap"),
("read_only_space", 0x05919): (115, "WasmJSFunctionDataMap"),
("read_only_space", 0x05941): (116, "WasmValueMap"),
("read_only_space", 0x05969): (135, "SloppyArgumentsElementsMap"),
("read_only_space", 0x05991): (171, "OnHeapBasicBlockProfilerDataMap"),
("read_only_space", 0x059b9): (168, "InternalClassMap"),
("read_only_space", 0x059e1): (177, "SmiPairMap"),
("read_only_space", 0x05a09): (176, "SmiBoxMap"),
("read_only_space", 0x05a31): (146, "ExportedSubClassBaseMap"),
("read_only_space", 0x05a59): (147, "ExportedSubClassMap"),
("read_only_space", 0x05a81): (68, "AbstractInternalClassSubclass1Map"),
("read_only_space", 0x05aa9): (69, "AbstractInternalClassSubclass2Map"),
("read_only_space", 0x05ad1): (134, "InternalClassWithSmiElementsMap"),
("read_only_space", 0x05af9): (169, "InternalClassWithStructElementsMap"),
("read_only_space", 0x05b21): (148, "ExportedSubClass2Map"),
("read_only_space", 0x05b49): (178, "SortStateMap"),
("read_only_space", 0x05b71): (86, "AllocationSiteWithWeakNextMap"),
("read_only_space", 0x05b99): (86, "AllocationSiteWithoutWeakNextMap"),
("read_only_space", 0x05bc1): (77, "LoadHandler1Map"),
("read_only_space", 0x05be9): (77, "LoadHandler2Map"),
("read_only_space", 0x05c11): (77, "LoadHandler3Map"),
("read_only_space", 0x05c39): (78, "StoreHandler0Map"),
("read_only_space", 0x05c61): (78, "StoreHandler1Map"),
("read_only_space", 0x05c89): (78, "StoreHandler2Map"),
("read_only_space", 0x05cb1): (78, "StoreHandler3Map"),
("map_space", 0x0211d): (1057, "ExternalMap"),
("map_space", 0x02145): (1072, "JSMessageObjectMap"),
("map_space", 0x0216d): (180, "WasmRttEqrefMap"),
("map_space", 0x02195): (180, "WasmRttExternrefMap"),
("map_space", 0x021bd): (180, "WasmRttFuncrefMap"),
("map_space", 0x021e5): (180, "WasmRttI31refMap"),
("read_only_space", 0x02115): (167, "FreeSpaceMap"),
("read_only_space", 0x0213d): (170, "MetaMap"),
("read_only_space", 0x02181): (67, "NullMap"),
("read_only_space", 0x021b9): (162, "DescriptorArrayMap"),
("read_only_space", 0x021e9): (156, "WeakFixedArrayMap"),
("read_only_space", 0x02211): (166, "OnePointerFillerMap"),
("read_only_space", 0x02239): (166, "TwoPointerFillerMap"),
("read_only_space", 0x0227d): (67, "UninitializedMap"),
("read_only_space", 0x022c1): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x0231d): (67, "UndefinedMap"),
("read_only_space", 0x02351): (66, "HeapNumberMap"),
("read_only_space", 0x02395): (67, "TheHoleMap"),
("read_only_space", 0x023f5): (67, "BooleanMap"),
("read_only_space", 0x0247d): (131, "ByteArrayMap"),
("read_only_space", 0x024a5): (117, "FixedArrayMap"),
("read_only_space", 0x024cd): (117, "FixedCOWArrayMap"),
("read_only_space", 0x024f5): (118, "HashTableMap"),
("read_only_space", 0x0251d): (64, "SymbolMap"),
("read_only_space", 0x02545): (40, "OneByteStringMap"),
("read_only_space", 0x0256d): (129, "ScopeInfoMap"),
("read_only_space", 0x02595): (175, "SharedFunctionInfoMap"),
("read_only_space", 0x025bd): (159, "CodeMap"),
("read_only_space", 0x025e5): (158, "CellMap"),
("read_only_space", 0x0260d): (174, "GlobalPropertyCellMap"),
("read_only_space", 0x02635): (70, "ForeignMap"),
("read_only_space", 0x0265d): (157, "TransitionArrayMap"),
("read_only_space", 0x02685): (45, "ThinOneByteStringMap"),
("read_only_space", 0x026ad): (165, "FeedbackVectorMap"),
("read_only_space", 0x02701): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x02761): (67, "ExceptionMap"),
("read_only_space", 0x027bd): (67, "TerminationExceptionMap"),
("read_only_space", 0x02825): (67, "OptimizedOutMap"),
("read_only_space", 0x02885): (67, "StaleRegisterMap"),
("read_only_space", 0x028c9): (130, "ScriptContextTableMap"),
("read_only_space", 0x028f1): (127, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x02919): (164, "FeedbackMetadataArrayMap"),
("read_only_space", 0x02941): (117, "ArrayListMap"),
("read_only_space", 0x02969): (65, "BigIntMap"),
("read_only_space", 0x02991): (128, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x029b9): (132, "BytecodeArrayMap"),
("read_only_space", 0x029e1): (160, "CodeDataContainerMap"),
("read_only_space", 0x02a09): (161, "CoverageInfoMap"),
("read_only_space", 0x02a31): (133, "FixedDoubleArrayMap"),
("read_only_space", 0x02a59): (120, "GlobalDictionaryMap"),
("read_only_space", 0x02a81): (97, "ManyClosuresCellMap"),
("read_only_space", 0x02aa9): (117, "ModuleInfoMap"),
("read_only_space", 0x02ad1): (121, "NameDictionaryMap"),
("read_only_space", 0x02af9): (97, "NoClosuresCellMap"),
("read_only_space", 0x02b21): (122, "NumberDictionaryMap"),
("read_only_space", 0x02b49): (97, "OneClosureCellMap"),
("read_only_space", 0x02b71): (123, "OrderedHashMapMap"),
("read_only_space", 0x02b99): (124, "OrderedHashSetMap"),
("read_only_space", 0x02bc1): (125, "OrderedNameDictionaryMap"),
("read_only_space", 0x02be9): (172, "PreparseDataMap"),
("read_only_space", 0x02c11): (173, "PropertyArrayMap"),
("read_only_space", 0x02c39): (93, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x02c61): (93, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02c89): (93, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02cb1): (126, "SimpleNumberDictionaryMap"),
("read_only_space", 0x02cd9): (149, "SmallOrderedHashMapMap"),
("read_only_space", 0x02d01): (150, "SmallOrderedHashSetMap"),
("read_only_space", 0x02d29): (151, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x02d51): (152, "SourceTextModuleMap"),
("read_only_space", 0x02d79): (153, "SyntheticModuleMap"),
("read_only_space", 0x02da1): (155, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x02dc9): (154, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x02df1): (71, "WasmTypeInfoMap"),
("read_only_space", 0x02e19): (181, "WeakArrayListMap"),
("read_only_space", 0x02e41): (119, "EphemeronHashTableMap"),
("read_only_space", 0x02e69): (163, "EmbedderDataArrayMap"),
("read_only_space", 0x02e91): (182, "WeakCellMap"),
("read_only_space", 0x02eb9): (32, "StringMap"),
("read_only_space", 0x02ee1): (41, "ConsOneByteStringMap"),
("read_only_space", 0x02f09): (33, "ConsStringMap"),
("read_only_space", 0x02f31): (37, "ThinStringMap"),
("read_only_space", 0x02f59): (35, "SlicedStringMap"),
("read_only_space", 0x02f81): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x02fa9): (34, "ExternalStringMap"),
("read_only_space", 0x02fd1): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x02ff9): (50, "UncachedExternalStringMap"),
("read_only_space", 0x03021): (0, "InternalizedStringMap"),
("read_only_space", 0x03049): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x03071): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x03099): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x030c1): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x030e9): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x03111): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x03139): (67, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x0316d): (96, "EnumCacheMap"),
("read_only_space", 0x031bd): (87, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x032a9): (99, "InterceptorInfoMap"),
("read_only_space", 0x05399): (72, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x053c1): (73, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x053e9): (74, "CallableTaskMap"),
("read_only_space", 0x05411): (75, "CallbackTaskMap"),
("read_only_space", 0x05439): (76, "PromiseResolveThenableJobTaskMap"),
("read_only_space", 0x05461): (79, "FunctionTemplateInfoMap"),
("read_only_space", 0x05489): (80, "ObjectTemplateInfoMap"),
("read_only_space", 0x054b1): (81, "AccessCheckInfoMap"),
("read_only_space", 0x054d9): (82, "AccessorInfoMap"),
("read_only_space", 0x05501): (83, "AccessorPairMap"),
("read_only_space", 0x05529): (84, "AliasedArgumentsEntryMap"),
("read_only_space", 0x05551): (85, "AllocationMementoMap"),
("read_only_space", 0x05579): (88, "AsmWasmDataMap"),
("read_only_space", 0x055a1): (89, "AsyncGeneratorRequestMap"),
("read_only_space", 0x055c9): (90, "BreakPointMap"),
("read_only_space", 0x055f1): (91, "BreakPointInfoMap"),
("read_only_space", 0x05619): (92, "CachedTemplateObjectMap"),
("read_only_space", 0x05641): (94, "ClassPositionsMap"),
("read_only_space", 0x05669): (95, "DebugInfoMap"),
("read_only_space", 0x05691): (98, "FunctionTemplateRareDataMap"),
("read_only_space", 0x056b9): (100, "InterpreterDataMap"),
("read_only_space", 0x056e1): (101, "PromiseCapabilityMap"),
("read_only_space", 0x05709): (102, "PromiseReactionMap"),
("read_only_space", 0x05731): (103, "PropertyDescriptorObjectMap"),
("read_only_space", 0x05759): (104, "PrototypeInfoMap"),
("read_only_space", 0x05781): (105, "ScriptMap"),
("read_only_space", 0x057a9): (106, "SourceTextModuleInfoEntryMap"),
("read_only_space", 0x057d1): (107, "StackFrameInfoMap"),
("read_only_space", 0x057f9): (108, "StackTraceFrameMap"),
("read_only_space", 0x05821): (109, "TemplateObjectDescriptionMap"),
("read_only_space", 0x05849): (110, "Tuple2Map"),
("read_only_space", 0x05871): (111, "WasmCapiFunctionDataMap"),
("read_only_space", 0x05899): (112, "WasmExceptionTagMap"),
("read_only_space", 0x058c1): (113, "WasmExportedFunctionDataMap"),
("read_only_space", 0x058e9): (114, "WasmIndirectFunctionTableMap"),
("read_only_space", 0x05911): (115, "WasmJSFunctionDataMap"),
("read_only_space", 0x05939): (116, "WasmValueMap"),
("read_only_space", 0x05961): (135, "SloppyArgumentsElementsMap"),
("read_only_space", 0x05989): (171, "OnHeapBasicBlockProfilerDataMap"),
("read_only_space", 0x059b1): (168, "InternalClassMap"),
("read_only_space", 0x059d9): (177, "SmiPairMap"),
("read_only_space", 0x05a01): (176, "SmiBoxMap"),
("read_only_space", 0x05a29): (146, "ExportedSubClassBaseMap"),
("read_only_space", 0x05a51): (147, "ExportedSubClassMap"),
("read_only_space", 0x05a79): (68, "AbstractInternalClassSubclass1Map"),
("read_only_space", 0x05aa1): (69, "AbstractInternalClassSubclass2Map"),
("read_only_space", 0x05ac9): (134, "InternalClassWithSmiElementsMap"),
("read_only_space", 0x05af1): (169, "InternalClassWithStructElementsMap"),
("read_only_space", 0x05b19): (148, "ExportedSubClass2Map"),
("read_only_space", 0x05b41): (178, "SortStateMap"),
("read_only_space", 0x05b69): (86, "AllocationSiteWithWeakNextMap"),
("read_only_space", 0x05b91): (86, "AllocationSiteWithoutWeakNextMap"),
("read_only_space", 0x05bb9): (77, "LoadHandler1Map"),
("read_only_space", 0x05be1): (77, "LoadHandler2Map"),
("read_only_space", 0x05c09): (77, "LoadHandler3Map"),
("read_only_space", 0x05c31): (78, "StoreHandler0Map"),
("read_only_space", 0x05c59): (78, "StoreHandler1Map"),
("read_only_space", 0x05c81): (78, "StoreHandler2Map"),
("read_only_space", 0x05ca9): (78, "StoreHandler3Map"),
("map_space", 0x02115): (1057, "ExternalMap"),
("map_space", 0x0213d): (1072, "JSMessageObjectMap"),
("map_space", 0x02165): (180, "WasmRttEqrefMap"),
("map_space", 0x0218d): (180, "WasmRttExternrefMap"),
("map_space", 0x021b5): (180, "WasmRttFuncrefMap"),
("map_space", 0x021dd): (180, "WasmRttI31refMap"),
}
# List of known V8 objects.
KNOWN_OBJECTS = {
("read_only_space", 0x0216d): "NullValue",
("read_only_space", 0x021b1): "EmptyDescriptorArray",
("read_only_space", 0x021e9): "EmptyWeakFixedArray",
("read_only_space", 0x02269): "UninitializedValue",
("read_only_space", 0x02309): "UndefinedValue",
("read_only_space", 0x0234d): "NanValue",
("read_only_space", 0x02381): "TheHoleValue",
("read_only_space", 0x023d5): "HoleNanValue",
("read_only_space", 0x023e1): "TrueValue",
("read_only_space", 0x02449): "FalseValue",
("read_only_space", 0x02479): "empty_string",
("read_only_space", 0x026dd): "EmptyScopeInfo",
("read_only_space", 0x026e5): "EmptyFixedArray",
("read_only_space", 0x026ed): "ArgumentsMarker",
("read_only_space", 0x0274d): "Exception",
("read_only_space", 0x027a9): "TerminationException",
("read_only_space", 0x02811): "OptimizedOut",
("read_only_space", 0x02871): "StaleRegister",
("read_only_space", 0x03169): "EmptyEnumCache",
("read_only_space", 0x0319d): "EmptyPropertyArray",
("read_only_space", 0x031a5): "EmptyByteArray",
("read_only_space", 0x031ad): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x031b9): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x031ed): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x031f5): "EmptySlowElementDictionary",
("read_only_space", 0x03219): "EmptyOrderedHashMap",
("read_only_space", 0x0322d): "EmptyOrderedHashSet",
("read_only_space", 0x03241): "EmptyFeedbackMetadata",
("read_only_space", 0x0324d): "EmptyPropertyCell",
("read_only_space", 0x03261): "EmptyPropertyDictionary",
("read_only_space", 0x03289): "NoOpInterceptorInfo",
("read_only_space", 0x032d9): "EmptyWeakArrayList",
("read_only_space", 0x032e5): "InfinityValue",
("read_only_space", 0x032f1): "MinusZeroValue",
("read_only_space", 0x032fd): "MinusInfinityValue",
("read_only_space", 0x03309): "SelfReferenceMarker",
("read_only_space", 0x03349): "BasicBlockCountersMarker",
("read_only_space", 0x0338d): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x03399): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x033a5): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x033b1): "GlobalThisBindingScopeInfo",
("read_only_space", 0x033e9): "EmptyFunctionScopeInfo",
("read_only_space", 0x03411): "NativeScopeInfo",
("read_only_space", 0x0342d): "HashSeed",
("old_space", 0x0211d): "ArgumentsIteratorAccessor",
("old_space", 0x02161): "ArrayLengthAccessor",
("old_space", 0x021a5): "BoundFunctionLengthAccessor",
("old_space", 0x021e9): "BoundFunctionNameAccessor",
("old_space", 0x0222d): "ErrorStackAccessor",
("old_space", 0x02271): "FunctionArgumentsAccessor",
("old_space", 0x022b5): "FunctionCallerAccessor",
("old_space", 0x022f9): "FunctionNameAccessor",
("old_space", 0x0233d): "FunctionLengthAccessor",
("old_space", 0x02381): "FunctionPrototypeAccessor",
("old_space", 0x023c5): "RegExpResultIndicesAccessor",
("old_space", 0x02409): "StringLengthAccessor",
("old_space", 0x0244d): "InvalidPrototypeValidityCell",
("old_space", 0x024d5): "EmptyScript",
("old_space", 0x02515): "ManyClosuresCell",
("old_space", 0x02521): "ArrayConstructorProtector",
("old_space", 0x02535): "NoElementsProtector",
("old_space", 0x02549): "IsConcatSpreadableProtector",
("old_space", 0x0255d): "ArraySpeciesProtector",
("old_space", 0x02571): "TypedArraySpeciesProtector",
("old_space", 0x02585): "PromiseSpeciesProtector",
("old_space", 0x02599): "RegExpSpeciesProtector",
("old_space", 0x025ad): "StringLengthProtector",
("old_space", 0x025c1): "ArrayIteratorProtector",
("old_space", 0x025d5): "ArrayBufferDetachingProtector",
("old_space", 0x025e9): "PromiseHookProtector",
("old_space", 0x025fd): "PromiseResolveProtector",
("old_space", 0x02611): "MapIteratorProtector",
("old_space", 0x02625): "PromiseThenProtector",
("old_space", 0x02639): "SetIteratorProtector",
("old_space", 0x0264d): "StringIteratorProtector",
("old_space", 0x02661): "SingleCharacterStringCache",
("old_space", 0x02a69): "StringSplitCache",
("old_space", 0x02e71): "RegExpMultipleCache",
("old_space", 0x03279): "BuiltinsConstantsTable",
("old_space", 0x0362d): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x03655): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x0367d): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x036a5): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x036cd): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x036f5): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x0371d): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x03745): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x0376d): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x03795): "PromiseAllResolveElementSharedFun",
("old_space", 0x037bd): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x037e5): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x0380d): "PromiseAnyRejectElementSharedFun",
("old_space", 0x03835): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x0385d): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x03885): "PromiseCatchFinallySharedFun",
("old_space", 0x038ad): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x038d5): "PromiseThenFinallySharedFun",
("old_space", 0x038fd): "PromiseThrowerFinallySharedFun",
("old_space", 0x03925): "PromiseValueThunkFinallySharedFun",
("old_space", 0x0394d): "ProxyRevokeSharedFun",
("read_only_space", 0x02165): "NullValue",
("read_only_space", 0x021a9): "EmptyDescriptorArray",
("read_only_space", 0x021e1): "EmptyWeakFixedArray",
("read_only_space", 0x02261): "UninitializedValue",
("read_only_space", 0x02301): "UndefinedValue",
("read_only_space", 0x02345): "NanValue",
("read_only_space", 0x02379): "TheHoleValue",
("read_only_space", 0x023cd): "HoleNanValue",
("read_only_space", 0x023d9): "TrueValue",
("read_only_space", 0x02441): "FalseValue",
("read_only_space", 0x02471): "empty_string",
("read_only_space", 0x026d5): "EmptyScopeInfo",
("read_only_space", 0x026dd): "EmptyFixedArray",
("read_only_space", 0x026e5): "ArgumentsMarker",
("read_only_space", 0x02745): "Exception",
("read_only_space", 0x027a1): "TerminationException",
("read_only_space", 0x02809): "OptimizedOut",
("read_only_space", 0x02869): "StaleRegister",
("read_only_space", 0x03161): "EmptyEnumCache",
("read_only_space", 0x03195): "EmptyPropertyArray",
("read_only_space", 0x0319d): "EmptyByteArray",
("read_only_space", 0x031a5): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x031b1): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x031e5): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x031ed): "EmptySlowElementDictionary",
("read_only_space", 0x03211): "EmptyOrderedHashMap",
("read_only_space", 0x03225): "EmptyOrderedHashSet",
("read_only_space", 0x03239): "EmptyFeedbackMetadata",
("read_only_space", 0x03245): "EmptyPropertyCell",
("read_only_space", 0x03259): "EmptyPropertyDictionary",
("read_only_space", 0x03281): "NoOpInterceptorInfo",
("read_only_space", 0x032d1): "EmptyWeakArrayList",
("read_only_space", 0x032dd): "InfinityValue",
("read_only_space", 0x032e9): "MinusZeroValue",
("read_only_space", 0x032f5): "MinusInfinityValue",
("read_only_space", 0x03301): "SelfReferenceMarker",
("read_only_space", 0x03341): "BasicBlockCountersMarker",
("read_only_space", 0x03385): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x03391): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x0339d): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x033a9): "GlobalThisBindingScopeInfo",
("read_only_space", 0x033e1): "EmptyFunctionScopeInfo",
("read_only_space", 0x03409): "NativeScopeInfo",
("read_only_space", 0x03425): "HashSeed",
("old_space", 0x02115): "ArgumentsIteratorAccessor",
("old_space", 0x02159): "ArrayLengthAccessor",
("old_space", 0x0219d): "BoundFunctionLengthAccessor",
("old_space", 0x021e1): "BoundFunctionNameAccessor",
("old_space", 0x02225): "ErrorStackAccessor",
("old_space", 0x02269): "FunctionArgumentsAccessor",
("old_space", 0x022ad): "FunctionCallerAccessor",
("old_space", 0x022f1): "FunctionNameAccessor",
("old_space", 0x02335): "FunctionLengthAccessor",
("old_space", 0x02379): "FunctionPrototypeAccessor",
("old_space", 0x023bd): "RegExpResultIndicesAccessor",
("old_space", 0x02401): "StringLengthAccessor",
("old_space", 0x02445): "InvalidPrototypeValidityCell",
("old_space", 0x024cd): "EmptyScript",
("old_space", 0x0250d): "ManyClosuresCell",
("old_space", 0x02519): "ArrayConstructorProtector",
("old_space", 0x0252d): "NoElementsProtector",
("old_space", 0x02541): "IsConcatSpreadableProtector",
("old_space", 0x02555): "ArraySpeciesProtector",
("old_space", 0x02569): "TypedArraySpeciesProtector",
("old_space", 0x0257d): "PromiseSpeciesProtector",
("old_space", 0x02591): "RegExpSpeciesProtector",
("old_space", 0x025a5): "StringLengthProtector",
("old_space", 0x025b9): "ArrayIteratorProtector",
("old_space", 0x025cd): "ArrayBufferDetachingProtector",
("old_space", 0x025e1): "PromiseHookProtector",
("old_space", 0x025f5): "PromiseResolveProtector",
("old_space", 0x02609): "MapIteratorProtector",
("old_space", 0x0261d): "PromiseThenProtector",
("old_space", 0x02631): "SetIteratorProtector",
("old_space", 0x02645): "StringIteratorProtector",
("old_space", 0x02659): "SingleCharacterStringCache",
("old_space", 0x02a61): "StringSplitCache",
("old_space", 0x02e69): "RegExpMultipleCache",
("old_space", 0x03271): "BuiltinsConstantsTable",
("old_space", 0x03625): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x0364d): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x03675): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x0369d): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x036c5): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x036ed): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x03715): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x0373d): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x03765): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x0378d): "PromiseAllResolveElementSharedFun",
("old_space", 0x037b5): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x037dd): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x03805): "PromiseAnyRejectElementSharedFun",
("old_space", 0x0382d): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x03855): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x0387d): "PromiseCatchFinallySharedFun",
("old_space", 0x038a5): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x038cd): "PromiseThenFinallySharedFun",
("old_space", 0x038f5): "PromiseThrowerFinallySharedFun",
("old_space", 0x0391d): "PromiseValueThunkFinallySharedFun",
("old_space", 0x03945): "ProxyRevokeSharedFun",
}
# Lower 32 bits of first page addresses for various heap spaces.