[objects] Add ArrayBufferExtension class

This CL adds the ArrayBufferExtension class, which is used to track
JSArrayBuffers in a linked list. ArrayBufferExtension is intended to
replace the ArrayBufferTracker but is currently behind the
v8_enable_array_buffer_extension feature flag, which is disabled by
default.

When enabled, each JSArrayBuffer has a corresponding native-heap-allocated
ArrayBufferExtension object, and all extensions are currently tracked in a
single linked list. During marking, the GC marks not only the JSArrayBuffer
but also its extension object. At the end of mark-compact, the GC iterates
all extensions and frees the unmarked ones.
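
A minimal, self-contained sketch of the scheme (illustrative only: the
names are simplified and the backing store is omitted; the real classes
appear in the diff below):

  #include <atomic>

  // One malloc'd record per tracked buffer, linked into a single list.
  struct Extension {
    std::atomic<bool> marked{false};
    Extension* next = nullptr;
  };

  struct ExtensionList {
    Extension* head = nullptr;

    // Registering a buffer prepends its new extension to the list.
    void Append(Extension* e) {
      e->next = head;
      head = e;
    }

    // During marking, the GC sets the bit when visiting the owning buffer.
    static void Mark(Extension* e) {
      e->marked.store(true, std::memory_order_relaxed);
    }

    // At the end of mark-compact: free unmarked extensions, clear the
    // mark bit of survivors, and relink them into a new list.
    void Sweep() {
      Extension* current = head;
      Extension* survivors = nullptr;
      while (current) {
        Extension* next = current->next;
        if (!current->marked.load(std::memory_order_relaxed)) {
          delete current;
        } else {
          current->marked.store(false, std::memory_order_relaxed);
          current->next = survivors;
          survivors = current;
        }
        current = next;
      }
      head = survivors;
    }
  };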

Change-Id: I88298be255944d5ae1327c91b0d7f0fdbcd486d5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1969791
Reviewed-by: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65724}
Authored by Dominik Inführ on 2020-01-10 13:33:18 +01:00; committed by Commit Bot
parent e83a46115d
commit 69fda08a80
15 changed files with 235 additions and 16 deletions

View File

@@ -128,7 +128,7 @@ declare_args() {
v8_enable_concurrent_marking = true
# Sets -dV8_ARRAY_BUFFER_EXTENSION
-v8_enable_array_buffer_extension = true
+v8_enable_array_buffer_extension = false
# Enables various testing features.
v8_enable_test_features = ""

View File

@@ -51,6 +51,7 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
// - Set IsExternal and IsDetachable bits of BitFieldSlot.
// - Set the byte_length field to byte_length.
// - Set backing_store to null/Smi(0).
// - Set extension to null.
// - Set all embedder fields to Smi(0).
if (FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset) != 0) {
DCHECK_EQ(4, FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset));
@@ -70,6 +71,11 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBackingStoreOffset,
IntPtrConstant(0),
MachineType::PointerRepresentation());
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
IntPtrConstant(0),
MachineType::PointerRepresentation());
}
for (int offset = JSArrayBuffer::kHeaderSize;
offset < JSArrayBuffer::kSizeWithEmbedderFields; offset += kTaggedSize) {
StoreObjectFieldNoWriteBarrier(buffer, offset, SmiConstant(0));

View File

@@ -29,6 +29,10 @@ V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
V8_EXPORT_PRIVATE void Heap_MarkingBarrierSlow(HeapObject object, Address slot,
HeapObject value);
V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject object);
@@ -144,6 +148,14 @@ inline void WriteBarrierForCode(Code host) {
Heap_WriteBarrierForCodeSlow(host);
}
inline void MarkingBarrierForArrayBufferExtension(
HeapObject object, ArrayBufferExtension* extension) {
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
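// Fast path: bail out if there is no extension or the buffer's page is
// not currently being marked.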
if (!extension || !object_chunk->IsMarking()) return;
Heap_MarkingBarrierForArrayBufferExtensionSlow(object, extension);
}
inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;

View File

@@ -11,6 +11,7 @@
namespace v8 {
namespace internal {
class ArrayBufferExtension;
class Code;
class FixedArray;
class Heap;
@@ -40,6 +41,9 @@ void MarkingBarrier(HeapObject object, ObjectSlot slot, HeapObject value);
void MarkingBarrier(HeapObject object, MaybeObjectSlot slot, MaybeObject value);
void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
void MarkingBarrierForArrayBufferExtension(HeapObject object,
ArrayBufferExtension* extension);
void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
HeapObject descriptor_array,
int number_of_own_descriptors);

View File

@@ -108,6 +108,11 @@ void Heap_WriteBarrierForCodeSlow(Code host) {
Heap::WriteBarrierForCodeSlow(host);
}
void Heap_MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension) {
Heap::MarkingBarrierForArrayBufferExtensionSlow(object, extension);
}
void Heap_GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
@@ -3779,6 +3784,18 @@ void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
UNREACHABLE();
}
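// Frees every remaining extension unconditionally; called from
// Heap::TearDown below.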
void Heap::ReleaseAllArrayBufferExtensions() {
ArrayBufferExtension* current = array_buffer_extensions_;
while (current) {
ArrayBufferExtension* next = current->next();
delete current;
current = next;
}
array_buffer_extensions_ = nullptr;
}
void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
initial_max_old_generation_size_threshold_ =
initial_max_old_generation_size_ * threshold_percent;
@@ -5246,6 +5263,8 @@ void Heap::TearDown() {
// It's too late for Heap::Verify() here, as parts of the Isolate are
// already gone by the time this is called.
ReleaseAllArrayBufferExtensions();
UpdateMaximumCommitted();
if (FLAG_verify_predictable || FLAG_fuzzer_gc_analysis) {
@@ -6234,6 +6253,16 @@ void Heap::WriteBarrierForCodeSlow(Code code) {
}
}
void Heap::MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension) {
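// With concurrent marking enabled the extension is marked unconditionally;
// otherwise it only needs to be marked if the buffer itself is already
// black.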
if (V8_CONCURRENT_MARKING_BOOL || GetIsolateFromWritableObject(object)
->heap()
->incremental_marking()
->marking_state()
->IsBlack(object))
extension->Mark();
}
void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
HeapObject value) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);

View File

@@ -24,6 +24,7 @@
#include "src/objects/allocation-site.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-array-buffer.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"
#include "src/objects/string-table.h"
@@ -385,6 +386,10 @@ class Heap {
TSlot end);
V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
V8_EXPORT_PRIVATE static void MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
@@ -400,6 +405,10 @@ class Heap {
V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject value);
static void MarkingBarrierForArrayBufferExtension(
JSArrayBuffer object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE static void MarkingBarrierForDescriptorArraySlow(
Heap* heap, HeapObject host, HeapObject descriptor_array,
int number_of_own_descriptors);
@@ -575,6 +584,21 @@ class Heap {
V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
double threshold_percent);
ArrayBufferExtension* array_buffer_extensions() {
return array_buffer_extensions_;
}
void set_array_buffer_extensions(ArrayBufferExtension* head) {
array_buffer_extensions_ = head;
}
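// Prepends a newly allocated extension to the heap's global extension list.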
void AppendArrayBufferExtension(ArrayBufferExtension* extension) {
extension->set_next(array_buffer_extensions_);
array_buffer_extensions_ = extension;
}
void ReleaseAllArrayBufferExtensions();
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs();
void RecordStats(HeapStats* stats, bool take_snapshot = false);
@@ -1896,6 +1920,9 @@ class Heap {
// Map from the space id to the space.
Space* space_[LAST_SPACE + 1];
// List for tracking ArrayBufferExtensions
ArrayBufferExtension* array_buffer_extensions_ = nullptr;
// Determines whether code space is write-protected. This is essentially a
// race-free copy of the {FLAG_write_protect_code_memory} flag.
bool write_protect_code_memory_ = false;

View File

@@ -885,6 +885,8 @@ void MarkCompactCollector::VerifyMarking() {
void MarkCompactCollector::Finish() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH);
SweepArrayBufferExtensions();
#ifdef DEBUG
heap()->VerifyCountersBeforeConcurrentSweeping();
#endif
@@ -925,6 +927,28 @@ void MarkCompactCollector::Finish() {
}
}
void MarkCompactCollector::SweepArrayBufferExtensions() {
if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ArrayBufferExtension* current = heap_->array_buffer_extensions();
ArrayBufferExtension* last = nullptr;
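// Rebuild the list in place: unmarked extensions are freed, survivors
// are unmarked and relinked.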
while (current) {
ArrayBufferExtension* next = current->next();
if (!current->IsMarked()) {
delete current;
} else {
current->Unmark();
current->set_next(last);
last = current;
}
current = next;
}
heap_->set_array_buffer_extensions(last);
}
class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
public:
explicit RootMarkingVisitor(MarkCompactCollector* collector)

View File

@@ -608,6 +608,9 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
// Finishes GC, performs heap verification if enabled.
void Finish();
// Free unmarked ArrayBufferExtensions.
void SweepArrayBufferExtensions();
void MarkLiveObjects() override;
// Marks the object black and adds it to the marking work list.

View File

@@ -234,6 +234,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSApiObject(
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSArrayBuffer(
Map map, JSArrayBuffer object) {
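// Mark the native extension together with the buffer so the sweep at the
// end of mark-compact keeps it alive.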
object.MarkExtension();
return VisitEmbedderTracingSubclass(map, object);
}

View File

@@ -44,17 +44,23 @@ void JSArrayBuffer::set_backing_store(void* value) {
WriteField<Address>(kBackingStoreOffset, reinterpret_cast<Address>(value));
}
-void* JSArrayBuffer::extension() const {
+ArrayBufferExtension* JSArrayBuffer::extension() const {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
-return reinterpret_cast<void*>(ReadField<Address>(kExtensionOffset));
+return base::AsAtomicPointer::Acquire_Load(extension_location());
} else {
return nullptr;
}
}
-void JSArrayBuffer::set_extension(void* value) {
+ArrayBufferExtension** JSArrayBuffer::extension_location() const {
+Address location = field_address(kExtensionOffset);
+return reinterpret_cast<ArrayBufferExtension**>(location);
+}
+void JSArrayBuffer::set_extension(ArrayBufferExtension* value) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
-WriteField<Address>(kExtensionOffset, reinterpret_cast<Address>(value));
+base::AsAtomicPointer::Release_Store(extension_location(), value);
+MarkingBarrierForArrayBufferExtension(*this, value);
} else {
CHECK_EQ(value, nullptr);
}
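
The accessors above publish the extension pointer with release semantics and
read it with acquire semantics, so a concurrent reader never observes a
pointer to a partially initialized extension. A rough portable-C++ equivalent
of the idiom, with std::atomic standing in for V8's base::AsAtomicPointer and
the write barrier omitted (illustrative only):

  #include <atomic>

  struct ExtensionStub {};  // stand-in for the real ArrayBufferExtension

  class BufferStub {
    std::atomic<ExtensionStub*> extension_{nullptr};

   public:
    ExtensionStub* extension() const {
      // Acquire-load: pairs with the release-store in set_extension(),
      // making the extension's initialization visible to this thread.
      return extension_.load(std::memory_order_acquire);
    }

    void set_extension(ExtensionStub* value) {
      // Release-store: publishes a fully constructed extension.
      extension_.store(value, std::memory_order_release);
    }
  };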

View File

@@ -63,7 +63,13 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
set_byte_length(backing_store->byte_length());
if (backing_store->is_wasm_memory()) set_is_detachable(false);
if (!backing_store->free_on_destruct()) set_is_external(true);
-GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
+if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+Heap* heap = GetIsolate()->heap();
+EnsureExtension(heap);
+extension()->set_backing_store(std::move(backing_store));
+} else {
+GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
+}
}
void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
@@ -78,7 +84,12 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
Isolate* const isolate = GetIsolate();
if (backing_store()) {
-auto backing_store = isolate->heap()->UnregisterBackingStore(*this);
+std::shared_ptr<BackingStore> backing_store;
+if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+backing_store = RemoveExtension();
+} else {
+backing_store = isolate->heap()->UnregisterBackingStore(*this);
+}
CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
}
@@ -94,7 +105,40 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
}
std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
-return GetIsolate()->heap()->LookupBackingStore(*this);
+if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
+if (!extension()) return nullptr;
+return extension()->backing_store();
+} else {
+return GetIsolate()->heap()->LookupBackingStore(*this);
+}
}
ArrayBufferExtension* JSArrayBuffer::EnsureExtension(Heap* heap) {
DCHECK(V8_ARRAY_BUFFER_EXTENSION_BOOL);
if (extension() != nullptr) return extension();
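// Allocate an extension without a backing store, publish it on the
// buffer, and register it with the heap; the caller attaches the
// backing store afterwards.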
ArrayBufferExtension* extension =
new ArrayBufferExtension(std::shared_ptr<BackingStore>());
set_extension(extension);
heap->AppendArrayBufferExtension(extension);
return extension;
}
std::shared_ptr<BackingStore> JSArrayBuffer::RemoveExtension() {
ArrayBufferExtension* extension = this->extension();
DCHECK_NOT_NULL(extension);
auto result = extension->RemoveBackingStore();
// Remove pointer to extension such that the next GC will free it
// automatically.
set_extension(nullptr);
return result;
}
void JSArrayBuffer::MarkExtension() {
ArrayBufferExtension* extension = this->extension();
if (extension) {
extension->Mark();
}
}
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {

View File

@@ -15,6 +15,8 @@
namespace v8 {
namespace internal {
class ArrayBufferExtension;
class JSArrayBuffer : public JSObject {
public:
// The maximum length for JSArrayBuffer's supported by V8.
@@ -34,7 +36,7 @@ class JSArrayBuffer : public JSObject {
DECL_PRIMITIVE_ACCESSORS(backing_store, void*)
// [extension]: extension object used for GC
-DECL_PRIMITIVE_ACCESSORS(extension, void*)
+DECL_PRIMITIVE_ACCESSORS(extension, ArrayBufferExtension*)
// For non-wasm, allocation_length and allocation_base are byte_length and
// backing_store, respectively.
@@ -103,6 +105,16 @@ class JSArrayBuffer : public JSObject {
// or a zero-length array buffer).
std::shared_ptr<BackingStore> GetBackingStore();
// Allocates an ArrayBufferExtension for this array buffer, unless it is
// already associated with an extension.
ArrayBufferExtension* EnsureExtension(Heap* heap);
// Frees the associated ArrayBufferExtension and returns its backing store.
std::shared_ptr<BackingStore> RemoveExtension();
// Marks ArrayBufferExtension
void MarkExtension();
// Dispatched behavior.
DECL_PRINTER(JSArrayBuffer)
DECL_VERIFIER(JSArrayBuffer)
@@ -131,6 +143,49 @@ class JSArrayBuffer : public JSObject {
class BodyDescriptor;
OBJECT_CONSTRUCTORS(JSArrayBuffer, JSObject);
private:
inline ArrayBufferExtension** extension_location() const;
};
// Each JSArrayBuffer (with a backing store) has a corresponding native-heap
// allocated ArrayBufferExtension for GC purposes and storing the backing store.
// When marking a JSArrayBuffer, the GC also marks the native
// extension-object. The GC periodically iterates all extensions concurrently
// and frees unmarked ones.
// https://docs.google.com/document/d/1-ZrLdlFX1nXT3z-FAgLbKal1gI8Auiaya_My-a0UJ28/edit
class ArrayBufferExtension : public Malloced {
std::atomic<bool> marked_;
std::shared_ptr<BackingStore> backing_store_;
ArrayBufferExtension* next_;
public:
ArrayBufferExtension()
: marked_(false),
backing_store_(std::shared_ptr<BackingStore>()),
next_(nullptr) {}
explicit ArrayBufferExtension(std::shared_ptr<BackingStore> backing_store)
: marked_(false), backing_store_(backing_store), next_(nullptr) {}
void Mark() { marked_.store(true, std::memory_order_relaxed); }
void Unmark() { marked_.store(false, std::memory_order_relaxed); }
bool IsMarked() { return marked_.load(std::memory_order_relaxed); }
std::shared_ptr<BackingStore> backing_store() { return backing_store_; }
BackingStore* backing_store_raw() { return backing_store_.get(); }
std::shared_ptr<BackingStore> RemoveBackingStore() {
return std::move(backing_store_);
}
void set_backing_store(std::shared_ptr<BackingStore> backing_store) {
backing_store_ = std::move(backing_store);
}
void reset_backing_store() { backing_store_.reset(); }
ArrayBufferExtension* next() { return next_; }
void set_next(ArrayBufferExtension* extension) { next_ = extension; }
};
class JSArrayBufferView : public JSObject {

View File

@@ -301,6 +301,7 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
}
} else if (obj.IsJSArrayBuffer()) {
JSArrayBuffer buffer = JSArrayBuffer::cast(obj);
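// A deserialized buffer cannot carry a valid extension pointer, so
// clear the field explicitly.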
buffer.set_extension(nullptr);
// Only fixup for the off-heap case. This may trigger GC.
if (buffer.backing_store() != nullptr) {
new_off_heap_array_buffers_.push_back(handle(buffer, isolate_));

View File

@@ -30,6 +30,7 @@ namespace heap {
// moving the objects through various spaces during GC phases.
TEST(ArrayBuffer_OnlyMC) {
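// Exercises the legacy ArrayBufferTracker, which does not run when the
// extension mechanism is enabled.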
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
@@ -58,6 +59,7 @@
}
TEST(ArrayBuffer_OnlyScavenge) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
@@ -88,6 +90,7 @@
}
TEST(ArrayBuffer_ScavengeAndMC) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext env;
@@ -120,7 +123,7 @@
}
TEST(ArrayBuffer_Compaction) {
-if (FLAG_never_compact) return;
+if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();
@@ -149,6 +152,7 @@
}
TEST(ArrayBuffer_UnregisterDuringSweep) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
// Regular pages in old space (without compaction) are processed concurrently
// in the sweeper. If we happen to unregister a buffer (either explicitly, or
// implicitly through e.g. |Detach|) we need to sync with the sweeper
@@ -197,7 +201,7 @@
}
TEST(ArrayBuffer_NonLivePromotion) {
-if (!FLAG_incremental_marking) return;
+if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
@@ -234,7 +238,7 @@
}
TEST(ArrayBuffer_LivePromotion) {
-if (!FLAG_incremental_marking) return;
+if (!FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved when promoting
// a buffer to old space.
@@ -270,7 +274,7 @@
}
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
-if (!i::FLAG_incremental_marking) return;
+if (!i::FLAG_incremental_marking || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
// The test verifies that the marking state is preserved across semispace
// copy.
@@ -337,6 +341,7 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
}
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
CcTest::InitializeVM();
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
@@ -378,7 +383,7 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
}
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
-if (FLAG_never_compact) return;
+if (FLAG_never_compact || V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
ManualGCScope manual_gc_scope;
FLAG_manual_evacuation_candidates_selection = true;
CcTest::InitializeVM();

View File

@@ -167,7 +167,8 @@ UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
-CHECK(ArrayBufferTracker::IsTracked(*buffer));
+if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
+CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}
@@ -212,7 +213,8 @@ UNINITIALIZED_TEST(PagePromotion_NewToOldJSArrayBuffer) {
CHECK(heap->old_space()->ContainsSlow(buffer->address()));
CHECK(to_be_promoted_page->Contains(first_object->address()));
CHECK(to_be_promoted_page->Contains(buffer->address()));
-CHECK(ArrayBufferTracker::IsTracked(*buffer));
+if (!V8_ARRAY_BUFFER_EXTENSION_BOOL)
+CHECK(ArrayBufferTracker::IsTracked(*buffer));
}
isolate->Dispose();
}