diff --git a/include/v8.h b/include/v8.h
index 809e575219..9d9812818a 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -4971,6 +4971,25 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
       v8::Isolate* isolate, std::unique_ptr<BackingStore> backing_store,
       size_t byte_length);
 
+  /**
+   * This callback is used only if the memory block for a BackingStore cannot be
+   * allocated with an ArrayBuffer::Allocator. In such cases the destructor of
+   * the BackingStore invokes the callback to free the memory block.
+   */
+  using DeleterCallback = void (*)(void* data, size_t length,
+                                   void* deleter_data);
+
+  /**
+   * If the memory block of a BackingStore is static or is managed manually,
+   * then this empty deleter along with nullptr deleter_data can be passed to
+   * ArrayBuffer::NewBackingStore to indicate that.
+   *
+   * The manually managed case should be used with caution and only when it
+   * is guaranteed that the memory block freeing happens after detaching its
+   * ArrayBuffer.
+   */
+  static void EmptyDeleter(void* data, size_t length, void* deleter_data);
+
  private:
   /**
    * See [Shared]ArrayBuffer::GetBackingStore and
@@ -4979,14 +4998,13 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
   BackingStore();
 };
 
-/**
- * This callback is used only if the memory block for this backing store cannot
- * be allocated with an ArrayBuffer::Allocator. In such cases the destructor
- * of this backing store object invokes the callback to free the memory block.
- */
+#if !defined(V8_IMMINENT_DEPRECATION_WARNINGS)
+// Use v8::BackingStore::DeleterCallback instead.
 using BackingStoreDeleterCallback = void (*)(void* data, size_t length,
                                              void* deleter_data);
 
+#endif
+
 /**
  * An instance of the built-in ArrayBuffer constructor (ES6 draft 15.13.5).
  */
@@ -5174,7 +5192,7 @@ class V8_EXPORT ArrayBuffer : public Object {
    * to the buffer must not be passed again to any V8 API function.
    */
   static std::unique_ptr<BackingStore> NewBackingStore(
-      void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
+      void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
       void* deleter_data);
 
   /**
@@ -5656,7 +5674,7 @@ class V8_EXPORT SharedArrayBuffer : public Object {
    * to the buffer must not be passed again to any V8 functions.
   */
  static std::unique_ptr<BackingStore> NewBackingStore(
-      void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
+      void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
       void* deleter_data);
 
   /**
diff --git a/src/api/api.cc b/src/api/api.cc
index 7328f775b8..36d006691e 100644
--- a/src/api/api.cc
+++ b/src/api/api.cc
@@ -3842,6 +3842,12 @@ std::unique_ptr<v8::BackingStore> v8::BackingStore::Reallocate(
   return backing_store;
 }
 
+// static
+void v8::BackingStore::EmptyDeleter(void* data, size_t length,
+                                    void* deleter_data) {
+  DCHECK_NULL(deleter_data);
+}
+
 std::shared_ptr<v8::BackingStore> v8::ArrayBuffer::GetBackingStore() {
   i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
   std::shared_ptr<i::BackingStore> backing_store = self->GetBackingStore();
@@ -7577,7 +7583,7 @@ std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
 }
 
 std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
-    void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
+    void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
     void* deleter_data) {
   CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
   std::unique_ptr<i::BackingStoreBase> backing_store =
@@ -7906,7 +7912,7 @@ std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
 }
 
 std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
-    void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
+    void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
     void* deleter_data) {
   CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
   std::unique_ptr<i::BackingStoreBase> backing_store =
diff --git a/src/heap/array-buffer-tracker-inl.h b/src/heap/array-buffer-tracker-inl.h
index 0cfdca3a32..b2d1115cba 100644
--- a/src/heap/array-buffer-tracker-inl.h
+++ b/src/heap/array-buffer-tracker-inl.h
@@ -30,6 +30,7 @@ void ArrayBufferTracker::RegisterNew(
   // ArrayBuffer tracking works only for small objects.
   DCHECK(!heap->IsLargeObject(buffer));
   DCHECK_EQ(backing_store->buffer_start(), buffer.backing_store());
+  const size_t length = backing_store->PerIsolateAccountingLength();
 
   Page* page = Page::FromHeapObject(buffer);
   {
@@ -49,7 +50,6 @@ void ArrayBufferTracker::RegisterNew(
   // TODO(wez): Remove backing-store from external memory accounting.
   // We may go over the limit of externally allocated memory here. We call the
   // api function to trigger a GC in this case.
-  const size_t length = buffer.PerIsolateAccountingLength();
   reinterpret_cast<v8::Isolate*>(heap->isolate())
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
@@ -58,7 +58,6 @@ std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
     Heap* heap, JSArrayBuffer buffer) {
   std::shared_ptr<BackingStore> backing_store;
 
-  const size_t length = buffer.PerIsolateAccountingLength();
   Page* page = Page::FromHeapObject(buffer);
   {
     base::MutexGuard guard(page->mutex());
@@ -68,6 +67,7 @@ std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
   }
 
   // TODO(wez): Remove backing-store from external memory accounting.
+  const size_t length = backing_store->PerIsolateAccountingLength();
   heap->update_external_memory(-static_cast<intptr_t>(length));
   return backing_store;
 }
@@ -90,7 +90,7 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
        it != array_buffers_.end();) {
     // Unchecked cast because the map might already be dead at this point.
     JSArrayBuffer buffer = JSArrayBuffer::unchecked_cast(it->first);
-    const size_t length = buffer.PerIsolateAccountingLength();
+    const size_t length = it->second->PerIsolateAccountingLength();
 
     if (should_free(buffer)) {
       // Destroy the shared pointer, (perhaps) freeing the backing store.
@@ -127,7 +127,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
 
 void LocalArrayBufferTracker::Add(JSArrayBuffer buffer,
                                   std::shared_ptr<BackingStore> backing_store) {
-  auto length = buffer.PerIsolateAccountingLength();
+  auto length = backing_store->PerIsolateAccountingLength();
   page_->IncrementExternalBackingStoreBytes(
       ExternalBackingStoreType::kArrayBuffer, length);
 
@@ -161,7 +161,7 @@ std::shared_ptr<BackingStore> LocalArrayBufferTracker::Remove(
   array_buffers_.erase(it);
 
   // Update accounting.
-  auto length = buffer.PerIsolateAccountingLength();
+  auto length = backing_store->PerIsolateAccountingLength();
   page_->DecrementExternalBackingStoreBytes(
       ExternalBackingStoreType::kArrayBuffer, length);
 
diff --git a/src/heap/array-buffer-tracker.cc b/src/heap/array-buffer-tracker.cc
index 41b5d4697a..e79f86942f 100644
--- a/src/heap/array-buffer-tracker.cc
+++ b/src/heap/array-buffer-tracker.cc
@@ -50,7 +50,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
           tracker = target_page->local_tracker();
         }
         DCHECK_NOT_NULL(tracker);
-        const size_t length = old_buffer.PerIsolateAccountingLength();
+        const size_t length = it->second->PerIsolateAccountingLength();
         // We should decrement before adding to avoid potential overflows in
         // the external memory counters.
         tracker->AddInternal(new_buffer, std::move(it->second));
@@ -60,8 +60,8 @@ void LocalArrayBufferTracker::Process(Callback callback) {
             static_cast<MemoryChunk*>(target_page), length);
       }
     } else if (result == kRemoveEntry) {
-      freed_memory += old_buffer.PerIsolateAccountingLength();
       auto backing_store = std::move(it->second);
+      freed_memory += backing_store->PerIsolateAccountingLength();
       TRACE_BS("ABT:queue bs=%p mem=%p (length=%zu) cnt=%ld\n",
                backing_store.get(), backing_store->buffer_start(),
                backing_store->byte_length(), backing_store.use_count());
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 7a75e7293f..ce89466970 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -2200,7 +2200,9 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
       } else if (object.IsJSArrayBuffer()) {
         JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
         if (ArrayBufferTracker::IsTracked(array_buffer)) {
-          size_t size = array_buffer.PerIsolateAccountingLength();
+          size_t size =
+              ArrayBufferTracker::Lookup(isolate->heap(), array_buffer)
+                  ->PerIsolateAccountingLength();
           external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
         }
       }
@@ -2698,7 +2700,8 @@ void NewSpace::Verify(Isolate* isolate) {
     } else if (object.IsJSArrayBuffer()) {
       JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
       if (ArrayBufferTracker::IsTracked(array_buffer)) {
-        size_t size = array_buffer.PerIsolateAccountingLength();
+        size_t size = ArrayBufferTracker::Lookup(heap(), array_buffer)
+                          ->PerIsolateAccountingLength();
         external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
       }
     }
diff --git a/src/objects/backing-store.cc b/src/objects/backing-store.cc
index 1f42420557..d7fc88aa05 100644
--- a/src/objects/backing-store.cc
+++ b/src/objects/backing-store.cc
@@ -263,7 +263,8 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
                                  false,   // is_wasm_memory
                                  true,    // free_on_destruct
                                  false,   // has_guard_regions
-                                 false);  // custom_deleter
+                                 false,   // custom_deleter
+                                 false);  // empty_deleter
 
   TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), byte_length);
@@ -388,7 +389,8 @@ std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
                                  true,    // is_wasm_memory
                                  true,    // free_on_destruct
                                  guards,  // has_guard_regions
-                                 false);  // custom_deleter
+                                 false,   // custom_deleter
+                                 false);  // empty_deleter
 
   TRACE_BS("BSw:alloc bs=%p mem=%p (length=%zu, capacity=%zu)\n", result,
            result->buffer_start(), byte_length, byte_capacity);
@@ -492,7 +494,7 @@ bool BackingStore::GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages,
     }
   }
 
-  if (!is_shared_) {
+  if (!is_shared_ && free_on_destruct_) {
     // Only do per-isolate accounting for non-shared backing stores.
     reinterpret_cast<v8::Isolate*>(isolate)
         ->AdjustAmountOfExternalAllocatedMemory(new_length - old_length);
@@ -534,7 +536,8 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
                                  false,             // is_wasm_memory
                                  free_on_destruct,  // free_on_destruct
                                  false,             // has_guard_regions
-                                 false);            // custom_deleter
+                                 false,             // custom_deleter
+                                 false);            // empty_deleter
   result->SetAllocatorFromIsolate(isolate);
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
@@ -543,8 +546,9 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
 
 std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
     void* allocation_base, size_t allocation_length,
-    v8::BackingStoreDeleterCallback deleter, void* deleter_data,
+    v8::BackingStore::DeleterCallback deleter, void* deleter_data,
     SharedFlag shared) {
+  bool is_empty_deleter = (deleter == v8::BackingStore::EmptyDeleter);
   auto result = new BackingStore(allocation_base,    // start
                                  allocation_length,  // length
                                  allocation_length,  // capacity
@@ -552,7 +556,8 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
                                  false,              // is_wasm_memory
                                  true,               // free_on_destruct
                                  false,              // has_guard_regions
-                                 true);              // custom_deleter
+                                 true,               // custom_deleter
+                                 is_empty_deleter);  // empty_deleter
   result->type_specific_data_.deleter = {deleter, deleter_data};
   TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
            result->buffer_start(), result->byte_length());
@@ -568,7 +573,8 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
                                  false,   // is_wasm_memory
                                  true,    // free_on_destruct
                                  false,   // has_guard_regions
-                                 false);  // custom_deleter
+                                 false,   // custom_deleter
+                                 false);  // empty_deleter
 
   return std::unique_ptr<BackingStore>(result);
 }
diff --git a/src/objects/backing-store.h b/src/objects/backing-store.h
index 7611c4f2d3..e9a7c8ec15 100644
--- a/src/objects/backing-store.h
+++ b/src/objects/backing-store.h
@@ -66,7 +66,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
 
   static std::unique_ptr<BackingStore> WrapAllocation(
       void* allocation_base, size_t allocation_length,
-      v8::BackingStoreDeleterCallback deleter, void* deleter_data,
+      v8::BackingStore::DeleterCallback deleter, void* deleter_data,
       SharedFlag shared);
 
   // Create an empty backing store.
@@ -120,12 +120,31 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
   // Update all shared memory objects in this isolate (after a grow operation).
   static void UpdateSharedWasmMemoryObjects(Isolate* isolate);
 
+  // Returns the size of the external memory owned by this backing store.
+  // It is used for triggering GCs based on the external memory pressure.
+  size_t PerIsolateAccountingLength() {
+    if (is_shared_) {
+      // TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems
+      // with accounting for per-isolate external memory. In particular, sharing
+      // the same array buffer or memory multiple times, which happens in stress
+      // tests, can cause overcounting, leading to GC thrashing. Fix with global
+      // accounting?
+      return 0;
+    }
+    if (empty_deleter_) {
+      // The backing store has an empty deleter. Even if the backing store is
+      // freed after GC, it would not free the memory block.
+      return 0;
+    }
+    return byte_length();
+  }
+
  private:
   friend class GlobalBackingStoreRegistry;
 
   BackingStore(void* buffer_start, size_t byte_length, size_t byte_capacity,
                SharedFlag shared, bool is_wasm_memory, bool free_on_destruct,
-               bool has_guard_regions, bool custom_deleter)
+               bool has_guard_regions, bool custom_deleter, bool empty_deleter)
       : buffer_start_(buffer_start),
         byte_length_(byte_length),
         byte_capacity_(byte_capacity),
@@ -135,7 +154,8 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
         free_on_destruct_(free_on_destruct),
         has_guard_regions_(has_guard_regions),
         globally_registered_(false),
-        custom_deleter_(custom_deleter) {}
+        custom_deleter_(custom_deleter),
+        empty_deleter_(empty_deleter) {}
   void SetAllocatorFromIsolate(Isolate* isolate);
 
   void* buffer_start_ = nullptr;
@@ -143,7 +163,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
   size_t byte_capacity_ = 0;
 
   struct DeleterInfo {
-    v8::BackingStoreDeleterCallback callback;
+    v8::BackingStore::DeleterCallback callback;
     void* data;
   };
 
@@ -178,6 +198,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
   bool has_guard_regions_ : 1;
   bool globally_registered_ : 1;
   bool custom_deleter_ : 1;
+  bool empty_deleter_ : 1;
 
   // Accessors for type-specific data.
   v8::ArrayBuffer::Allocator* get_v8_api_array_buffer_allocator();
diff --git a/src/objects/js-array-buffer-inl.h b/src/objects/js-array-buffer-inl.h
index e5f361acff..b77f5580e2 100644
--- a/src/objects/js-array-buffer-inl.h
+++ b/src/objects/js-array-buffer-inl.h
@@ -156,13 +156,6 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
 BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
                     JSArrayBuffer::IsSharedBit)
 
-size_t JSArrayBuffer::PerIsolateAccountingLength() {
-  // TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems with
-  // accounting for per-isolate external memory. In particular, sharing the same
-  // array buffer or memory multiple times, which happens in stress tests, can
-  // cause overcounting, leading to GC thrashing. Fix with global accounting?
-  return is_shared() ? 0 : byte_length();
-}
 
 size_t JSArrayBufferView::byte_offset() const {
   return ReadField<size_t>(kByteOffsetOffset);
diff --git a/src/objects/js-array-buffer.cc b/src/objects/js-array-buffer.cc
index 76ac340179..0c2aca6d71 100644
--- a/src/objects/js-array-buffer.cc
+++ b/src/objects/js-array-buffer.cc
@@ -67,9 +67,9 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
   if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
     Heap* heap = GetIsolate()->heap();
     ArrayBufferExtension* extension = EnsureExtension();
-    extension->set_backing_store(std::move(backing_store));
-    size_t bytes = PerIsolateAccountingLength();
+    size_t bytes = backing_store->PerIsolateAccountingLength();
     extension->set_accounting_length(bytes);
+    extension->set_backing_store(std::move(backing_store));
     heap->AppendArrayBufferExtension(*this, extension);
   } else {
     GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
diff --git a/src/objects/js-array-buffer.h b/src/objects/js-array-buffer.h
index e6a5ea6bbd..624b716713 100644
--- a/src/objects/js-array-buffer.h
+++ b/src/objects/js-array-buffer.h
@@ -110,8 +110,6 @@ class JSArrayBuffer : public JSObject {
   void YoungMarkExtension();
   void YoungMarkExtensionPromoted();
 
-  inline size_t PerIsolateAccountingLength();
-
   // Dispatched behavior.
   DECL_PRINTER(JSArrayBuffer)
   DECL_VERIFIER(JSArrayBuffer)
diff --git a/test/cctest/test-api-array-buffer.cc b/test/cctest/test-api-array-buffer.cc
index e8e026f156..b15fe80151 100644
--- a/test/cctest/test-api-array-buffer.cc
+++ b/test/cctest/test-api-array-buffer.cc
@@ -636,6 +636,43 @@ TEST(SharedArrayBuffer_NewBackingStore_CustomDeleter) {
   CHECK(backing_store_custom_called);
 }
 
+TEST(ArrayBuffer_NewBackingStore_EmptyDeleter) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+  char static_buffer[100];
+  std::unique_ptr<v8::BackingStore> backing_store =
+      v8::ArrayBuffer::NewBackingStore(static_buffer, sizeof(static_buffer),
+                                       v8::BackingStore::EmptyDeleter, nullptr);
+  uint64_t external_memory_before =
+      isolate->AdjustAmountOfExternalAllocatedMemory(0);
+  v8::ArrayBuffer::New(isolate, std::move(backing_store));
+  uint64_t external_memory_after =
+      isolate->AdjustAmountOfExternalAllocatedMemory(0);
+  // The ArrayBuffer constructor does not increase the external memory counter.
+  // The counter may decrease however if the allocation triggers GC.
+  CHECK_GE(external_memory_before, external_memory_after);
+}
+
+TEST(SharedArrayBuffer_NewBackingStore_EmptyDeleter) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+  char static_buffer[100];
+  std::unique_ptr<v8::BackingStore> backing_store =
+      v8::SharedArrayBuffer::NewBackingStore(
+          static_buffer, sizeof(static_buffer), v8::BackingStore::EmptyDeleter,
+          nullptr);
+  uint64_t external_memory_before =
+      isolate->AdjustAmountOfExternalAllocatedMemory(0);
+  v8::SharedArrayBuffer::New(isolate, std::move(backing_store));
+  uint64_t external_memory_after =
+      isolate->AdjustAmountOfExternalAllocatedMemory(0);
+  // The SharedArrayBuffer constructor does not increase the external memory
+  // counter. The counter may decrease however if the allocation triggers GC.
+  CHECK_GE(external_memory_before, external_memory_after);
+}
+
 THREADED_TEST(BackingStore_NotShared) {
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
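
Usage note (editorial addition, not part of the patch): a minimal embedder-side sketch of the new EmptyDeleter entry point, mirroring the ArrayBuffer_NewBackingStore_EmptyDeleter test above. The function name WrapStaticBuffer is illustrative only, `isolate` is assumed to be a live v8::Isolate owned by the embedder, and include paths depend on the embedder's build setup.

#include <memory>
#include <v8.h>

// A statically allocated block whose lifetime outlives any ArrayBuffer built
// over it, so V8 must never attempt to free it.
static char static_buffer[1024];

v8::Local<v8::ArrayBuffer> WrapStaticBuffer(v8::Isolate* isolate) {
  // Passing v8::BackingStore::EmptyDeleter (with nullptr deleter_data) tells
  // V8 that the memory is managed outside of V8: the BackingStore destructor
  // does not free the block, and it is not counted as external memory when
  // triggering GCs.
  std::unique_ptr<v8::BackingStore> store = v8::ArrayBuffer::NewBackingStore(
      static_buffer, sizeof(static_buffer), v8::BackingStore::EmptyDeleter,
      nullptr);
  return v8::ArrayBuffer::New(isolate, std::move(store));
}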