[api] Support v8::BackingStores with empty deleters

This adjusts v8::[Shared]ArrayBuffer::NewBackingStore to allow passing
a known empty deleter -- v8::BackingStore::EmptyDeleter. Such an API is
useful if the backing store memory is static or is manually managed.

We can skip adjusting the amount of external memory for ArrayBuffers
with empty deleters and thus avoid scheduling ineffective GCs.

Bug: chromium:1061960

Change-Id: I0ef5b2b0839098beb59d5cebbb28f9f81a73a042
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2105355
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66740}
Author: Ulan Degenbaev
Date: 2020-03-16 17:49:58 +01:00
Committer: Commit Bot
Parent: 1a46de717b
Commit: 5cf02f0f20
11 changed files with 122 additions and 40 deletions
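
For context, a minimal usage sketch of the new API from the embedder's side. It is not part of the change itself; the buffer and function names are illustrative, and only the v8::ArrayBuffer::NewBackingStore / v8::BackingStore::EmptyDeleter / v8::ArrayBuffer::New calls come from the diff and tests below.

#include <memory>
#include "v8.h"

// Static storage: V8 must never free it, so pass BackingStore::EmptyDeleter
// and a nullptr deleter_data. No external memory is accounted for it, so no
// extra GCs are scheduled on its behalf.
static char kStaticBuffer[1024];

v8::Local<v8::ArrayBuffer> WrapStaticBuffer(v8::Isolate* isolate) {
  std::unique_ptr<v8::BackingStore> store = v8::ArrayBuffer::NewBackingStore(
      kStaticBuffer, sizeof(kStaticBuffer), v8::BackingStore::EmptyDeleter,
      nullptr);
  return v8::ArrayBuffer::New(isolate, std::move(store));
}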

View File

@@ -4971,6 +4971,25 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
v8::Isolate* isolate, std::unique_ptr<BackingStore> backing_store,
size_t byte_length);
/**
* This callback is used only if the memory block for a BackingStore cannot be
* allocated with an ArrayBuffer::Allocator. In such cases the destructor of
* the BackingStore invokes the callback to free the memory block.
*/
using DeleterCallback = void (*)(void* data, size_t length,
void* deleter_data);
/**
* If the memory block of a BackingStore is static or is managed manually,
* then this empty deleter along with nullptr deleter_data can be passed to
* ArrayBuffer::NewBackingStore to indicate that.
*
* The manually managed case should be used with caution and only when it
* is guaranteed that the memory block freeing happens after detaching its
* ArrayBuffer.
*/
static void EmptyDeleter(void* data, size_t length, void* deleter_data);
private:
/**
* See [Shared]ArrayBuffer::GetBackingStore and
@@ -4979,14 +4998,13 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
BackingStore();
};
/**
* This callback is used only if the memory block for this backing store cannot
* be allocated with an ArrayBuffer::Allocator. In such cases the destructor
* of this backing store object invokes the callback to free the memory block.
*/
#if !defined(V8_IMMINENT_DEPRECATION_WARNINGS)
// Use v8::BackingStore::DeleterCallback instead.
using BackingStoreDeleterCallback = void (*)(void* data, size_t length,
void* deleter_data);
#endif
/**
* An instance of the built-in ArrayBuffer constructor (ES6 draft 15.13.5).
*/
@@ -5174,7 +5192,7 @@ class V8_EXPORT ArrayBuffer : public Object {
* to the buffer must not be passed again to any V8 API function.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
void* deleter_data);
/**
@@ -5656,7 +5674,7 @@ class V8_EXPORT SharedArrayBuffer : public Object {
* to the buffer must not be passed again to any V8 functions.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
void* deleter_data);
/**
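
For contrast, a sketch of the case the DeleterCallback above exists for: memory that V8 must free through a custom callback once the backing store is destructed. The helper names here are illustrative, not part of this change.

#include <cstdlib>
#include <memory>
#include "v8.h"

// Matches the v8::BackingStore::DeleterCallback signature declared above.
// Invoked by the BackingStore destructor because this memory block was not
// allocated through an ArrayBuffer::Allocator.
static void FreeHeapBlock(void* data, size_t length, void* deleter_data) {
  std::free(data);  // length and deleter_data are unused in this sketch.
}

std::unique_ptr<v8::BackingStore> WrapHeapBuffer(size_t byte_length) {
  void* data = std::malloc(byte_length);  // Error handling omitted.
  return v8::ArrayBuffer::NewBackingStore(data, byte_length, FreeHeapBlock,
                                          nullptr);
}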

View File

@@ -3842,6 +3842,12 @@ std::unique_ptr<v8::BackingStore> v8::BackingStore::Reallocate(
return backing_store;
}
// static
void v8::BackingStore::EmptyDeleter(void* data, size_t length,
void* deleter_data) {
DCHECK_NULL(deleter_data);
}
std::shared_ptr<v8::BackingStore> v8::ArrayBuffer::GetBackingStore() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
std::shared_ptr<i::BackingStore> backing_store = self->GetBackingStore();
@@ -7577,7 +7583,7 @@ std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
}
std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
void* deleter_data) {
CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
std::unique_ptr<i::BackingStoreBase> backing_store =
@@ -7906,7 +7912,7 @@ std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
}
std::unique_ptr<v8::BackingStore> v8::SharedArrayBuffer::NewBackingStore(
void* data, size_t byte_length, BackingStoreDeleterCallback deleter,
void* data, size_t byte_length, v8::BackingStore::DeleterCallback deleter,
void* deleter_data) {
CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength);
std::unique_ptr<i::BackingStoreBase> backing_store =

View File

@@ -30,6 +30,7 @@ void ArrayBufferTracker::RegisterNew(
// ArrayBuffer tracking works only for small objects.
DCHECK(!heap->IsLargeObject(buffer));
DCHECK_EQ(backing_store->buffer_start(), buffer.backing_store());
const size_t length = backing_store->PerIsolateAccountingLength();
Page* page = Page::FromHeapObject(buffer);
{
@@ -49,7 +50,6 @@ void ArrayBufferTracker::RegisterNew(
// TODO(wez): Remove backing-store from external memory accounting.
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
const size_t length = buffer.PerIsolateAccountingLength();
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}
@@ -58,7 +58,6 @@ std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
Heap* heap, JSArrayBuffer buffer) {
std::shared_ptr<BackingStore> backing_store;
const size_t length = buffer.PerIsolateAccountingLength();
Page* page = Page::FromHeapObject(buffer);
{
base::MutexGuard guard(page->mutex());
@@ -68,6 +67,7 @@ std::shared_ptr<BackingStore> ArrayBufferTracker::Unregister(
}
// TODO(wez): Remove backing-store from external memory accounting.
const size_t length = backing_store->PerIsolateAccountingLength();
heap->update_external_memory(-static_cast<intptr_t>(length));
return backing_store;
}
@@ -90,7 +90,7 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
it != array_buffers_.end();) {
// Unchecked cast because the map might already be dead at this point.
JSArrayBuffer buffer = JSArrayBuffer::unchecked_cast(it->first);
const size_t length = buffer.PerIsolateAccountingLength();
const size_t length = it->second->PerIsolateAccountingLength();
if (should_free(buffer)) {
// Destroy the shared pointer, (perhaps) freeing the backing store.
@@ -127,7 +127,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
void LocalArrayBufferTracker::Add(JSArrayBuffer buffer,
std::shared_ptr<BackingStore> backing_store) {
auto length = buffer.PerIsolateAccountingLength();
auto length = backing_store->PerIsolateAccountingLength();
page_->IncrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);
@@ -161,7 +161,7 @@ std::shared_ptr<BackingStore> LocalArrayBufferTracker::Remove(
array_buffers_.erase(it);
// Update accounting.
auto length = buffer.PerIsolateAccountingLength();
auto length = backing_store->PerIsolateAccountingLength();
page_->DecrementExternalBackingStoreBytes(
ExternalBackingStoreType::kArrayBuffer, length);

View File

@@ -50,7 +50,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
const size_t length = old_buffer.PerIsolateAccountingLength();
const size_t length = it->second->PerIsolateAccountingLength();
// We should decrement before adding to avoid potential overflows in
// the external memory counters.
tracker->AddInternal(new_buffer, std::move(it->second));
@@ -60,8 +60,8 @@ void LocalArrayBufferTracker::Process(Callback callback) {
static_cast<MemoryChunk*>(target_page), length);
}
} else if (result == kRemoveEntry) {
freed_memory += old_buffer.PerIsolateAccountingLength();
auto backing_store = std::move(it->second);
freed_memory += backing_store->PerIsolateAccountingLength();
TRACE_BS("ABT:queue bs=%p mem=%p (length=%zu) cnt=%ld\n",
backing_store.get(), backing_store->buffer_start(),
backing_store->byte_length(), backing_store.use_count());

View File

@@ -2200,7 +2200,9 @@ void PagedSpace::Verify(Isolate* isolate, ObjectVisitor* visitor) {
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = array_buffer.PerIsolateAccountingLength();
size_t size =
ArrayBufferTracker::Lookup(isolate->heap(), array_buffer)
->PerIsolateAccountingLength();
external_page_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}
@@ -2698,7 +2700,8 @@ void NewSpace::Verify(Isolate* isolate) {
} else if (object.IsJSArrayBuffer()) {
JSArrayBuffer array_buffer = JSArrayBuffer::cast(object);
if (ArrayBufferTracker::IsTracked(array_buffer)) {
size_t size = array_buffer.PerIsolateAccountingLength();
size_t size = ArrayBufferTracker::Lookup(heap(), array_buffer)
->PerIsolateAccountingLength();
external_space_bytes[ExternalBackingStoreType::kArrayBuffer] += size;
}
}

View File

@@ -263,7 +263,8 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
false, // is_wasm_memory
true, // free_on_destruct
false, // has_guard_regions
false); // custom_deleter
false, // custom_deleter
false); // empty_deleter
TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
result->buffer_start(), byte_length);
@@ -388,7 +389,8 @@ std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
true, // is_wasm_memory
true, // free_on_destruct
guards, // has_guard_regions
false); // custom_deleter
false, // custom_deleter
false); // empty_deleter
TRACE_BS("BSw:alloc bs=%p mem=%p (length=%zu, capacity=%zu)\n", result,
result->buffer_start(), byte_length, byte_capacity);
@@ -492,7 +494,7 @@ bool BackingStore::GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages,
}
}
if (!is_shared_) {
if (!is_shared_ && free_on_destruct_) {
// Only do per-isolate accounting for non-shared backing stores.
reinterpret_cast<v8::Isolate*>(isolate)
->AdjustAmountOfExternalAllocatedMemory(new_length - old_length);
@@ -534,7 +536,8 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
false, // is_wasm_memory
free_on_destruct, // free_on_destruct
false, // has_guard_regions
false); // custom_deleter
false, // custom_deleter
false); // empty_deleter
result->SetAllocatorFromIsolate(isolate);
TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
result->buffer_start(), result->byte_length());
@@ -543,8 +546,9 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
void* allocation_base, size_t allocation_length,
v8::BackingStoreDeleterCallback deleter, void* deleter_data,
v8::BackingStore::DeleterCallback deleter, void* deleter_data,
SharedFlag shared) {
bool is_empty_deleter = (deleter == v8::BackingStore::EmptyDeleter);
auto result = new BackingStore(allocation_base, // start
allocation_length, // length
allocation_length, // capacity
@@ -552,7 +556,8 @@ std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
false, // is_wasm_memory
true, // free_on_destruct
false, // has_guard_regions
true); // custom_deleter
true, // custom_deleter
is_empty_deleter); // empty_deleter
result->type_specific_data_.deleter = {deleter, deleter_data};
TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
result->buffer_start(), result->byte_length());
@@ -568,7 +573,8 @@ std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
false, // is_wasm_memory
true, // free_on_destruct
false, // has_guard_regions
false); // custom_deleter
false, // custom_deleter
false); // empty_deleter
return std::unique_ptr<BackingStore>(result);
}

View File

@@ -66,7 +66,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
static std::unique_ptr<BackingStore> WrapAllocation(
void* allocation_base, size_t allocation_length,
v8::BackingStoreDeleterCallback deleter, void* deleter_data,
v8::BackingStore::DeleterCallback deleter, void* deleter_data,
SharedFlag shared);
// Create an empty backing store.
@@ -120,12 +120,31 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
// Update all shared memory objects in this isolate (after a grow operation).
static void UpdateSharedWasmMemoryObjects(Isolate* isolate);
// Returns the size of the external memory owned by this backing store.
// It is used for triggering GCs based on the external memory pressure.
size_t PerIsolateAccountingLength() {
if (is_shared_) {
// TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems
// with accounting for per-isolate external memory. In particular, sharing
// the same array buffer or memory multiple times, which happens in stress
// tests, can cause overcounting, leading to GC thrashing. Fix with global
// accounting?
return 0;
}
if (empty_deleter_) {
// The backing store has an empty deleter. Even if the backing store is
// freed after GC, it would not free the memory block.
return 0;
}
return byte_length();
}
private:
friend class GlobalBackingStoreRegistry;
BackingStore(void* buffer_start, size_t byte_length, size_t byte_capacity,
SharedFlag shared, bool is_wasm_memory, bool free_on_destruct,
bool has_guard_regions, bool custom_deleter)
bool has_guard_regions, bool custom_deleter, bool empty_deleter)
: buffer_start_(buffer_start),
byte_length_(byte_length),
byte_capacity_(byte_capacity),
@@ -135,7 +154,8 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
free_on_destruct_(free_on_destruct),
has_guard_regions_(has_guard_regions),
globally_registered_(false),
custom_deleter_(custom_deleter) {}
custom_deleter_(custom_deleter),
empty_deleter_(empty_deleter) {}
void SetAllocatorFromIsolate(Isolate* isolate);
void* buffer_start_ = nullptr;
@@ -143,7 +163,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
size_t byte_capacity_ = 0;
struct DeleterInfo {
v8::BackingStoreDeleterCallback callback;
v8::BackingStore::DeleterCallback callback;
void* data;
};
@@ -178,6 +198,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
bool has_guard_regions_ : 1;
bool globally_registered_ : 1;
bool custom_deleter_ : 1;
bool empty_deleter_ : 1;
// Accessors for type-specific data.
v8::ArrayBuffer::Allocator* get_v8_api_array_buffer_allocator();

View File

@@ -156,13 +156,6 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
JSArrayBuffer::IsSharedBit)
size_t JSArrayBuffer::PerIsolateAccountingLength() {
// TODO(titzer): SharedArrayBuffers and shared WasmMemorys cause problems with
// accounting for per-isolate external memory. In particular, sharing the same
// array buffer or memory multiple times, which happens in stress tests, can
// cause overcounting, leading to GC thrashing. Fix with global accounting?
return is_shared() ? 0 : byte_length();
}
size_t JSArrayBufferView::byte_offset() const {
return ReadField<size_t>(kByteOffsetOffset);

View File

@@ -67,9 +67,9 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
if (V8_ARRAY_BUFFER_EXTENSION_BOOL) {
Heap* heap = GetIsolate()->heap();
ArrayBufferExtension* extension = EnsureExtension();
extension->set_backing_store(std::move(backing_store));
size_t bytes = PerIsolateAccountingLength();
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
heap->AppendArrayBufferExtension(*this, extension);
} else {
GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));

View File

@@ -110,8 +110,6 @@ class JSArrayBuffer : public JSObject {
void YoungMarkExtension();
void YoungMarkExtensionPromoted();
inline size_t PerIsolateAccountingLength();
// Dispatched behavior.
DECL_PRINTER(JSArrayBuffer)
DECL_VERIFIER(JSArrayBuffer)

View File

@@ -636,6 +636,43 @@ TEST(SharedArrayBuffer_NewBackingStore_CustomDeleter) {
CHECK(backing_store_custom_called);
}
TEST(ArrayBuffer_NewBackingStore_EmptyDeleter) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
v8::HandleScope handle_scope(isolate);
char static_buffer[100];
std::unique_ptr<v8::BackingStore> backing_store =
v8::ArrayBuffer::NewBackingStore(static_buffer, sizeof(static_buffer),
v8::BackingStore::EmptyDeleter, nullptr);
uint64_t external_memory_before =
isolate->AdjustAmountOfExternalAllocatedMemory(0);
v8::ArrayBuffer::New(isolate, std::move(backing_store));
uint64_t external_memory_after =
isolate->AdjustAmountOfExternalAllocatedMemory(0);
// The ArrayBuffer constructor does not increase the external memory counter.
// The counter may decrease however if the allocation triggers GC.
CHECK_GE(external_memory_before, external_memory_after);
}
TEST(SharedArrayBuffer_NewBackingStore_EmptyDeleter) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
v8::HandleScope handle_scope(isolate);
char static_buffer[100];
std::unique_ptr<v8::BackingStore> backing_store =
v8::SharedArrayBuffer::NewBackingStore(
static_buffer, sizeof(static_buffer), v8::BackingStore::EmptyDeleter,
nullptr);
uint64_t external_memory_before =
isolate->AdjustAmountOfExternalAllocatedMemory(0);
v8::SharedArrayBuffer::New(isolate, std::move(backing_store));
uint64_t external_memory_after =
isolate->AdjustAmountOfExternalAllocatedMemory(0);
// The SharedArrayBuffer constructor does not increase the external memory
// counter. The counter may decrease however if the allocation triggers GC.
CHECK_GE(external_memory_before, external_memory_after);
}
THREADED_TEST(BackingStore_NotShared) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();