[wasm] Implement Managed<T> with std::shared_ptr<T>

This CL simplifies and extends the implementation of Managed<T>
and now uses a std::shared_ptr<T> underneath in order to offer
cross-isolate management of C++ allocated memory.

R=mstarzinger@chromium.org
CC=ulan@chromium.org

Bug: v8:7424
Cq-Include-Trybots: luci.v8.try:v8_linux_noi18n_rel_ng
Change-Id: Id43a26f565677e8c9cdfd73810568d4f2b1871fe
Reviewed-on: https://chromium-review.googlesource.com/1028190
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Commit-Queue: Ben Titzer <titzer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52820}
This commit is contained in:
Ben L. Titzer 2018-04-26 16:37:05 +02:00 committed by Commit Bot
parent 3f99a376dd
commit b66226828f
16 changed files with 395 additions and 291 deletions

View File

@ -1994,7 +1994,6 @@ v8_source_set("v8_base") {
"src/machine-type.h",
"src/macro-assembler-inl.h",
"src/macro-assembler.h",
"src/managed.h",
"src/map-updater.cc",
"src/map-updater.h",
"src/messages.cc",
@ -2041,6 +2040,8 @@ v8_source_set("v8_base") {
"src/objects/literal-objects-inl.h",
"src/objects/literal-objects.cc",
"src/objects/literal-objects.h",
"src/objects/managed.cc",
"src/objects/managed.h",
"src/objects/map-inl.h",
"src/objects/map.h",
"src/objects/maybe-object-inl.h",

View File

@ -2303,43 +2303,39 @@ Isolate::ThreadDataTable::ThreadDataTable() : table_() {}
Isolate::ThreadDataTable::~ThreadDataTable() {}
void Isolate::ReleaseManagedObjects() {
Isolate::ManagedObjectFinalizer* current =
managed_object_finalizers_list_.next_;
managed_object_finalizers_list_.next_ = nullptr;
while (current != nullptr) {
Isolate::ManagedObjectFinalizer* next = current->next_;
current->Dispose();
current = next;
void Isolate::ReleaseSharedPtrs() {
while (managed_ptr_destructors_head_) {
ManagedPtrDestructor* l = managed_ptr_destructors_head_;
ManagedPtrDestructor* n = nullptr;
managed_ptr_destructors_head_ = nullptr;
for (; l != nullptr; l = n) {
l->destructor_(l->shared_ptr_ptr_);
n = l->next_;
delete l;
}
}
// No new managed objects should pop up during finalization.
DCHECK_NULL(managed_object_finalizers_list_.next_);
}
void Isolate::RegisterForReleaseAtTeardown(
Isolate::ManagedObjectFinalizer* finalizer) {
DCHECK_NOT_NULL(finalizer->value_);
DCHECK_NOT_NULL(finalizer->deleter_);
DCHECK_NULL(finalizer->prev_);
DCHECK_NULL(finalizer->next_);
// Insert at head. We keep the head alive for the lifetime of the Isolate
// because otherwise we can't reset the head, should we delete it before
// the isolate expires
Isolate::ManagedObjectFinalizer* next = managed_object_finalizers_list_.next_;
managed_object_finalizers_list_.next_ = finalizer;
finalizer->prev_ = &managed_object_finalizers_list_;
finalizer->next_ = next;
if (next != nullptr) next->prev_ = finalizer;
void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
DCHECK_NULL(destructor->prev_);
DCHECK_NULL(destructor->next_);
if (managed_ptr_destructors_head_) {
managed_ptr_destructors_head_->prev_ = destructor;
}
destructor->next_ = managed_ptr_destructors_head_;
managed_ptr_destructors_head_ = destructor;
}
void Isolate::UnregisterFromReleaseAtTeardown(
Isolate::ManagedObjectFinalizer* finalizer) {
DCHECK_NOT_NULL(finalizer);
DCHECK_NOT_NULL(finalizer->prev_);
finalizer->prev_->next_ = finalizer->next_;
if (finalizer->next_ != nullptr) finalizer->next_->prev_ = finalizer->prev_;
void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
if (destructor->prev_) {
destructor->prev_->next_ = destructor->next_;
} else {
DCHECK_EQ(destructor, managed_ptr_destructors_head_);
managed_ptr_destructors_head_ = destructor->next_;
}
if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
destructor->prev_ = nullptr;
destructor->next_ = nullptr;
}
Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
@ -2662,9 +2658,8 @@ void Isolate::Deinit() {
// We start with the heap tear down so that releasing managed objects does
// not cause a GC.
heap_.StartTearDown();
// Release managed objects before shutting down the heap. The finalizer might
// need to access heap objects.
ReleaseManagedObjects();
ReleaseSharedPtrs();
delete deoptimizer_data_;
deoptimizer_data_ = nullptr;

View File

@ -103,6 +103,7 @@ class ThreadState;
class ThreadVisitor; // Defined in v8threads.h
class TracingCpuProfilerImpl;
class UnicodeCache;
struct ManagedPtrDestructor;
template <StateTag Tag> class VMState;
@ -550,7 +551,7 @@ class Isolate : private HiddenFactory {
// for legacy API reasons.
void TearDown();
void ReleaseManagedObjects();
void ReleaseSharedPtrs();
void ClearSerializerData();
@ -1350,41 +1351,11 @@ class Isolate : private HiddenFactory {
void set_allow_atomics_wait(bool set) { allow_atomics_wait_ = set; }
bool allow_atomics_wait() { return allow_atomics_wait_; }
// List of native heap values allocated by the runtime as part of its
// implementation that must be freed at isolate deinit.
class ManagedObjectFinalizer {
public:
using Deleter = void (*)(ManagedObjectFinalizer*);
ManagedObjectFinalizer(void* value, Deleter deleter)
: value_(value), deleter_(deleter) {}
void Dispose() { deleter_(this); }
void* value() const { return value_; }
private:
friend class Isolate;
ManagedObjectFinalizer() = default;
void* value_ = nullptr;
Deleter deleter_ = nullptr;
ManagedObjectFinalizer* prev_ = nullptr;
ManagedObjectFinalizer* next_ = nullptr;
};
static_assert(offsetof(ManagedObjectFinalizer, value_) == 0,
"value_ must be the first member");
// Register a finalizer to be called at isolate teardown.
void RegisterForReleaseAtTeardown(ManagedObjectFinalizer*);
void RegisterManagedPtrDestructor(ManagedPtrDestructor* finalizer);
// Unregister a previously registered value from release at
// isolate teardown.
// This transfers the responsibility of the previously managed value's
// deletion to the caller.
void UnregisterFromReleaseAtTeardown(ManagedObjectFinalizer*);
// Removes a previously-registered shared object finalizer.
void UnregisterManagedPtrDestructor(ManagedPtrDestructor* finalizer);
size_t elements_deletion_counter() { return elements_deletion_counter_; }
void set_elements_deletion_counter(size_t value) {
@ -1698,7 +1669,7 @@ class Isolate : private HiddenFactory {
bool allow_atomics_wait_;
ManagedObjectFinalizer managed_object_finalizers_list_;
ManagedPtrDestructor* managed_ptr_destructors_head_ = nullptr;
size_t total_regexp_code_generated_;

View File

@ -1,104 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_MANAGED_H_
#define V8_MANAGED_H_
#include "src/global-handles.h"
#include "src/handles.h"
#include "src/heap/factory.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
// An object that wraps a pointer to a C++ object and manages its lifetime.
// The C++ object will be deleted when the managed wrapper object is
// garbage collected, or, last resort, if the isolate is torn down before GC,
// as part of Isolate::Dispose().
// Managed<CppType> may be used polymorphically as Foreign, where the held
// address is typed as CppType**. The double indirection is due to the
// use, by Managed, of Isolate::ManagedObjectFinalizer, which has a CppType*
// first field.
// Calling Foreign::set_foreign_address is not allowed on a Managed object.
// NOTE(review): legacy raw-pointer implementation (this diff deletes it in
// favor of the std::shared_ptr-based src/objects/managed.h).
template <class CppType>
class Managed : public Foreign {
  // Extends the isolate's finalizer record with the location of the weak
  // global handle that keeps the wrapper discoverable by the GC.
  class FinalizerWithHandle : public Isolate::ManagedObjectFinalizer {
   public:
    FinalizerWithHandle(void* value,
                        Isolate::ManagedObjectFinalizer::Deleter deleter)
        : Isolate::ManagedObjectFinalizer(value, deleter) {}
    Object** global_handle_location;
  };

 public:
  // Returns the wrapped raw pointer.
  V8_INLINE CppType* get() {
    return reinterpret_cast<CppType*>(GetFinalizer()->value());
  }

  static Managed<CppType>* cast(Object* obj) {
    SLOW_DCHECK(obj->IsForeign());
    return reinterpret_cast<Managed<CppType>*>(obj);
  }

  // Allocate a new CppType and wrap it in a Managed.
  template <typename... Args>
  static Handle<Managed<CppType>> Allocate(Isolate* isolate, Args&&... args) {
    CppType* ptr = new CppType(std::forward<Args>(args)...);
    return From(isolate, ptr);
  }

  // Create a Managed from an existing CppType*. Takes ownership of the passed
  // object.
  static Handle<Managed<CppType>> From(Isolate* isolate, CppType* ptr) {
    FinalizerWithHandle* finalizer =
        new FinalizerWithHandle(ptr, &NativeDelete);
    // Registered so the isolate can free the object at teardown if the
    // wrapper is never garbage collected.
    isolate->RegisterForReleaseAtTeardown(finalizer);
    Handle<Managed<CppType>> handle = Handle<Managed<CppType>>::cast(
        isolate->factory()->NewForeign(reinterpret_cast<Address>(finalizer)));
    // A weak global handle lets the GC notify us when the wrapper dies.
    Handle<Object> global_handle = isolate->global_handles()->Create(*handle);
    finalizer->global_handle_location = global_handle.location();
    GlobalHandles::MakeWeak(
        finalizer->global_handle_location, handle->GetFinalizer(),
        &ResetWeakAndScheduleGCDelete, v8::WeakCallbackType::kParameter);
    return handle;
  }

 private:
  // First-pass GC callback: destroy the handle, unregister from the isolate,
  // and defer the actual deletion to the second pass.
  static void ResetWeakAndScheduleGCDelete(
      const v8::WeakCallbackInfo<void>& data) {
    FinalizerWithHandle* finalizer =
        reinterpret_cast<FinalizerWithHandle*>(data.GetParameter());
    GlobalHandles::Destroy(finalizer->global_handle_location);
    Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
    isolate->UnregisterFromReleaseAtTeardown(finalizer);
    // We need to call GCDelete as a second pass callback because
    // it can trigger garbage collection. The first pass callbacks
    // are not allowed to invoke V8 API.
    data.SetSecondPassCallback(&GCDelete);
  }

  // Second-pass GC callback: performs the deletion scheduled above.
  static void GCDelete(const v8::WeakCallbackInfo<void>& data) {
    FinalizerWithHandle* finalizer =
        reinterpret_cast<FinalizerWithHandle*>(data.GetParameter());
    NativeDelete(finalizer);
  }

  // Deletes both the wrapped CppType and the finalizer record itself.
  static void NativeDelete(Isolate::ManagedObjectFinalizer* finalizer) {
    CppType* typed_value = reinterpret_cast<CppType*>(finalizer->value());
    delete typed_value;
    FinalizerWithHandle* finalizer_with_handle =
        static_cast<FinalizerWithHandle*>(finalizer);
    delete finalizer_with_handle;
  }

  // The Foreign payload is the address of the FinalizerWithHandle record.
  FinalizerWithHandle* GetFinalizer() {
    return reinterpret_cast<FinalizerWithHandle*>(foreign_address());
  }
};
} // namespace internal
} // namespace v8
#endif // V8_MANAGED_H_

View File

@ -14,8 +14,8 @@
#include "src/global-handles.h"
#include "src/heap/factory.h"
#include "src/isolate.h"
#include "src/managed.h"
#include "src/objects-inl.h"
#include "src/objects/managed.h"
#include "src/property-descriptor.h"
#include "unicode/brkiter.h"
#include "unicode/bytestream.h"
@ -952,7 +952,7 @@ bool Collator::InitializeCollator(Isolate* isolate,
}
Handle<Managed<icu::Collator>> managed =
Managed<icu::Collator>::From(isolate, collator);
Managed<icu::Collator>::FromRawPtr(isolate, collator);
collator_holder->SetEmbedderField(0, *managed);
return true;
@ -960,7 +960,7 @@ bool Collator::InitializeCollator(Isolate* isolate,
icu::Collator* Collator::UnpackCollator(Isolate* isolate,
Handle<JSObject> obj) {
return Managed<icu::Collator>::cast(obj->GetEmbedderField(0))->get();
return Managed<icu::Collator>::cast(obj->GetEmbedderField(0))->raw();
}
bool PluralRules::InitializePluralRules(Isolate* isolate, Handle<String> locale,

36
src/objects/managed.cc Normal file
View File

@ -0,0 +1,36 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/objects/managed.h"
namespace v8 {
namespace internal {
namespace {
// Second-pass GC callback for a collected Managed<CppType>: runs the
// type-erased destructor on the heap-allocated shared_ptr, then frees the
// bookkeeping record itself.
void ManagedObjectFinalizerSecondPass(const v8::WeakCallbackInfo<void>& data) {
  ManagedPtrDestructor* record =
      reinterpret_cast<ManagedPtrDestructor*>(data.GetParameter());
  record->destructor_(record->shared_ptr_ptr_);
  delete record;
}
}  // namespace
// Called by the GC in its first pass when a Managed<CppType> is
// garbage collected.
void ManagedObjectFinalizer(const v8::WeakCallbackInfo<void>& data) {
  auto destructor =
      reinterpret_cast<ManagedPtrDestructor*>(data.GetParameter());
  // Drop the weak global handle so the GC will not report this wrapper again.
  GlobalHandles::Destroy(destructor->global_handle_location_);
  // Unlink from the isolate's teardown list: the GC (second pass below) is
  // now responsible for running the destructor, not isolate shutdown.
  Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
  isolate->UnregisterManagedPtrDestructor(destructor);
  // We need to do the main work as a second pass callback because
  // it can trigger garbage collection. The first pass callbacks
  // are not allowed to invoke V8 API.
  data.SetSecondPassCallback(&ManagedObjectFinalizerSecondPass);
}
} // namespace internal
} // namespace v8

112
src/objects/managed.h Normal file
View File

@ -0,0 +1,112 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_MANAGED_H_
#define V8_OBJECTS_MANAGED_H_
#include <memory>
#include "src/global-handles.h"
#include "src/handles.h"
#include "src/heap/factory.h"
#include "src/isolate.h"
namespace v8 {
namespace internal {
// Node in the isolate's doubly-linked list of destructors. The isolate walks
// this list at teardown (Isolate::ReleaseSharedPtrs) to release any
// shared_ptr copies whose Managed<T> wrappers were never garbage collected.
struct ManagedPtrDestructor {
  // Intrusive list links, maintained by Isolate::RegisterManagedPtrDestructor
  // and Isolate::UnregisterManagedPtrDestructor.
  ManagedPtrDestructor* prev_ = nullptr;
  ManagedPtrDestructor* next_ = nullptr;
  // Type-erased pointer to a heap-allocated std::shared_ptr<T>.
  void* shared_ptr_ptr_ = nullptr;
  // Deletes the shared_ptr above; set to Managed<T>::Destructor.
  void (*destructor_)(void* shared_ptr) = nullptr;
  // Location of the weak global handle that tracks the Managed<T> wrapper.
  Object** global_handle_location_ = nullptr;
  ManagedPtrDestructor(void* shared_ptr_ptr, void (*destructor)(void*))
      : shared_ptr_ptr_(shared_ptr_ptr), destructor_(destructor) {}
};
// The GC finalizer of a managed object, which does not depend on
// the template parameter.
void ManagedObjectFinalizer(const v8::WeakCallbackInfo<void>& data);
// {Managed<T>} is essentially a {std::shared_ptr<T>} allocated on the heap
// that can be used to manage the lifetime of C++ objects that are shared
// across multiple isolates.
// When a {Managed<T>} object is garbage collected (or an isolate which
// contains {Managed<T>} is torn down), the {Managed<T>} deletes its underlying
// {std::shared_ptr<T>}, thereby decrementing its internal reference count,
// which will delete the C++ object when the reference count drops to 0.
template <class CppType>
class Managed : public Foreign {
 public:
  // Get a raw pointer to the C++ object. Valid only while some Managed<T> or
  // std::shared_ptr<T> still owns the object.
  V8_INLINE CppType* raw() { return GetSharedPtrPtr()->get(); }

  // Get a copy of the shared pointer to the C++ object (adds a reference).
  V8_INLINE std::shared_ptr<CppType> get() { return *GetSharedPtrPtr(); }

  static Managed<CppType>* cast(Object* obj) {
    SLOW_DCHECK(obj->IsForeign());
    return reinterpret_cast<Managed<CppType>*>(obj);
  }

  // Allocate a new {CppType} and wrap it in a {Managed<CppType>}.
  template <typename... Args>
  static Handle<Managed<CppType>> Allocate(Isolate* isolate, Args&&... args) {
    // Note: plain `new` (not make_shared) keeps construction in this scope,
    // so CppType constructors visible only to Managed still work.
    return FromSharedPtr(
        isolate,
        std::shared_ptr<CppType>(new CppType(std::forward<Args>(args)...)));
  }

  // Create a {Managed<CppType>} from an existing raw {CppType*}. The returned
  // object will now own the memory pointed to by {CppType}.
  static Handle<Managed<CppType>> FromRawPtr(Isolate* isolate, CppType* ptr) {
    return FromSharedPtr(isolate, std::shared_ptr<CppType>(ptr));
  }

  // Create a {Managed<CppType>} from an existing {std::unique_ptr<CppType>}.
  // The returned object will now own the memory pointed to by {CppType}, and
  // the unique pointer will be released.
  static Handle<Managed<CppType>> FromUniquePtr(
      Isolate* isolate, std::unique_ptr<CppType> unique_ptr) {
    return FromSharedPtr(isolate, std::move(unique_ptr));
  }

  // Create a {Managed<CppType>} from an existing {std::shared_ptr<CppType>}.
  static Handle<Managed<CppType>> FromSharedPtr(
      Isolate* isolate, std::shared_ptr<CppType> shared_ptr) {
    // Move the by-value {shared_ptr} into the heap-allocated copy instead of
    // copying it; this saves an atomic refcount increment/decrement pair.
    auto destructor = new ManagedPtrDestructor(
        new std::shared_ptr<CppType>(std::move(shared_ptr)), Destructor);
    // The Managed<CppType> is a Foreign whose payload is the address of the
    // destructor record.
    Handle<Managed<CppType>> handle = Handle<Managed<CppType>>::cast(
        isolate->factory()->NewForeign(reinterpret_cast<Address>(destructor)));
    // A weak global handle lets the GC call ManagedObjectFinalizer when the
    // wrapper object becomes unreachable.
    Handle<Object> global_handle = isolate->global_handles()->Create(*handle);
    destructor->global_handle_location_ = global_handle.location();
    GlobalHandles::MakeWeak(destructor->global_handle_location_, destructor,
                            &ManagedObjectFinalizer,
                            v8::WeakCallbackType::kParameter);
    // Also register with the isolate so the shared_ptr is released at isolate
    // teardown even if the wrapper is never garbage collected.
    isolate->RegisterManagedPtrDestructor(destructor);
    return handle;
  }

 private:
  // Internally this {Foreign} object stores a pointer to a new
  // std::shared_ptr<CppType>.
  std::shared_ptr<CppType>* GetSharedPtrPtr() {
    auto destructor =
        reinterpret_cast<ManagedPtrDestructor*>(foreign_address());
    return reinterpret_cast<std::shared_ptr<CppType>*>(
        destructor->shared_ptr_ptr_);
  }

  // Called by either isolate shutdown or the {ManagedObjectFinalizer} in
  // order to actually delete the shared pointer (i.e. decrement its refcount).
  static void Destructor(void* ptr) {
    auto shared_ptr_ptr = reinterpret_cast<std::shared_ptr<CppType>*>(ptr);
    delete shared_ptr_ptr;
  }
};
} // namespace internal
} // namespace v8
#endif // V8_OBJECTS_MANAGED_H_

View File

@ -861,7 +861,7 @@ Address CompileLazy(Isolate* isolate,
}
IndirectPatcher* patcher = Managed<IndirectPatcher>::cast(
caller_instance->managed_indirect_patcher())
->get();
->raw();
Address old_target = lazy_stub->instruction_start();
patcher->Patch(*caller_instance, *target_instance, target_func_index,
old_target, result);
@ -1378,7 +1378,7 @@ MaybeHandle<WasmModuleObject> CompileToModuleObjectInternal(
// The {module_wrapper} will take ownership of the {WasmModule} object,
// and it will be destroyed when the GC reclaims the wrapper object.
Handle<WasmModuleWrapper> module_wrapper =
WasmModuleWrapper::From(isolate, module.release());
WasmModuleWrapper::FromUniquePtr(isolate, std::move(module));
// Create the shared module data.
// TODO(clemensh): For the same module (same bytes / same hash), we should
@ -2867,8 +2867,8 @@ void AsyncCompileJob::FinishCompile() {
// The {module_wrapper} will take ownership of the {WasmModule} object,
// and it will be destroyed when the GC reclaims the wrapper object.
Handle<WasmModuleWrapper> module_wrapper =
WasmModuleWrapper::From(isolate_, module_.release());
Handle<Managed<WasmModule>> module_wrapper =
Managed<WasmModule>::FromUniquePtr(isolate_, std::move(module_));
// Create the shared module data.
// TODO(clemensh): For the same module (same bytes / same hash), we should

View File

@ -293,7 +293,7 @@ class InterpreterHandle {
// interpreter.
DCHECK_EQ(this, Managed<wasm::InterpreterHandle>::cast(
instance_obj->debug_info()->interpreter_handle())
->get());
->raw());
return instance_obj;
}
@ -539,19 +539,19 @@ wasm::InterpreterHandle* GetOrCreateInterpreterHandle(
debug_info->set_interpreter_handle(*handle);
}
return Handle<Managed<wasm::InterpreterHandle>>::cast(handle)->get();
return Handle<Managed<wasm::InterpreterHandle>>::cast(handle)->raw();
}
wasm::InterpreterHandle* GetInterpreterHandle(WasmDebugInfo* debug_info) {
Object* handle_obj = debug_info->interpreter_handle();
DCHECK(!handle_obj->IsUndefined(debug_info->GetIsolate()));
return Managed<wasm::InterpreterHandle>::cast(handle_obj)->get();
return Managed<wasm::InterpreterHandle>::cast(handle_obj)->raw();
}
wasm::InterpreterHandle* GetInterpreterHandleOrNull(WasmDebugInfo* debug_info) {
Object* handle_obj = debug_info->interpreter_handle();
if (handle_obj->IsUndefined(debug_info->GetIsolate())) return nullptr;
return Managed<wasm::InterpreterHandle>::cast(handle_obj)->get();
return Managed<wasm::InterpreterHandle>::cast(handle_obj)->raw();
}
int GetNumFunctions(WasmInstanceObject* instance) {
@ -638,7 +638,7 @@ wasm::WasmInterpreter* WasmDebugInfo::SetupForTesting(
auto interp_handle =
Managed<wasm::InterpreterHandle>::Allocate(isolate, isolate, *debug_info);
debug_info->set_interpreter_handle(*interp_handle);
auto ret = interp_handle->get()->interpreter();
auto ret = interp_handle->raw()->interpreter();
ret->SetCallIndirectTestMode();
return ret;
}
@ -759,7 +759,7 @@ Handle<JSFunction> WasmDebugInfo::GetCWasmEntry(
debug_info->set_c_wasm_entry_map(*managed_map);
}
Handle<FixedArray> entries(debug_info->c_wasm_entries(), isolate);
wasm::SignatureMap* map = debug_info->c_wasm_entry_map()->get();
wasm::SignatureMap* map = debug_info->c_wasm_entry_map()->raw();
int32_t index = map->Find(sig);
if (index == -1) {
index = static_cast<int32_t>(map->FindOrInsert(sig));

View File

@ -10,7 +10,7 @@
#include "src/debug/debug-interface.h"
#include "src/globals.h"
#include "src/handles.h"
#include "src/managed.h"
#include "src/objects/managed.h"
#include "src/parsing/preparse-data.h"
#include "src/wasm/decoder.h"

View File

@ -131,7 +131,7 @@ WasmInstanceNativeAllocations* GetNativeAllocations(
WasmInstanceObject* instance) {
return reinterpret_cast<Managed<WasmInstanceNativeAllocations>*>(
instance->managed_native_allocations())
->get();
->raw();
}
// An iterator that returns first the module itself, then all modules linked via
@ -974,13 +974,7 @@ wasm::WasmCode* WasmExportedFunction::GetWasmCode() {
}
WasmModule* WasmSharedModuleData::module() const {
// We populate the kModuleWrapper field with a Foreign holding the
// address to the address of a WasmModule. This is because we can
// handle both cases when the WasmModule's lifetime is managed through
// a Managed<WasmModule> object, as well as cases when it's managed
// by the embedder. CcTests fall into the latter case.
return *(reinterpret_cast<WasmModule**>(
Foreign::cast(module_wrapper())->foreign_address()));
return Managed<WasmModule>::cast(module_wrapper())->raw();
}
Handle<WasmSharedModuleData> WasmSharedModuleData::New(
@ -1377,13 +1371,12 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(
compiled_module->set_export_wrappers(*export_wrappers);
}
compiled_module->set_weak_owning_instance(isolate->heap()->empty_weak_cell());
wasm::NativeModule* native_module = nullptr;
{
std::unique_ptr<wasm::NativeModule> native_module_ptr =
auto native_module =
isolate->wasm_engine()->code_manager()->NewNativeModule(*module, env);
native_module = native_module_ptr.release();
Handle<Foreign> native_module_wrapper =
Managed<wasm::NativeModule>::From(isolate, native_module);
Managed<wasm::NativeModule>::FromUniquePtr(isolate,
std::move(native_module));
compiled_module->set_native_module(*native_module_wrapper);
compiled_module->GetNativeModule()->SetCompiledModule(compiled_module);
}
@ -1410,12 +1403,12 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
handle(module->export_wrappers(), isolate));
ret->set_export_wrappers(*export_copy);
std::unique_ptr<wasm::NativeModule> native_module =
module->GetNativeModule()->Clone();
auto native_module = module->GetNativeModule()->Clone();
// construct the wrapper in 2 steps, because its construction may trigger GC,
// which would shift the this pointer in set_native_module.
Handle<Foreign> native_module_wrapper =
Managed<wasm::NativeModule>::From(isolate, native_module.release());
Managed<wasm::NativeModule>::FromUniquePtr(isolate,
std::move(native_module));
ret->set_native_module(*native_module_wrapper);
ret->GetNativeModule()->SetCompiledModule(ret);
@ -1424,7 +1417,7 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
wasm::NativeModule* WasmCompiledModule::GetNativeModule() const {
if (!has_native_module()) return nullptr;
return Managed<wasm::NativeModule>::cast(native_module())->get();
return Managed<wasm::NativeModule>::cast(native_module())->raw();
}
void WasmCompiledModule::Reset(Isolate* isolate,

View File

@ -8,8 +8,8 @@
#include "src/base/bits.h"
#include "src/debug/debug.h"
#include "src/debug/interface-types.h"
#include "src/managed.h"
#include "src/objects.h"
#include "src/objects/managed.h"
#include "src/objects/script.h"
#include "src/wasm/decoder.h"
#include "src/wasm/wasm-interpreter.h"

View File

@ -693,7 +693,7 @@ MaybeHandle<WasmCompiledModule> DeserializeNativeModule(
// The {module_wrapper} will take ownership of the {WasmModule} object,
// and it will be destroyed when the GC reclaims the wrapper object.
Handle<WasmModuleWrapper> module_wrapper =
WasmModuleWrapper::From(isolate, decode_result.val.release());
WasmModuleWrapper::FromUniquePtr(isolate, std::move(decode_result.val));
Handle<Script> script = CreateWasmScript(isolate, wire_bytes);
Handle<WasmSharedModuleData> shared = WasmSharedModuleData::New(
isolate, module_wrapper, Handle<SeqOneByteString>::cast(module_bytes),

View File

@ -6,7 +6,7 @@
#include <stdlib.h>
#include <string.h>
#include "src/managed.h"
#include "src/objects/managed.h"
#include "src/objects-inl.h"
#include "test/cctest/cctest.h"
@ -14,84 +14,184 @@
namespace v8 {
namespace internal {
class DeleteRecorder {
class DeleteCounter {
public:
explicit DeleteRecorder(bool* deleted) : deleted_(deleted) {
*deleted_ = false;
}
~DeleteRecorder() { *deleted_ = true; }
static void Deleter(Isolate::ManagedObjectFinalizer* finalizer) {
delete *reinterpret_cast<DeleteRecorder**>(finalizer);
explicit DeleteCounter(int* deleted) : deleted_(deleted) { *deleted_ = 0; }
~DeleteCounter() { (*deleted_)++; }
static void Deleter(void* arg) {
delete reinterpret_cast<DeleteCounter*>(arg);
}
private:
bool* deleted_;
int* deleted_;
};
TEST(ManagedCollect) {
TEST(GCCausesDestruction) {
Isolate* isolate = CcTest::InitIsolateOnce();
bool deleted1 = false;
bool deleted2 = false;
DeleteRecorder* d1 = new DeleteRecorder(&deleted1);
DeleteRecorder* d2 = new DeleteRecorder(&deleted2);
Isolate::ManagedObjectFinalizer finalizer(d2, DeleteRecorder::Deleter);
isolate->RegisterForReleaseAtTeardown(&finalizer);
int deleted1 = 0;
int deleted2 = 0;
DeleteCounter* d1 = new DeleteCounter(&deleted1);
DeleteCounter* d2 = new DeleteCounter(&deleted2);
{
HandleScope scope(isolate);
auto handle = Managed<DeleteRecorder>::From(isolate, d1);
auto handle = Managed<DeleteCounter>::FromRawPtr(isolate, d1);
USE(handle);
}
CcTest::CollectAllAvailableGarbage();
CHECK(deleted1);
CHECK(!deleted2);
isolate->UnregisterFromReleaseAtTeardown(&finalizer);
CHECK_EQ(1, deleted1);
CHECK_EQ(0, deleted2);
delete d2;
CHECK(deleted2);
CHECK_EQ(1, deleted2);
}
TEST(DisposeCollect) {
TEST(DisposeCausesDestruction1) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator =
CcTest::InitIsolateOnce()->array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);
isolate->Enter();
Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
bool deleted1 = false;
bool deleted2 = false;
DeleteRecorder* d1 = new DeleteRecorder(&deleted1);
DeleteRecorder* d2 = new DeleteRecorder(&deleted2);
isolate->Enter();
int deleted1 = 0;
DeleteCounter* d1 = new DeleteCounter(&deleted1);
{
HandleScope scope(i_isolate);
auto handle = Managed<DeleteRecorder>::From(i_isolate, d1);
auto handle = Managed<DeleteCounter>::FromRawPtr(i_isolate, d1);
USE(handle);
}
Isolate::ManagedObjectFinalizer finalizer(d2, DeleteRecorder::Deleter);
i_isolate->RegisterForReleaseAtTeardown(&finalizer);
isolate->Exit();
isolate->Dispose();
CHECK_EQ(1, deleted1);
}
TEST(DisposeCausesDestruction2) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator =
      CcTest::InitIsolateOnce()->array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  int deleted1 = 0;
  int deleted2 = 0;
  DeleteCounter* d1 = new DeleteCounter(&deleted1);
  DeleteCounter* d2 = new DeleteCounter(&deleted2);
  // d1 is owned through a Managed<DeleteCounter>; disposing the isolate
  // (without any GC) must still destroy it exactly once.
  {
    HandleScope scope(i_isolate);
    auto handle = Managed<DeleteCounter>::FromRawPtr(i_isolate, d1);
    USE(handle);
  }
  // d2 is registered directly via a raw ManagedPtrDestructor (no Managed
  // wrapper); isolate teardown must run its deleter exactly once as well.
  ManagedPtrDestructor* destructor =
      new ManagedPtrDestructor(d2, DeleteCounter::Deleter);
  i_isolate->RegisterManagedPtrDestructor(destructor);
  isolate->Exit();
  isolate->Dispose();
  CHECK_EQ(1, deleted1);
  CHECK_EQ(1, deleted2);
}
TEST(CollectOnGC) {
i::Isolate* i_isolate = CcTest::InitIsolateOnce();
TEST(DisposeWithAnotherSharedPtr) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator =
CcTest::InitIsolateOnce()->array_buffer_allocator();
bool deleted = false;
v8::Isolate* isolate = v8::Isolate::New(create_params);
Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
isolate->Enter();
int deleted1 = 0;
DeleteCounter* d1 = new DeleteCounter(&deleted1);
{
HandleScope scope(i_isolate);
Managed<DeleteRecorder>::Allocate(i_isolate, &deleted);
std::shared_ptr<DeleteCounter> shared1(d1);
{
HandleScope scope(i_isolate);
auto handle = Managed<DeleteCounter>::FromSharedPtr(i_isolate, shared1);
USE(handle);
}
isolate->Exit();
isolate->Dispose();
CHECK_EQ(0, deleted1);
}
// TODO(ulan): It should be possible to trigger a normal gc before the
// "critical" one, and the "critical" one should still process pending
// second-pass phantom callbacks (crbug.com/v8/7628).
// CcTest::CollectAllGarbage();
CcTest::CollectAllAvailableGarbage();
// Should be deleted after the second shared pointer is destroyed.
CHECK_EQ(1, deleted1);
}
CHECK(deleted);
TEST(DisposeAcrossIsolates) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator =
      CcTest::InitIsolateOnce()->array_buffer_allocator();
  int deleted = 0;
  DeleteCounter* delete_counter = new DeleteCounter(&deleted);
  v8::Isolate* isolate1 = v8::Isolate::New(create_params);
  Isolate* i_isolate1 = reinterpret_cast<i::Isolate*>(isolate1);
  isolate1->Enter();
  {
    HandleScope scope1(i_isolate1);
    auto handle1 =
        Managed<DeleteCounter>::FromRawPtr(i_isolate1, delete_counter);
    // Share ownership with a second isolate via the underlying shared_ptr.
    v8::Isolate* isolate2 = v8::Isolate::New(create_params);
    Isolate* i_isolate2 = reinterpret_cast<i::Isolate*>(isolate2);
    isolate2->Enter();
    {
      HandleScope scope(i_isolate2);
      auto handle2 =
          Managed<DeleteCounter>::FromSharedPtr(i_isolate2, handle1->get());
      USE(handle2);
    }
    isolate2->Exit();
    // Disposing isolate2 drops only its reference; isolate1 still holds one,
    // so the object must survive.
    isolate2->Dispose();
    CHECK_EQ(0, deleted);
  }
  // Should be deleted after the first isolate is destroyed.
  isolate1->Exit();
  isolate1->Dispose();
  CHECK_EQ(1, deleted);
}
TEST(CollectAcrossIsolates) {
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator =
CcTest::InitIsolateOnce()->array_buffer_allocator();
int deleted = 0;
DeleteCounter* delete_counter = new DeleteCounter(&deleted);
v8::Isolate* isolate1 = v8::Isolate::New(create_params);
Isolate* i_isolate1 = reinterpret_cast<i::Isolate*>(isolate1);
isolate1->Enter();
{
HandleScope scope1(i_isolate1);
auto handle1 =
Managed<DeleteCounter>::FromRawPtr(i_isolate1, delete_counter);
v8::Isolate* isolate2 = v8::Isolate::New(create_params);
Isolate* i_isolate2 = reinterpret_cast<i::Isolate*>(isolate2);
isolate2->Enter();
{
HandleScope scope(i_isolate2);
auto handle2 =
Managed<DeleteCounter>::FromSharedPtr(i_isolate2, handle1->get());
USE(handle2);
}
i_isolate2->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kTesting);
CHECK_EQ(0, deleted);
isolate2->Exit();
isolate2->Dispose();
CHECK_EQ(0, deleted);
}
// Should be deleted after the first isolate is destroyed.
i_isolate1->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kTesting);
CHECK_EQ(1, deleted);
isolate1->Exit();
isolate1->Dispose();
CHECK_EQ(1, deleted);
}
} // namespace internal

View File

@ -17,13 +17,14 @@ TestingModuleBuilder::TestingModuleBuilder(
Zone* zone, ManuallyImportedJSFunction* maybe_import,
WasmExecutionMode mode, RuntimeExceptionSupport exception_support,
LowerSimd lower_simd)
: test_module_ptr_(&test_module_),
: test_module_(std::make_shared<WasmModule>()),
test_module_ptr_(test_module_.get()),
isolate_(CcTest::InitIsolateOnce()),
execution_mode_(mode),
runtime_exception_support_(exception_support),
lower_simd_(lower_simd) {
WasmJs::Install(isolate_, true);
test_module_.globals_size = kMaxGlobalsSize;
test_module_->globals_size = kMaxGlobalsSize;
memset(globals_data_, 0, sizeof(globals_data_));
uint32_t maybe_import_index = 0;
@ -33,8 +34,8 @@ TestingModuleBuilder::TestingModuleBuilder(
// instance object allocates import entries.
maybe_import_index = AddFunction(maybe_import->sig, nullptr);
DCHECK_EQ(0, maybe_import_index);
test_module_.num_imported_functions = 1;
test_module_.functions[0].imported = true;
test_module_->num_imported_functions = 1;
test_module_->functions[0].imported = true;
}
instance_object_ = InitInstanceObject();
@ -44,7 +45,7 @@ TestingModuleBuilder::TestingModuleBuilder(
CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
Handle<Code> code = compiler::CompileWasmToJSWrapper(
isolate_, maybe_import->js_function, maybe_import->sig,
maybe_import_index, test_module_.origin(),
maybe_import_index, test_module_->origin(),
trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
: kNoTrapHandler);
native_module_->ResizeCodeTableForTesting(maybe_import_index + 1,
@ -62,13 +63,13 @@ TestingModuleBuilder::TestingModuleBuilder(
}
byte* TestingModuleBuilder::AddMemory(uint32_t size) {
CHECK(!test_module_.has_memory);
CHECK(!test_module_->has_memory);
CHECK_NULL(mem_start_);
CHECK_EQ(0, mem_size_);
DCHECK(!instance_object_->has_memory_object());
test_module_.has_memory = true;
test_module_->has_memory = true;
const bool enable_guard_regions =
trap_handler::IsTrapHandlerEnabled() && test_module_.is_wasm();
trap_handler::IsTrapHandlerEnabled() && test_module_->is_wasm();
uint32_t alloc_size =
enable_guard_regions ? RoundUp(size, CommitPageSize()) : size;
Handle<JSArrayBuffer> new_buffer;
@ -83,7 +84,7 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) {
// Create the WasmMemoryObject.
Handle<WasmMemoryObject> memory_object = WasmMemoryObject::New(
isolate_, new_buffer,
(test_module_.maximum_pages != 0) ? test_module_.maximum_pages : -1);
(test_module_->maximum_pages != 0) ? test_module_->maximum_pages : -1);
instance_object_->set_memory_object(*memory_object);
WasmMemoryObject::AddInstance(isolate_, memory_object, instance_object_);
// TODO(wasm): Delete the following two lines when test-run-wasm will use a
@ -94,23 +95,23 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) {
}
uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name) {
if (test_module_.functions.size() == 0) {
if (test_module_->functions.size() == 0) {
// TODO(titzer): Reserving space here to avoid the underlying WasmFunction
// structs from moving.
test_module_.functions.reserve(kMaxFunctions);
test_module_->functions.reserve(kMaxFunctions);
}
uint32_t index = static_cast<uint32_t>(test_module_.functions.size());
uint32_t index = static_cast<uint32_t>(test_module_->functions.size());
if (native_module_) {
native_module_->ResizeCodeTableForTesting(index + 1, kMaxFunctions);
}
test_module_.functions.push_back({sig, index, 0, {0, 0}, false, false});
test_module_->functions.push_back({sig, index, 0, {0, 0}, false, false});
if (name) {
Vector<const byte> name_vec = Vector<const byte>::cast(CStrVector(name));
test_module_.AddNameForTesting(
test_module_->AddNameForTesting(
index, {AddBytes(name_vec), static_cast<uint32_t>(name_vec.length())});
}
if (interpreter_) {
interpreter_->AddFunctionForTesting(&test_module_.functions.back());
interpreter_->AddFunctionForTesting(&test_module_->functions.back());
}
DCHECK_LT(index, kMaxFunctions); // limited for testing.
return index;
@ -126,12 +127,12 @@ Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
Handle<WeakCell> weak_instance(compiled_module->weak_owning_instance(),
isolate_);
Handle<Code> ret_code = compiler::CompileJSToWasmWrapper(
isolate_, &test_module_, weak_instance, code, index,
isolate_, test_module_ptr_, weak_instance, code, index,
trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler : kNoTrapHandler);
Handle<JSFunction> ret = WasmExportedFunction::New(
isolate_, instance_object(), MaybeHandle<String>(),
static_cast<int>(index),
static_cast<int>(test_module_.functions[index].sig->parameter_count()),
static_cast<int>(test_module_->functions[index].sig->parameter_count()),
ret_code);
// Add reference to the exported wrapper code.
@ -147,8 +148,8 @@ Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
void TestingModuleBuilder::AddIndirectFunctionTable(
const uint16_t* function_indexes, uint32_t table_size) {
test_module_.function_tables.emplace_back();
WasmIndirectFunctionTable& table = test_module_.function_tables.back();
test_module_->function_tables.emplace_back();
WasmIndirectFunctionTable& table = test_module_->function_tables.back();
table.initial_size = table_size;
table.maximum_size = table_size;
table.has_maximum_size = true;
@ -164,11 +165,11 @@ void TestingModuleBuilder::PopulateIndirectFunctionTable() {
auto instance = instance_object();
uint32_t num_tables = 1; // TODO(titzer): multiple tables.
for (uint32_t i = 0; i < num_tables; i++) {
WasmIndirectFunctionTable& table = test_module_.function_tables[i];
WasmIndirectFunctionTable& table = test_module_->function_tables[i];
int table_size = static_cast<int>(instance->indirect_function_table_size());
for (int j = 0; j < table_size; j++) {
WasmFunction& function = test_module_.functions[table.values[j]];
int sig_id = test_module_.signature_map.Find(function.sig);
WasmFunction& function = test_module_->functions[table.values[j]];
int sig_id = test_module_->signature_map.Find(function.sig);
auto wasm_code = native_module_->GetCode(function.func_index);
IndirectFunctionTableEntry(*instance, j)
.set(sig_id, *instance, wasm_code);
@ -195,7 +196,7 @@ uint32_t TestingModuleBuilder::AddBytes(Vector<const byte> bytes) {
ModuleEnv TestingModuleBuilder::CreateModuleEnv() {
return {
&test_module_,
test_module_ptr_,
trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler : kNoTrapHandler,
runtime_exception_support_};
}
@ -203,26 +204,25 @@ ModuleEnv TestingModuleBuilder::CreateModuleEnv() {
const WasmGlobal* TestingModuleBuilder::AddGlobal(ValueType type) {
byte size = ValueTypes::MemSize(ValueTypes::MachineTypeFor(type));
global_offset = (global_offset + size - 1) & ~(size - 1); // align
test_module_.globals.push_back(
test_module_->globals.push_back(
{type, true, WasmInitExpr(), {global_offset}, false, false});
global_offset += size;
// limit number of globals.
CHECK_LT(global_offset, kMaxGlobalsSize);
return &test_module_.globals.back();
return &test_module_->globals.back();
}
Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
Handle<SeqOneByteString> empty_string = Handle<SeqOneByteString>::cast(
isolate_->factory()->NewStringFromOneByte({}).ToHandleChecked());
// The lifetime of the wasm module is tied to this object's, and we cannot
// rely on the mechanics of Managed<T>.
Handle<Foreign> module_wrapper = isolate_->factory()->NewForeign(
reinterpret_cast<Address>(&test_module_ptr_));
auto managed_module =
Managed<WasmModule>::FromSharedPtr(isolate_, test_module_);
DCHECK_EQ(test_module_ptr_, managed_module->raw());
Handle<Script> script =
isolate_->factory()->NewScript(isolate_->factory()->empty_string());
script->set_type(Script::TYPE_WASM);
Handle<WasmSharedModuleData> shared_module_data =
WasmSharedModuleData::New(isolate_, module_wrapper, empty_string, script,
WasmSharedModuleData::New(isolate_, managed_module, empty_string, script,
Handle<ByteArray>::null());
Handle<FixedArray> export_wrappers = isolate_->factory()->NewFixedArray(0);
ModuleEnv env = CreateModuleEnv();

View File

@ -94,7 +94,7 @@ class TestingModuleBuilder {
TestingModuleBuilder(Zone*, ManuallyImportedJSFunction*, WasmExecutionMode,
RuntimeExceptionSupport, LowerSimd);
void ChangeOriginToAsmjs() { test_module_.set_origin(kAsmJsOrigin); }
void ChangeOriginToAsmjs() { test_module_->set_origin(kAsmJsOrigin); }
byte* AddMemory(uint32_t size);
@ -114,12 +114,12 @@ class TestingModuleBuilder {
}
byte AddSignature(FunctionSig* sig) {
DCHECK_EQ(test_module_.signatures.size(),
test_module_.signature_ids.size());
test_module_.signatures.push_back(sig);
auto canonical_sig_num = test_module_.signature_map.FindOrInsert(sig);
test_module_.signature_ids.push_back(canonical_sig_num);
size_t size = test_module_.signatures.size();
DCHECK_EQ(test_module_->signatures.size(),
test_module_->signature_ids.size());
test_module_->signatures.push_back(sig);
auto canonical_sig_num = test_module_->signature_map.FindOrInsert(sig);
test_module_->signature_ids.push_back(canonical_sig_num);
size_t size = test_module_->signatures.size();
CHECK_GT(127, size);
return static_cast<byte>(size - 1);
}
@ -173,13 +173,13 @@ class TestingModuleBuilder {
}
void SetMaxMemPages(uint32_t maximum_pages) {
test_module_.maximum_pages = maximum_pages;
test_module_->maximum_pages = maximum_pages;
if (instance_object()->has_memory_object()) {
instance_object()->memory_object()->set_maximum_pages(maximum_pages);
}
}
void SetHasSharedMemory() { test_module_.has_shared_memory = true; }
void SetHasSharedMemory() { test_module_->has_shared_memory = true; }
uint32_t AddFunction(FunctionSig* sig, const char* name);
@ -193,7 +193,7 @@ class TestingModuleBuilder {
uint32_t AddBytes(Vector<const byte> bytes);
WasmFunction* GetFunctionAt(int index) {
return &test_module_.functions[index];
return &test_module_->functions[index];
}
WasmInterpreter* interpreter() { return interpreter_; }
@ -224,7 +224,7 @@ class TestingModuleBuilder {
}
private:
WasmModule test_module_;
std::shared_ptr<WasmModule> test_module_;
WasmModule* test_module_ptr_;
Isolate* isolate_;
uint32_t global_offset = 0;