[in-place weak refs] Add & use PrototypeUsers (a specialized weak data structure)

It's a growing weak array which also provides an API for marking slots empty; those
slots are then reused before the array is grown again.

This is a more efficient implementation than the corresponding feature in
FixedArrayOfWeakCells, because we chain the empty slots together (see the
standalone sketch after the commit metadata below).

BUG=v8:7308

Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I335cd3f9cc7838c7f6ca350735b1503b2f5b8eed
Reviewed-on: https://chromium-review.googlesource.com/1090922
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54481}
Author: Marja Hölttä, 2018-07-17 09:05:17 +02:00 (committed by Commit Bot)
Commit: c3dbef4db6 (parent: 56d21252c9)
11 changed files with 372 additions and 45 deletions
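Before the per-file diffs, here is a minimal standalone sketch of the empty-slot
chaining idea. This is not V8 code: all names are invented for illustration, and
the real structure stores weak references and Smis rather than plain ints.

// Minimal sketch: slot 0 holds the head of a free list of empty slots; each
// empty slot stores the index of the next empty slot, so Add() can reuse a
// hole in O(1) before growing the backing store.
#include <cassert>
#include <cstdio>
#include <vector>

class SlotArray {
 public:
  // Slot 0 is reserved for the free-list head; 0 also terminates the chain,
  // which works because payload slots start at index 1.
  static constexpr int kNoEmptySlots = 0;

  SlotArray() : slots_{kNoEmptySlots} {}

  // Stores value and returns the index it was stored at.
  int Add(int value) {
    int empty = slots_[0];
    if (empty != kNoEmptySlots) {
      slots_[0] = slots_[empty];  // pop the free-list head
      slots_[empty] = value;
      return empty;
    }
    slots_.push_back(value);  // no holes left: append, possibly growing
    return static_cast<int>(slots_.size()) - 1;
  }

  // Marks a slot empty by pushing it onto the free list.
  void MarkSlotEmpty(int index) {
    assert(index > 0 && index < static_cast<int>(slots_.size()));
    slots_[index] = slots_[0];
    slots_[0] = index;
  }

 private:
  std::vector<int> slots_;
};

int main() {
  SlotArray array;
  int a = array.Add(10);   // lands in slot 1
  int b = array.Add(20);   // lands in slot 2
  array.MarkSlotEmpty(a);
  int c = array.Add(30);   // reuses slot 1 instead of growing
  std::printf("%d %d %d\n", a, b, c);  // prints: 1 2 1
  return 0;
}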

@ -761,7 +761,10 @@ StartupData SnapshotCreator::CreateBlob(
// context even after we have disposed of the context.
isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kSnapshotCreator);
{
i::HandleScope scope(isolate);
isolate->heap()->CompactFixedArraysOfWeakCells();
}
isolate->heap()->read_only_space()->ClearStringPaddingIfNeeded();

@ -205,7 +205,7 @@ Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
Handle<PrototypeInfo> result =
Handle<PrototypeInfo>::cast(NewStruct(PROTOTYPE_INFO_TYPE, TENURED));
result->set_prototype_users(FixedArrayOfWeakCells::Empty());
result->set_prototype_users(*empty_weak_array_list());
result->set_registry_slot(PrototypeInfo::UNREGISTERED);
result->set_bit_field(0);
return result;

@ -5010,19 +5010,28 @@ void CompactFixedArrayOfWeakCells(Isolate* isolate, Object* object) {
} // anonymous namespace
void Heap::CompactFixedArraysOfWeakCells() {
// Find known FixedArrayOfWeakCells and compact them.
// Find known PrototypeUsers and compact them.
std::vector<Handle<PrototypeInfo>> prototype_infos;
{
HeapIterator iterator(this);
for (HeapObject* o = iterator.next(); o != nullptr; o = iterator.next()) {
if (o->IsPrototypeInfo()) {
Object* prototype_users = PrototypeInfo::cast(o)->prototype_users();
if (prototype_users->IsFixedArrayOfWeakCells()) {
FixedArrayOfWeakCells* array =
FixedArrayOfWeakCells::cast(prototype_users);
array->Compact<JSObject::PrototypeRegistryCompactionCallback>(
isolate());
PrototypeInfo* prototype_info = PrototypeInfo::cast(o);
if (prototype_info->prototype_users()->IsWeakArrayList()) {
prototype_infos.emplace_back(handle(prototype_info, isolate()));
}
}
}
}
for (auto& prototype_info : prototype_infos) {
Handle<WeakArrayList> array(
WeakArrayList::cast(prototype_info->prototype_users()), isolate());
WeakArrayList* new_array = PrototypeUsers::Compact(
array, this, JSObject::PrototypeRegistryCompactionCallback);
prototype_info->set_prototype_users(new_array);
}
// Find known FixedArrayOfWeakCells and compact them.
CompactFixedArrayOfWeakCells(isolate(), noscript_shared_function_infos());
CompactFixedArrayOfWeakCells(isolate(), script_list());
CompactFixedArrayOfWeakCells(isolate(), weak_stack_trace_list());
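Note how the heap.cc change above works in two passes: the HeapIterator loop only
collects the relevant PrototypeInfos into handles, and the compaction runs in a
second loop after the iterator is gone, presumably because PrototypeUsers::Compact
allocates a new WeakArrayList (via EnsureSpace) and may trigger GC, which cannot
safely happen while the heap is being iterated.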

@ -762,8 +762,8 @@ void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map* map) {
if (map->prototype_info()->IsPrototypeInfo()) {
PrototypeInfo* info = PrototypeInfo::cast(map->prototype_info());
Object* users = info->prototype_users();
if (users->IsFixedArrayOfWeakCells()) {
RecordSimpleVirtualObjectStats(map, FixedArrayOfWeakCells::cast(users),
if (users->IsWeakArrayList()) {
RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
ObjectStats::PROTOTYPE_USERS_TYPE);
}
}

@ -1543,13 +1543,45 @@ void Module::ModuleVerify(Isolate* isolate) {
void PrototypeInfo::PrototypeInfoVerify(Isolate* isolate) {
CHECK(IsPrototypeInfo());
CHECK(weak_cell()->IsWeakCell() || weak_cell()->IsUndefined(isolate));
if (prototype_users()->IsFixedArrayOfWeakCells()) {
FixedArrayOfWeakCells::cast(prototype_users())->FixedArrayVerify(isolate);
if (prototype_users()->IsWeakArrayList()) {
PrototypeUsers::Verify(WeakArrayList::cast(prototype_users()));
} else {
CHECK(prototype_users()->IsSmi());
}
}
void PrototypeUsers::Verify(WeakArrayList* array) {
if (array->length() == 0) {
// Allow empty & uninitialized lists.
return;
}
// Verify empty slot chain.
int empty_slot = Smi::ToInt(empty_slot_index(array));
int empty_slots_count = 0;
while (empty_slot != kNoEmptySlotsMarker) {
CHECK_GT(empty_slot, 0);
CHECK_LT(empty_slot, array->length());
empty_slot = Smi::ToInt(array->Get(empty_slot)->ToSmi());
++empty_slots_count;
}
// Verify that all elements are either weak pointers or SMIs marking empty
// slots.
int weak_maps_count = 0;
for (int i = kFirstIndex; i < array->length(); ++i) {
HeapObject* heap_object;
MaybeObject* object = array->Get(i);
if ((object->ToWeakHeapObject(&heap_object) && heap_object->IsMap()) ||
object->IsClearedWeakHeapObject()) {
++weak_maps_count;
} else {
CHECK(object->IsSmi());
}
}
CHECK_EQ(weak_maps_count + empty_slots_count + 1, array->length());
}
void Tuple2::Tuple2Verify(Isolate* isolate) {
CHECK(IsTuple2());
Heap* heap = isolate->heap();
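To make the invariant at the end of PrototypeUsers::Verify above concrete, here is
a hypothetical layout; the slot contents are illustrative, not taken from the patch.

// Hypothetical PrototypeUsers of length 5:
//   [0] Smi(3)             empty_slot_index: head of the empty-slot chain
//   [1] weak ref to Map A
//   [2] cleared weak ref   (counted together with the live maps)
//   [3] Smi(0)             empty slot; 0 == kNoEmptySlotsMarker ends the chain
//   [4] weak ref to Map B
// weak_maps_count = 3 (slots 1, 2, 4), empty_slots_count = 1 (slot 3), plus the
// header slot at index 0: 3 + 1 + 1 == 5 == length, as the CHECK_EQ requires.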

@ -10380,8 +10380,7 @@ void FixedArrayOfWeakCells::Iterator::Reset(Object* maybe_array) {
}
}
void JSObject::PrototypeRegistryCompactionCallback::Callback(Object* value,
void JSObject::PrototypeRegistryCompactionCallback(HeapObject* value,
int old_index,
int new_index) {
DCHECK(value->IsMap() && Map::cast(value)->is_prototype_map());
@ -10394,8 +10393,6 @@ void JSObject::PrototypeRegistryCompactionCallback::Callback(Object* value,
template void FixedArrayOfWeakCells::Compact<
FixedArrayOfWeakCells::NullCallback>(Isolate* isolate);
template void FixedArrayOfWeakCells::Compact<
JSObject::PrototypeRegistryCompactionCallback>(Isolate* isolate);
bool FixedArrayOfWeakCells::Remove(Handle<HeapObject> value) {
if (Length() == 0) return false;
@ -10548,6 +10545,91 @@ Handle<WeakArrayList> WeakArrayList::EnsureSpace(Isolate* isolate,
return array;
}
// static
Handle<WeakArrayList> PrototypeUsers::Add(Isolate* isolate,
Handle<WeakArrayList> array,
Handle<Map> value,
int* assigned_index) {
int length = array->length();
if (length == 0) {
// Uninitialized WeakArrayList; need to initialize empty_slot_index.
array = WeakArrayList::EnsureSpace(isolate, array, kFirstIndex + 1);
set_empty_slot_index(*array, kNoEmptySlotsMarker);
array->Set(kFirstIndex, HeapObjectReference::Weak(*value));
array->set_length(kFirstIndex + 1);
if (assigned_index != nullptr) *assigned_index = kFirstIndex;
return array;
}
// If the array has unfilled space at the end, use it.
if (!array->IsFull()) {
array->Set(length, HeapObjectReference::Weak(*value));
array->set_length(length + 1);
if (assigned_index != nullptr) *assigned_index = length;
return array;
}
// If there are empty slots, use one of them.
int empty_slot = Smi::ToInt(empty_slot_index(*array));
if (empty_slot != kNoEmptySlotsMarker) {
DCHECK_GE(empty_slot, kFirstIndex);
CHECK_LT(empty_slot, array->length());
int next_empty_slot = Smi::ToInt(array->Get(empty_slot)->ToSmi());
array->Set(empty_slot, HeapObjectReference::Weak(*value));
if (assigned_index != nullptr) *assigned_index = empty_slot;
set_empty_slot_index(*array, next_empty_slot);
return array;
} else {
DCHECK_EQ(empty_slot, kNoEmptySlotsMarker);
}
// Array full and no empty slots. Grow the array.
array = WeakArrayList::EnsureSpace(isolate, array, length + 1);
array->Set(length, HeapObjectReference::Weak(*value));
array->set_length(length + 1);
if (assigned_index != nullptr) *assigned_index = length;
return array;
}
WeakArrayList* PrototypeUsers::Compact(Handle<WeakArrayList> array, Heap* heap,
CompactionCallback callback) {
if (array->length() == 0) {
return *array;
}
// Count the number of live references.
int new_length = kFirstIndex;
for (int i = kFirstIndex; i < array->length(); i++) {
MaybeObject* element = array->Get(i);
if (element->IsSmi()) continue;
if (element->IsClearedWeakHeapObject()) continue;
++new_length;
}
if (new_length == array->length()) {
return *array;
}
Handle<WeakArrayList> new_array = WeakArrayList::EnsureSpace(
heap->isolate(),
handle(ReadOnlyRoots(heap).empty_weak_array_list(), heap->isolate()),
new_length);
// Allocation might have caused GC and turned some of the elements into
// cleared weak heap objects. Count the number of live objects again.
int copy_to = kFirstIndex;
for (int i = kFirstIndex; i < array->length(); i++) {
MaybeObject* element = array->Get(i);
if (element->IsSmi()) continue;
if (element->IsClearedWeakHeapObject()) continue;
HeapObject* value = element->ToWeakHeapObject();
callback(value, i, copy_to);
new_array->Set(copy_to++, element);
}
new_array->set_length(copy_to);
set_empty_slot_index(*new_array, kNoEmptySlotsMarker);
return *new_array;
}
Handle<RegExpMatchInfo> RegExpMatchInfo::ReserveCaptures(
Isolate* isolate, Handle<RegExpMatchInfo> match_info, int capture_count) {
DCHECK_GE(match_info->length(), kLastMatchOverhead);
@ -12644,9 +12726,14 @@ void JSObject::LazyRegisterPrototypeUser(Handle<Map> user, Isolate* isolate) {
Handle<PrototypeInfo> proto_info =
Map::GetOrCreatePrototypeInfo(proto, isolate);
Handle<Object> maybe_registry(proto_info->prototype_users(), isolate);
Handle<WeakArrayList> registry =
maybe_registry->IsSmi()
? handle(ReadOnlyRoots(isolate->heap()).empty_weak_array_list(),
isolate)
: Handle<WeakArrayList>::cast(maybe_registry);
int slot = 0;
Handle<FixedArrayOfWeakCells> new_array = FixedArrayOfWeakCells::Add(
isolate, maybe_registry, current_user, &slot);
Handle<WeakArrayList> new_array =
PrototypeUsers::Add(isolate, registry, current_user, &slot);
current_user_info->set_registry_slot(slot);
if (!maybe_registry.is_identical_to(new_array)) {
proto_info->set_prototype_users(*new_array);
@ -12676,7 +12763,7 @@ bool JSObject::UnregisterPrototypeUser(Handle<Map> user, Isolate* isolate) {
if (!user->prototype()->IsJSObject()) {
Object* users =
PrototypeInfo::cast(user->prototype_info())->prototype_users();
return users->IsFixedArrayOfWeakCells();
return users->IsWeakArrayList();
}
Handle<JSObject> prototype(JSObject::cast(user->prototype()), isolate);
Handle<PrototypeInfo> user_info =
@ -12689,10 +12776,10 @@ bool JSObject::UnregisterPrototypeUser(Handle<Map> user, Isolate* isolate) {
DCHECK(maybe_proto_info->IsPrototypeInfo());
Handle<PrototypeInfo> proto_info(PrototypeInfo::cast(maybe_proto_info),
isolate);
Object* maybe_registry = proto_info->prototype_users();
DCHECK(maybe_registry->IsFixedArrayOfWeakCells());
DCHECK(FixedArrayOfWeakCells::cast(maybe_registry)->Get(slot) == *user);
FixedArrayOfWeakCells::cast(maybe_registry)->Clear(slot);
Handle<WeakArrayList> prototype_users(
WeakArrayList::cast(proto_info->prototype_users()), isolate);
DCHECK_EQ(prototype_users->Get(slot), HeapObjectReference::Weak(*user));
PrototypeUsers::MarkSlotEmpty(*prototype_users, slot);
if (FLAG_trace_prototype_users) {
PrintF("Unregistering %p as a user of prototype %p.\n",
reinterpret_cast<void*>(*user), reinterpret_cast<void*>(*prototype));
@ -12725,12 +12812,18 @@ void InvalidatePrototypeChainsInternal(Map* map) {
Object* maybe_proto_info = map->prototype_info();
if (!maybe_proto_info->IsPrototypeInfo()) return;
PrototypeInfo* proto_info = PrototypeInfo::cast(maybe_proto_info);
FixedArrayOfWeakCells::Iterator iterator(proto_info->prototype_users());
WeakArrayList* prototype_users =
WeakArrayList::cast(proto_info->prototype_users());
// For now, only maps register themselves as users.
Map* user;
while ((user = iterator.Next<Map>()) != nullptr) {
// Walk the prototype chain (backwards, towards leaf objects) if necessary.
InvalidatePrototypeChainsInternal(user);
for (int i = PrototypeUsers::kFirstIndex; i < prototype_users->length();
++i) {
HeapObject* heap_object;
if (prototype_users->Get(i)->ToWeakHeapObject(&heap_object) &&
heap_object->IsMap()) {
// Walk the prototype chain (backwards, towards leaf objects) if
// necessary.
InvalidatePrototypeChainsInternal(Map::cast(heap_object));
}
}
}

@ -2260,11 +2260,9 @@ class JSObject: public JSReceiver {
// Utility used by many Array builtins and runtime functions
static inline bool PrototypeHasNoElements(Isolate* isolate, JSObject* object);
// Alternative implementation of FixedArrayOfWeakCells::NullCallback.
class PrototypeRegistryCompactionCallback {
public:
static void Callback(Object* value, int old_index, int new_index);
};
// To be passed to PrototypeUsers::Compact.
static void PrototypeRegistryCompactionCallback(HeapObject* value,
int old_index, int new_index);
// Retrieve interceptors.
inline InterceptorInfo* GetNamedInterceptor();

@ -362,15 +362,16 @@ class WeakArrayList : public HeapObject {
static const int kMaxCapacity =
(FixedArray::kMaxSize - kHeaderSize) / kPointerSize;
protected:
static Handle<WeakArrayList> EnsureSpace(Isolate* isolate,
Handle<WeakArrayList> array,
int length);
private:
static int OffsetOfElementAt(int index) {
return kHeaderSize + index * kPointerSize;
}
static Handle<WeakArrayList> EnsureSpace(Isolate* isolate,
Handle<WeakArrayList> array,
int length);
DISALLOW_IMPLICIT_CONSTRUCTORS(WeakArrayList);
};

@ -7,6 +7,8 @@
#include "src/objects/prototype-info.h"
#include "src/objects/maybe-object.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@ -37,6 +39,23 @@ SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)
void PrototypeUsers::MarkSlotEmpty(WeakArrayList* array, int index) {
DCHECK_GT(index, 0);
DCHECK_LT(index, array->length());
// Chain the empty slots into a linked list (each empty slot contains the
// index of the next empty slot).
array->Set(index, MaybeObject::FromObject(empty_slot_index(array)));
set_empty_slot_index(array, index);
}
Smi* PrototypeUsers::empty_slot_index(WeakArrayList* array) {
return array->Get(kEmptySlotIndex)->ToSmi();
}
void PrototypeUsers::set_empty_slot_index(WeakArrayList* array, int index) {
array->Set(kEmptySlotIndex, MaybeObject::FromObject(Smi::FromInt(index)));
}
} // namespace internal
} // namespace v8
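MarkSlotEmpty and the empty_slot_index accessors above make the empty-slot chain a
LIFO list, which is why the cctest below sees later additions fill slots in the
reverse order of the MarkSlotEmpty calls. A short hypothetical trace (slot indices
are illustrative):

// Slot 0 is empty_slot_index; 0 == kNoEmptySlotsMarker terminates the chain.
//   initially:             [0]=Smi(0)                no empty slots
//   MarkSlotEmpty(a, 1):   [1]=Smi(0), [0]=Smi(1)    chain: 1
//   MarkSlotEmpty(a, 2):   [2]=Smi(1), [0]=Smi(2)    chain: 2 -> 1
//   Add(...):              fills slot 2, [0]=Smi(1)  chain: 1
//   Add(...):              fills slot 1, [0]=Smi(0)  chain empty again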

@ -6,6 +6,7 @@
#define V8_OBJECTS_PROTOTYPE_INFO_H_
#include "src/objects.h"
#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@ -21,8 +22,8 @@ class PrototypeInfo : public Struct {
// [weak_cell]: A WeakCell containing this prototype. ICs cache the cell here.
DECL_ACCESSORS(weak_cell, Object)
// [prototype_users]: FixedArrayOfWeakCells containing maps using this
// prototype, or Smi(0) if uninitialized.
// [prototype_users]: WeakArrayList containing weak references to maps using
// this prototype, or Smi(0) if uninitialized.
DECL_ACCESSORS(prototype_users, Object)
// [object_create_map]: A field caching the map for Object.create(prototype).
@ -67,6 +68,42 @@ class PrototypeInfo : public Struct {
DISALLOW_IMPLICIT_CONSTRUCTORS(PrototypeInfo);
};
// A growing array with an additional API for marking slots "empty". When adding
// new elements, we reuse the empty slots instead of growing the array.
class PrototypeUsers : public WeakArrayList {
public:
static Handle<WeakArrayList> Add(Isolate* isolate,
Handle<WeakArrayList> array,
Handle<Map> value, int* assigned_index);
static inline void MarkSlotEmpty(WeakArrayList* array, int index);
// The callback is called when a weak pointer to HeapObject "object" is moved
// from index "from_index" to index "to_index" during compaction. The callback
// must not cause GC.
typedef void (*CompactionCallback)(HeapObject* object, int from_index,
int to_index);
static WeakArrayList* Compact(Handle<WeakArrayList> array, Heap* heap,
CompactionCallback callback);
#ifdef VERIFY_HEAP
static void Verify(WeakArrayList* array);
#endif // VERIFY_HEAP
static const int kEmptySlotIndex = 0;
static const int kFirstIndex = 1;
static const int kNoEmptySlotsMarker = 0;
private:
static inline Smi* empty_slot_index(WeakArrayList* array);
static inline void set_empty_slot_index(WeakArrayList* array, int index);
static bool IsSlotEmpty(WeakArrayList* array, int index);
DISALLOW_IMPLICIT_CONSTRUCTORS(PrototypeUsers);
};
} // namespace internal
} // namespace v8
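Taken together with the cctests at the end of this change, typical use of the new
class looks roughly like this. This is a sketch only: some_map, MyCompactionCallback,
and the surrounding isolate/heap setup are placeholders.

// Sketch of the PrototypeUsers API, modeled on the cctests below.
Handle<WeakArrayList> users(ReadOnlyRoots(heap).empty_weak_array_list(), isolate);
int slot = -1;
// Register a map; the array may be reallocated, so keep the returned handle.
users = PrototypeUsers::Add(isolate, users, some_map, &slot);
// Unregister it later; the slot goes onto the empty-slot chain for reuse.
PrototypeUsers::MarkSlotEmpty(*users, slot);
// Occasionally squeeze out cleared references and empty slots.
// MyCompactionCallback must match PrototypeUsers::CompactionCallback and not GC.
users = handle(PrototypeUsers::Compact(users, heap, MyCompactionCallback), isolate);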

@ -578,6 +578,141 @@ TEST(Regress7768) {
CcTest::CollectAllGarbage();
}
TEST(PrototypeUsersBasic) {
CcTest::InitializeVM();
LocalContext context;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope outer_scope(isolate);
Handle<WeakArrayList> array(ReadOnlyRoots(heap).empty_weak_array_list(),
isolate);
// Add some objects into the array.
int index = -1;
{
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
CHECK_EQ(array->length(), index + 1);
}
CHECK_EQ(index, 1);
int empty_index = index;
PrototypeUsers::MarkSlotEmpty(*array, empty_index);
// Even though we have an empty slot, we still add to the end.
int last_index = index;
int old_capacity = array->capacity();
while (!array->IsFull()) {
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
CHECK_EQ(index, last_index + 1);
CHECK_EQ(array->length(), index + 1);
last_index = index;
}
// The next addition will fill the empty slot.
{
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
}
CHECK_EQ(index, empty_index);
// The next addition will make the array grow again.
{
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
CHECK_EQ(array->length(), index + 1);
last_index = index;
}
CHECK_GT(array->capacity(), old_capacity);
// Make multiple slots empty.
int empty_index1 = 1;
int empty_index2 = 2;
PrototypeUsers::MarkSlotEmpty(*array, empty_index1);
PrototypeUsers::MarkSlotEmpty(*array, empty_index2);
// Fill the array (still adding to the end)
old_capacity = array->capacity();
while (!array->IsFull()) {
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
CHECK_EQ(index, last_index + 1);
CHECK_EQ(array->length(), index + 1);
last_index = index;
}
// Make sure we use the empty slots in (reverse) order.
{
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
}
CHECK_EQ(index, empty_index2);
{
Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map, &index);
}
CHECK_EQ(index, empty_index1);
}
namespace {
HeapObject* saved_heap_object = nullptr;
static void TestCompactCallback(HeapObject* value, int old_index,
int new_index) {
saved_heap_object = value;
CHECK_EQ(old_index, 2);
CHECK_EQ(new_index, 1);
}
} // namespace
TEST(PrototypeUsersCompacted) {
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
LocalContext context;
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope outer_scope(isolate);
Handle<WeakArrayList> array(ReadOnlyRoots(heap).empty_weak_array_list(),
isolate);
// Add some objects into the array.
int index = -1;
Handle<Map> map_cleared_by_user =
factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, map_cleared_by_user, &index);
CHECK_EQ(index, 1);
Handle<Map> live_map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, live_map, &index);
CHECK_EQ(index, 2);
{
HandleScope inner_scope(isolate);
Handle<Map> soon_dead_map =
factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
array = PrototypeUsers::Add(isolate, array, soon_dead_map, &index);
CHECK_EQ(index, 3);
array = inner_scope.CloseAndEscape(array);
}
PrototypeUsers::MarkSlotEmpty(*array, 1);
CcTest::CollectAllGarbage();
CHECK(array->Get(3)->IsClearedWeakHeapObject());
CHECK_EQ(array->length(), 3 + PrototypeUsers::kFirstIndex);
WeakArrayList* new_array =
PrototypeUsers::Compact(array, heap, TestCompactCallback);
CHECK_EQ(new_array->length(), 1 + PrototypeUsers::kFirstIndex);
CHECK_EQ(saved_heap_object, *live_map);
}
} // namespace heap
} // namespace internal
} // namespace v8