Prototype of mark-and-compact support for Harmony weak maps.

R=vegorov@chromium.org
BUG=v8:1565
TEST=cctest/test-weakmaps

Review URL: http://codereview.chromium.org/7553012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8817 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: mstarzinger@chromium.org  2011-08-03 12:48:30 +00:00
Parent: 9721eddc1f
Commit: b05ff5e0b9

14 changed files with 316 additions and 26 deletions
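For context, a minimal JavaScript sketch (illustrative only, not part of this patch) of the semantics the collector has to preserve: an entry's value must stay alive only as long as its key is otherwise reachable.

// Illustrative sketch of Harmony WeakMap semantics; not from the patch.
var map = new WeakMap;
var key = {};
map.set(key, {payload: "kept alive only through the weak map"});
// While 'key' is reachable, map.get(key) must return the value.
// Once 'key' becomes unreachable, the GC may drop both key and value,
// even though the weak map itself is still strongly reachable.
key = null;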


@@ -1292,6 +1292,10 @@ class ScavengingVisitor : public StaticVisitorBase {
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<SharedFunctionInfo::kSize>);
table_.Register(kVisitJSWeakMap,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);


@@ -1646,6 +1646,7 @@ class Heap {
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
friend class StaticMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);


@@ -64,13 +64,15 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
live_bytes_(0),
#endif
heap_(NULL),
code_flusher_(NULL),
encountered_weak_maps_(NULL) { }
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
ASSERT(state_ == PREPARE_GC);
ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
// Prepare has selected whether to compact the old generation or not.
// Tell the tracer.
@@ -80,6 +82,8 @@ void MarkCompactCollector::CollectGarbage() {
if (FLAG_collect_maps) ClearNonLiveTransitions();
ClearWeakMaps();
SweepLargeObjectSpace();
if (IsCompacting()) {
@@ -407,6 +411,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
@@ -556,6 +562,34 @@ class StaticMarkingVisitor : public StaticVisitorBase {
StructBodyDescriptor,
void> StructObjectVisitor;
static void VisitJSWeakMap(Map* map, HeapObject* object) {
MarkCompactCollector* collector = map->heap()->mark_compact_collector();
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
// Enqueue weak map in linked list of encountered weak maps.
ASSERT(weak_map->next() == Smi::FromInt(0));
weak_map->set_next(collector->encountered_weak_maps());
collector->set_encountered_weak_maps(weak_map);
// Skip visiting the backing hash table containing the mappings.
int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
map->heap(),
object,
JSWeakMap::BodyDescriptor::kStartOffset,
JSWeakMap::kTableOffset);
BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
map->heap(),
object,
JSWeakMap::kTableOffset + kPointerSize,
object_size);
// Mark the backing hash table without pushing it on the marking stack.
ASSERT(!weak_map->unchecked_table()->IsMarked());
ASSERT(weak_map->unchecked_table()->map()->IsMarked());
collector->SetMark(weak_map->unchecked_table());
}
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
map->heap());
@@ -1369,20 +1403,26 @@ void MarkCompactCollector::MarkImplicitRefGroups() {
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
  while (!marking_stack_.is_empty()) {
    while (!marking_stack_.is_empty()) {
      HeapObject* object = marking_stack_.Pop();
      ASSERT(object->IsHeapObject());
      ASSERT(heap()->Contains(object));
      ASSERT(object->IsMarked());
      ASSERT(!object->IsOverflowed());

      // Because the object is marked, we have to recover the original map
      // pointer and use it to mark the object's body.
      MapWord map_word = object->map_word();
      map_word.ClearMark();
      Map* map = map_word.ToMap();
      MarkObject(map);

      StaticMarkingVisitor::IterateBody(map, object);
    }

    // Process encountered weak maps, mark objects only reachable by those
    // weak maps and repeat until fix-point is reached.
    ProcessWeakMaps();
  }
}
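Why a fix-point is needed rather than a single pass: marking the value of one weak-map entry can make the key of another entry reachable for the first time. A hedged JavaScript illustration (not from this change):

// Illustrative only: a chain of entries where each value is the key of the next.
var map = new WeakMap;
var k1 = {}, k2 = {}, k3 = {};
map.set(k1, k2);   // k2 is reachable only as the value stored for k1
map.set(k2, k3);   // k3 becomes reachable only once k2's value is marked
map.set(k3, "end");
k2 = k3 = null;    // only k1 is still held strongly
// As long as k1 is live, all three entries must survive; the collector can
// only discover this by re-scanning the encountered weak maps until no new
// objects get marked (the ProcessWeakMaps fix-point above).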
@@ -1735,6 +1775,45 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
}
}
void MarkCompactCollector::ProcessWeakMaps() {
Object* weak_map_obj = encountered_weak_maps();
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
ObjectHashTable* table = weak_map->unchecked_table();
for (int i = 0; i < table->Capacity(); i++) {
if (HeapObject::cast(table->KeyAt(i))->IsMarked()) {
Object* value = table->get(table->EntryToValueIndex(i));
StaticMarkingVisitor::MarkObjectByPointer(heap(), &value);
table->set_unchecked(heap(),
table->EntryToValueIndex(i),
value,
UPDATE_WRITE_BARRIER);
}
}
weak_map_obj = weak_map->next();
}
}
void MarkCompactCollector::ClearWeakMaps() {
Object* weak_map_obj = encountered_weak_maps();
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
ObjectHashTable* table = weak_map->unchecked_table();
for (int i = 0; i < table->Capacity(); i++) {
if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) {
table->RemoveEntry(i, heap());
}
}
weak_map_obj = weak_map->next();
weak_map->set_next(Smi::FromInt(0));
}
set_encountered_weak_maps(Smi::FromInt(0));
}
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map


@@ -193,6 +193,11 @@ class MarkCompactCollector {
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
void EnableCodeFlushing(bool enable);
inline Object* encountered_weak_maps() { return encountered_weak_maps_; }
inline void set_encountered_weak_maps(Object* weak_map) {
encountered_weak_maps_ = weak_map;
}
private:
MarkCompactCollector();
~MarkCompactCollector();
@@ -329,6 +334,16 @@ class MarkCompactCollector {
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
// Mark all values associated with reachable keys in weak maps encountered
// so far. This might push new object or even new weak maps onto the
// marking stack.
void ProcessWeakMaps();
// After all reachable objects have been marked those weak map entries
// with an unreachable key are removed from all encountered weak maps.
// The linked list of all encountered weak maps is destroyed.
void ClearWeakMaps();
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection, or else computing and encoding
@@ -499,6 +514,7 @@ class MarkCompactCollector {
Heap* heap_;
MarkingStack marking_stack_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
friend class Heap;
friend class OverflowedObjectsScanner;


@@ -3861,6 +3861,12 @@ ACCESSORS(JSProxy, padding, Object, kPaddingOffset)
ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
ObjectHashTable* JSWeakMap::unchecked_table() {
return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
}
Address Foreign::address() {
@@ -4471,6 +4477,11 @@ MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
}
void ObjectHashTable::RemoveEntry(int entry) {
RemoveEntry(entry, GetHeap());
}
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:


@@ -88,6 +88,9 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case JS_WEAK_MAP_TYPE:
return kVisitJSWeakMap;
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
@@ -115,7 +118,6 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
case JS_WEAK_MAP_TYPE:
return GetVisitorIdForSize(kVisitJSObject,
kVisitJSObjectGeneric,
instance_size);


@@ -121,6 +121,7 @@ class StaticVisitorBase : public AllStatic {
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitJSFunction,
kVisitJSWeakMap,
kVisitJSRegExp,
kVisitorIdCount,
@@ -317,7 +318,9 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
table_.Register(kVisitJSWeakMap, &VisitJSObject);
table_.Register(kVisitJSRegExp, &VisitJSObject);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
@@ -356,15 +359,15 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return FixedDoubleArray::SizeFor(length);
}
static inline int VisitJSObject(Map* map, HeapObject* object) {
return JSObjectVisitor::Visit(map, object);
}
static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
return SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
}
static inline int VisitJSRegExp(Map* map, HeapObject* object) {
return JSObjectVisitor::Visit(map, object);
}
static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
return SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());


@@ -11640,10 +11640,9 @@ void ObjectHashTable::AddEntry(int entry, JSObject* key, Object* value) {
}
void ObjectHashTable::RemoveEntry(int entry, Heap* heap) {
  set_null(heap, EntryToIndex(entry));
  set_null(heap, EntryToIndex(entry) + 1);
  ElementRemoved();
}


@@ -2984,8 +2984,16 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape, JSObject*> {
MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
private:
friend class MarkCompactCollector;
void AddEntry(int entry, JSObject* key, Object* value);
void RemoveEntry(int entry, Heap* heap);
inline void RemoveEntry(int entry);
// Returns the index to the value of an entry.
static inline int EntryToValueIndex(int entry) {
return EntryToIndex(entry) + 1;
}
};
@@ -6642,6 +6650,12 @@ class JSWeakMap: public JSObject {
// [table]: the backing hash table mapping keys to values.
DECL_ACCESSORS(table, ObjectHashTable)
// [next]: linked list of encountered weak maps during GC.
DECL_ACCESSORS(next, Object)
// Unchecked accessors to be used during GC.
inline ObjectHashTable* unchecked_table();
// Casting.
static inline JSWeakMap* cast(Object* obj);
@@ -6656,7 +6670,8 @@ class JSWeakMap: public JSObject {
#endif
static const int kTableOffset = JSObject::kHeaderSize;
static const int kNextOffset = kTableOffset + kPointerSize;
static const int kSize = kNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);


@@ -642,6 +642,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
ASSERT(weakmap->map()->inobject_properties() == 0);
Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
weakmap->set_table(*table);
weakmap->set_next(Smi::FromInt(0));
return *weakmap;
}


@@ -96,7 +96,8 @@ SOURCES = {
'test-threads.cc',
'test-unbound-queue.cc',
'test-utils.cc',
'test-version.cc',
'test-weakmaps.cc'
],
'arch:arm': [
'test-assembler-arm.cc',


@@ -38,6 +38,9 @@ test-api/ApplyInterruption: PASS || TIMEOUT
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
# We do not yet shrink weak maps after they have been emptied by the GC
test-weakmaps/Shrinking: FAIL
##############################################################################
[ $arch == arm ]


@@ -0,0 +1,149 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "global-handles.h"
#include "snapshot.h"
#include "cctest.h"
using namespace v8::internal;
static Handle<JSWeakMap> AllocateJSWeakMap() {
Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
// Do not use handles for the hash table, it would make entries strong.
Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
ObjectHashTable* table = ObjectHashTable::cast(table_obj);
weakmap->set_table(table);
weakmap->set_next(Smi::FromInt(0));
return weakmap;
}
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
Handle<JSObject> key,
int value) {
Handle<ObjectHashTable> table = PutIntoObjectHashTable(
Handle<ObjectHashTable>(weakmap->table()),
Handle<JSObject>(JSObject::cast(*key)),
Handle<Smi>(Smi::FromInt(value)));
weakmap->set_table(*table);
}
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
ASSERT(id == reinterpret_cast<void*>(1234));
NumberOfWeakCalls++;
handle.Dispose();
}
TEST(Weakness) {
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
GlobalHandles* global_handles = Isolate::Current()->global_handles();
// Keep global reference to the key.
Handle<Object> key;
{
v8::HandleScope scope;
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
key = global_handles->Create(*object);
}
CHECK(!global_handles->IsWeak(key.location()));
// Put entry into weak map.
{
v8::HandleScope scope;
PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
}
CHECK_EQ(1, weakmap->table()->NumberOfElements());
// Force a full GC.
HEAP->CollectAllGarbage(false);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(1, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
// Make the global reference to the key weak.
{
v8::HandleScope scope;
global_handles->MakeWeak(key.location(),
reinterpret_cast<void*>(1234),
&WeakPointerCallback);
}
CHECK(global_handles->IsWeak(key.location()));
// Force a full GC.
// Perform two consecutive GCs because the first one will only clear
// weak references whereas the second one will also clear weak maps.
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(1, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, weakmap->table()->NumberOfElements());
CHECK_EQ(1, weakmap->table()->NumberOfDeletedElements());
}
TEST(Shrinking) {
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
// Check initial capacity.
CHECK_EQ(32, weakmap->table()->Capacity());
// Fill up weak map to trigger capacity change.
{
v8::HandleScope scope;
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
PutIntoWeakMap(weakmap, object, i);
}
}
// Check increased capacity.
CHECK_EQ(128, weakmap->table()->Capacity());
// Force a full GC.
CHECK_EQ(32, weakmap->table()->NumberOfElements());
CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
HEAP->CollectAllGarbage(false);
CHECK_EQ(0, weakmap->table()->NumberOfElements());
CHECK_EQ(32, weakmap->table()->NumberOfDeletedElements());
// Check shrunk capacity.
CHECK_EQ(32, weakmap->table()->Capacity());
}


@@ -112,3 +112,9 @@ var m = new WeakMap;
assertTrue(m instanceof WeakMap);
assertTrue(WeakMap.prototype.set instanceof Function)
assertTrue(WeakMap.prototype.get instanceof Function)
// Stress Test
// There is a proposed stress-test available at the es-discuss mailing list
// which cannot be reasonably automated. Check it out by hand if you like:
// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html