Fix issues when stressing compaction with WeakMaps.

1) While marking the backing hash table of a WeakMap, we also need to
   record the slot, because it might be on an evacuation candidate (a
   standalone sketch of this follows the list).
2) With incremental marking, one backing hash table might be marked more
   than once, because the WeakMap might have gone through a white-to-gray
   transition.
3) In the corner case where the allocation of the backing hash table itself
   causes a GC, the WeakMap is left with an undefined table field, so we
   still need to handle that case correctly.
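
A minimal standalone sketch of the slot-recording idea from point 1. The
types and names below are made up for illustration and are not V8's real
classes: a recorded slot is simply an address the collector revisits after
evacuation, so a pointer into an evacuation candidate can be rewritten to
the object's new location.

// Illustrative sketch only (not part of this commit, not V8's API).
#include <cstdio>
#include <vector>

struct FakeHeapObject { int payload; };

// Collects slot addresses; after "evacuation" every recorded slot that
// still points at the old location is redirected to the new one.
struct FakeSlotBuffer {
  std::vector<FakeHeapObject**> slots_;
  void RecordSlot(FakeHeapObject** slot) { slots_.push_back(slot); }
  void UpdateAfterEvacuation(FakeHeapObject* from, FakeHeapObject* to) {
    for (FakeHeapObject** slot : slots_) {
      if (*slot == from) *slot = to;
    }
  }
};

int main() {
  FakeHeapObject old_table = {42};
  FakeHeapObject* table_field = &old_table;  // plays the JSWeakMap table field

  FakeSlotBuffer buffer;
  buffer.RecordSlot(&table_field);  // analogous to collector->RecordSlot(...)

  FakeHeapObject new_table = old_table;  // the "evacuated" copy of the table
  buffer.UpdateAfterEvacuation(&old_table, &new_table);

  // The field now follows the moved table instead of the stale location.
  std::printf("table payload: %d\n", table_field->payload);
  return 0;
}

Without the recorded slot, the weak map would keep pointing at the evacuated
page, which is the kind of stale pointer a stress-compaction run can provoke.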

R=vegorov@chromium.org
TEST=mjsunit/harmony/proxies-example-membrane --stress-compaction

Review URL: https://chromiumcodereview.appspot.com/9985010

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11385 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: mstarzinger@chromium.org
Date:   2012-04-19 10:32:38 +00:00
Parent: 0cfa70852a
Commit: 7d22d73ebf

2 changed files with 14 additions and 6 deletions


@@ -1152,9 +1152,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);

     // Enqueue weak map in linked list of encountered weak maps.
-    ASSERT(weak_map->next() == Smi::FromInt(0));
-    weak_map->set_next(collector->encountered_weak_maps());
-    collector->set_encountered_weak_maps(weak_map);
+    if (weak_map->next() == Smi::FromInt(0)) {
+      weak_map->set_next(collector->encountered_weak_maps());
+      collector->set_encountered_weak_maps(weak_map);
+    }

     // Skip visiting the backing hash table containing the mappings.
     int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
@@ -1170,9 +1171,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                                         object_size);

     // Mark the backing hash table without pushing it on the marking stack.
-    ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
-    ASSERT(!MarkCompactCollector::IsMarked(table));
-    collector->SetMark(table, Marking::MarkBitFrom(table));
+    Object* table_object = weak_map->table();
+    if (!table_object->IsHashTable()) return;
+    ObjectHashTable* table = ObjectHashTable::cast(table_object);
+    Object** table_slot =
+        HeapObject::RawField(weak_map, JSWeakMap::kTableOffset);
+    MarkBit table_mark = Marking::MarkBitFrom(table);
+    collector->RecordSlot(table_slot, table_slot, table);
+    if (!table_mark.Get()) collector->SetMark(table, table_mark);
+    // Recording the map slot can be skipped, because maps are not compacted.
     collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
     ASSERT(MarkCompactCollector::IsMarked(table->map()));
   }
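
The first hunk above makes the enqueue idempotent, which matters once
incremental marking can visit the same weak map twice after a white-to-gray
transition (point 2 of the description). A standalone sketch of the same
"enqueue at most once via a sentinel next pointer" pattern, with made-up
types and, unlike V8 (which uses Smi::FromInt(0) for both roles), a separate
end-of-list sentinel for clarity:

// Illustrative sketch only (not part of this commit, not V8's API).
#include <cassert>

struct FakeWeakMap {
  FakeWeakMap* next = nullptr;  // nullptr means "not on the list yet"
};

// Distinct end-of-list marker, so "not enqueued" and "last element" stay
// distinguishable in this sketch.
static FakeWeakMap kListTail;

struct FakeCollector {
  FakeWeakMap* encountered = &kListTail;

  // Counterpart of the guarded enqueue in the hunk above.
  void VisitWeakMap(FakeWeakMap* weak_map) {
    if (weak_map->next == nullptr) {
      weak_map->next = encountered;
      encountered = weak_map;
    }
  }
};

int main() {
  FakeCollector collector;
  FakeWeakMap a, b;
  collector.VisitWeakMap(&a);
  collector.VisitWeakMap(&b);
  collector.VisitWeakMap(&a);  // revisit after a white-to-gray transition
  // The list is b -> a -> tail; 'a' was not threaded in a second time.
  assert(collector.encountered == &b);
  assert(b.next == &a);
  assert(a.next == &kListTail);
  return 0;
}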


@@ -65,6 +65,7 @@ static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
 TEST(Weakness) {
+  FLAG_incremental_marking = false;
   LocalContext context;
   v8::HandleScope scope;
   Handle<JSWeakMap> weakmap = AllocateJSWeakMap();