diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index e1d4489d44..bbd69ecaba 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -812,9 +812,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
-    initial_map->set_scavenger(
-        Heap::GetScavenger(initial_map->instance_type(),
-                           initial_map->instance_size()));
   }
 
   {  // -- J S O N
diff --git a/src/factory.cc b/src/factory.cc
index 5b387413c3..39e881ac3d 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -277,8 +277,6 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
-  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
-                                         copy->instance_size()));
   return copy;
 }
 
diff --git a/src/heap.cc b/src/heap.cc
index a27eff1786..1b625897d1 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -799,34 +799,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };
 
 
-// A queue of objects promoted during scavenge. Each object is accompanied
-// by its size to avoid dereferencing a map pointer for scanning.
+// A queue of pointers and maps of to-be-promoted objects during a
+// scavenge collection.
 class PromotionQueue {
  public:
   void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
+    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
   }
 
   bool is_empty() { return front_ <= rear_; }
 
-  void insert(HeapObject* target, int size) {
-    *(--rear_) = reinterpret_cast<intptr_t>(target);
-    *(--rear_) = size;
+  void insert(HeapObject* object, Map* map) {
+    *(--rear_) = object;
+    *(--rear_) = map;
     // Assert no overflow into live objects.
     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
   }
 
-  void remove(HeapObject** target, int* size) {
-    *target = reinterpret_cast<HeapObject*>(*(--front_));
-    *size = static_cast<int>(*(--front_));
+  void remove(HeapObject** object, Map** map) {
+    *object = *(--front_);
+    *map = Map::cast(*(--front_));
     // Assert no underflow.
     ASSERT(front_ >= rear_);
   }
 
  private:
   // The front of the queue is higher in memory than the rear.
-  intptr_t* front_;
-  intptr_t* rear_;
+  HeapObject** front_;
+  HeapObject** rear_;
 };
 
 
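The queue rewrite above is the pivot of this patch: a promoted object's map word is overwritten with a forwarding address before the object has been copied or scanned, so the map now has to travel with the object pointer. A minimal standalone sketch of the structure, with simplified types rather than V8's actual classes:

```cpp
#include <cassert>

// Toy model of the patched PromotionQueue: a FIFO of (object, map) pairs
// growing downward from the high end of to-space. Two slots per entry is
// why live new-space objects must be at least two pointers in size.
class ToyPromotionQueue {
 public:
  void Initialize(void** high_end) { front_ = rear_ = high_end; }
  bool is_empty() const { return front_ <= rear_; }

  // Save the from-space object and its map; the object's own map word is
  // about to be overwritten with a forwarding address.
  void insert(void* object, void* map, void** allocation_top) {
    *(--rear_) = object;
    *(--rear_) = map;
    assert(rear_ >= allocation_top);  // no overflow into live objects
  }

  void remove(void** object, void** map) {
    *object = *(--front_);
    *map = *(--front_);
    assert(front_ >= rear_);  // no underflow
  }

 private:
  void** front_;  // higher in memory than rear_
  void** rear_;
};
```

Entries are popped oldest-first: front_ trails rear_ down the same buffer, and front_ <= rear_ means the reader has caught up with the writer.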
@@ -1041,26 +1041,31 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      Map* map = object->map();
-      int size = object->SizeFromMap(map);
-      object->IterateBody(map->instance_type(), size, scavenge_visitor);
-      new_space_front += size;
+      object->Iterate(scavenge_visitor);
+      new_space_front += object->Size();
     }
 
     // Promote and process all the to-be-promoted objects.
     while (!promotion_queue.is_empty()) {
-      HeapObject* target;
-      int size;
-      promotion_queue.remove(&target, &size);
+      HeapObject* source;
+      Map* map;
+      promotion_queue.remove(&source, &map);
+      // Copy the from-space object to its new location (given by the
+      // forwarding address) and fix its map.
+      HeapObject* target = source->map_word().ToForwardingAddress();
+      int size = source->SizeFromMap(map);
+      CopyBlock(target->address(), source->address(), size);
+      target->set_map(map);
 
-      // Promoted object might be already partially visited
-      // during dirty regions iteration. Thus we search specifically
-      // for pointers to from semispace instead of looking for pointers
-      // to new space.
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(target);
+#endif
+      // Visit the newly copied object for pointers to new space.
       ASSERT(!target->IsMap());
-      IterateAndMarkPointersToFromSpace(target->address(),
-                                        target->address() + size,
-                                        &ScavengePointer);
+      IterateAndMarkPointersToNewSpace(target->address(),
+                                       target->address() + size,
+                                       &ScavengePointer);
     }
 
     // Take another spin if there are now unswept objects in new space
@@ -1072,7 +1077,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-static void RecordCopiedObject(HeapObject* obj) {
+void Heap::RecordCopiedObject(HeapObject* obj) {
   bool should_record = false;
 #ifdef DEBUG
   should_record = FLAG_heap_stats;
@@ -1081,24 +1086,22 @@ static void RecordCopiedObject(HeapObject* obj) {
   should_record = should_record || FLAG_log_gc;
 #endif
   if (should_record) {
-    if (Heap::new_space()->Contains(obj)) {
-      Heap::new_space()->RecordAllocation(obj);
+    if (new_space_.Contains(obj)) {
+      new_space_.RecordAllocation(obj);
     } else {
-      Heap::new_space()->RecordPromotion(obj);
+      new_space_.RecordPromotion(obj);
     }
   }
 }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 
 
-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object. Returns the target object.
-inline static HeapObject* MigrateObject(HeapObject* source,
-                                        HeapObject* target,
-                                        int size) {
+
+HeapObject* Heap::MigrateObject(HeapObject* source,
+                                HeapObject* target,
+                                int size) {
   // Copy the content of source to target.
-  Heap::CopyBlock(target->address(), source->address(), size);
+  CopyBlock(target->address(), source->address(), size);
 
   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
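MigrateObject's contract is easy to miss in the diff: after the copy, the source's map word no longer holds a map at all. A runnable toy of that protocol (layout simplified to a single header word; not V8 code):

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// Toy illustration of MigrateObject: the first word of an object is its
// map word. After migration, the source's map word holds the target
// address, which is exactly what DoScavenge later reads back via
// ToForwardingAddress() when draining the promotion queue.
struct ToyObject {
  uintptr_t map_word;  // either a map pointer or a forwarding address
  uintptr_t payload[3];
};

ToyObject* Migrate(ToyObject* source, ToyObject* target) {
  std::memcpy(target, source, sizeof(ToyObject));          // copy contents
  source->map_word = reinterpret_cast<uintptr_t>(target);  // forward
  return target;
}

int main() {
  ToyObject from = {0x1234, {1, 2, 3}};  // 0x1234 stands in for a map
  ToyObject to;
  Migrate(&from, &to);
  // The copy kept the map; the original now forwards to the copy.
  assert(to.map_word == 0x1234);
  assert(reinterpret_cast<ToyObject*>(from.map_word) == &to);
  return 0;
}
```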
@@ -1112,272 +1115,13 @@ inline static HeapObject* MigrateObject(HeapObject* source,
 }
 
 
-enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-
-
-template<ObjectContents object_contents, SizeRestriction size_restriction>
-static inline void EvacuateObject(Map* map,
-                                  HeapObject** slot,
-                                  HeapObject* object,
-                                  int object_size) {
-  ASSERT((size_restriction != SMALL) ||
-         (object_size <= Page::kMaxHeapObjectSize));
-  ASSERT(object->Size() == object_size);
-
-  if (Heap::ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-
-    if ((size_restriction != SMALL) &&
-        (object_size > Page::kMaxHeapObjectSize)) {
-      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
-    } else {
-      if (object_contents == DATA_OBJECT) {
-        result = Heap::old_data_space()->AllocateRaw(object_size);
-      } else {
-        result = Heap::old_pointer_space()->AllocateRaw(object_size);
-      }
-    }
-
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      *slot = MigrateObject(object, target, object_size);
-
-      if (object_contents == POINTER_OBJECT) {
-        promotion_queue.insert(target, object_size);
-      }
-
-      Heap::tracer()->increment_promoted_objects_size(object_size);
-      return;
-    }
-  }
-  Object* result = Heap::new_space()->AllocateRaw(object_size);
-  ASSERT(!result->IsFailure());
-  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
-  return;
-}
-
-
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  const int object_size = object_size_in_words << kPointerSizeLog2;
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  int object_size = map->instance_size();
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateFixedArray(Map* map,
-                                      HeapObject** slot,
-                                      HeapObject* object) {
-  int object_size = FixedArray::cast(object)->FixedArraySize();
-  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateByteArray(Map* map,
-                                     HeapObject** slot,
-                                     HeapObject* object) {
-  int object_size = ByteArray::cast(object)->ByteArraySize();
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static Scavenger GetScavengerForSize(int object_size,
-                                     ObjectContents object_contents) {
-  ASSERT(IsAligned(object_size, kPointerSize));
-  ASSERT(object_size < Page::kMaxHeapObjectSize);
-
-  switch (object_size >> kPointerSizeLog2) {
-#define CASE(n)                                             \
-    case n:                                                 \
-      if (object_contents == DATA_OBJECT) {                 \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
-      } else {                                              \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
-      }
-
-    CASE(1);
-    CASE(2);
-    CASE(3);
-    CASE(4);
-    CASE(5);
-    CASE(6);
-    CASE(7);
-    CASE(8);
-    CASE(9);
-    CASE(10);
-    CASE(11);
-    CASE(12);
-    CASE(13);
-    CASE(14);
-    CASE(15);
-    CASE(16);
-    default:
-      if (object_contents == DATA_OBJECT) {
-        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
-      } else {
-        return static_cast<Scavenger>(
-            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
-      }
-
-#undef CASE
-  }
-}
-
-
-static inline void EvacuateSeqAsciiString(Map* map,
-                                          HeapObject** slot,
-                                          HeapObject* object) {
-  int object_size = SeqAsciiString::cast(object)->
-      SeqAsciiStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateSeqTwoByteString(Map* map,
-                                            HeapObject** slot,
-                                            HeapObject* object) {
-  int object_size = SeqTwoByteString::cast(object)->
-      SeqTwoByteStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline bool IsShortcutCandidate(int type) {
-  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
-}
-
-
-static inline void EvacuateShortcutCandidate(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  ASSERT(IsShortcutCandidate(map->instance_type()));
-
-  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
-    HeapObject* first =
-        HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-    *slot = first;
-
-    if (!Heap::InNewSpace(first)) {
-      object->set_map_word(MapWord::FromForwardingAddress(first));
-      return;
-    }
-
-    MapWord first_word = first->map_word();
-    if (first_word.IsForwardingAddress()) {
-      HeapObject* target = first_word.ToForwardingAddress();
-
-      *slot = target;
-      object->set_map_word(MapWord::FromForwardingAddress(target));
-      return;
-    }
-
-    first->map()->Scavenge(slot, first);
-    object->set_map_word(MapWord::FromForwardingAddress(*slot));
-    return;
-  }
-
-  int object_size = ConsString::kSize;
-  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
-}
-
-
-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
-  if (instance_type < FIRST_NONSTRING_TYPE) {
-    switch (instance_type & kStringRepresentationMask) {
-      case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
-          return &EvacuateSeqAsciiString;
-        } else {
-          return &EvacuateSeqTwoByteString;
-        }
-
-      case kConsStringTag:
-        if (IsShortcutCandidate(instance_type)) {
-          return &EvacuateShortcutCandidate;
-        } else {
-          ASSERT(instance_size == ConsString::kSize);
-          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
-        }
-
-      case kExternalStringTag:
-        ASSERT(instance_size == ExternalString::kSize);
-        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
-    }
-    UNREACHABLE();
-  }
-
-  switch (instance_type) {
-    case BYTE_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
-
-    case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
-
-    case JS_OBJECT_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_VALUE_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:
-    case JS_FUNCTION_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_BUILTINS_OBJECT_TYPE:
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
-    case ODDBALL_TYPE:
-      return NULL;
-
-    case PROXY_TYPE:
-      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
-
-    case MAP_TYPE:
-      return NULL;
-
-    case CODE_TYPE:
-      return NULL;
-
-    case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      return NULL;
-
-    case HEAP_NUMBER_TYPE:
-    case FILLER_TYPE:
-    case PIXEL_ARRAY_TYPE:
-    case EXTERNAL_BYTE_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
-    case EXTERNAL_SHORT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
-    case EXTERNAL_INT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
-    case EXTERNAL_FLOAT_ARRAY_TYPE:
-      return GetScavengerForSize(instance_size, DATA_OBJECT);
-
-    case SHARED_FUNCTION_INFO_TYPE:
-      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
-                                 POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
-        case NAME##_TYPE:
-      STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
+static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
+  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
+  ASSERT(object->map() == map);
+  InstanceType type = map->instance_type();
+  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
+  ASSERT(object->IsString() && !object->IsSymbol());
+  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
 }
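The replacement IsShortcutCandidate folds the old two-step check (a type predicate plus an empty-second test) into one helper. The tag trick is that "non-symbol cons string" is expressible as a single mask-and-compare over the instance type; a sketch with illustrative bit assignments, not V8's actual constants:

```cpp
#include <cassert>

// Illustrative instance-type encoding: low bits hold the string
// representation, one bit marks symbols. The real V8 masks differ, but
// the shortcut test has the same shape.
enum : int {
  kConsStringTag    = 0x1,
  kStringRepMask    = 0x3,
  kIsSymbolBit      = 0x20,
  kShortcutTypeMask = kStringRepMask | kIsSymbolBit,
  kShortcutTypeTag  = kConsStringTag,  // cons representation, symbol bit clear
};

// One compare stands in for several predicates: is-cons and is-not-symbol.
static bool IsShortcutType(int instance_type) {
  return (instance_type & kShortcutTypeMask) == kShortcutTypeTag;
}

int main() {
  assert(IsShortcutType(kConsStringTag));                  // plain cons string
  assert(!IsShortcutType(kConsStringTag | kIsSymbolBit));  // cons symbol
  return 0;
}
```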
@@ -1385,8 +1129,103 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
   ASSERT(InFromSpace(object));
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  map->Scavenge(p, object);
+
+  // Optimization: Bypass flattened ConsString objects.
+  if (IsShortcutCandidate(object, first_word.ToMap())) {
+    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
+    *p = object;
+    // After patching *p we have to repeat the checks that object is in the
+    // active semispace of the young generation and not already copied.
+    if (!InNewSpace(object)) return;
+    first_word = object->map_word();
+    if (first_word.IsForwardingAddress()) {
+      *p = first_word.ToForwardingAddress();
+      return;
+    }
+  }
+
+  int object_size = object->SizeFromMap(first_word.ToMap());
+  // We rely on live objects in new space to be at least two pointers,
+  // so we can store the from-space address and map pointer of promoted
+  // objects in the to space.
+  ASSERT(object_size >= 2 * kPointerSize);
+
+  // If the object should be promoted, we try to copy it to old space.
+  if (ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+    if (object_size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+
+        if (object->IsFixedArray()) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later.  Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // In large object space only fixed arrays might possibly contain
+          // intergenerational references.
+          // All other objects can be copied immediately and not revisited.
+          *p = MigrateObject(object, target, object_size);
+        }
+
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
+    } else {
+      OldSpace* target_space = Heap::TargetSpace(object);
+      ASSERT(target_space == Heap::old_pointer_space_ ||
+             target_space == Heap::old_data_space_);
+      result = target_space->AllocateRaw(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        if (target_space == Heap::old_pointer_space_) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later.  Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // Objects promoted to the data space can be copied immediately
+          // and not revisited---we will never sweep that space for
+          // pointers and the copied objects do not contain pointers to
+          // new space objects.
+          *p = MigrateObject(object, target, object_size);
+#ifdef DEBUG
+          VerifyNonPointerSpacePointersVisitor v;
+          (*p)->Iterate(&v);
+#endif
+        }
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
+    }
+  }
+  // The object should remain in new space or the old space allocation failed.
+  Object* result = new_space_.AllocateRaw(object_size);
+  // Failed allocation at this point is utterly unexpected.
+  ASSERT(!result->IsFailure());
+  *p = MigrateObject(object, HeapObject::cast(result), object_size);
 }
 
 
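The new ScavengeObjectSlow folds the whole per-map dispatch table into one decision tree. Condensed into a sketch (simplified signature; the names mirror the comments in the hunk above):

```cpp
// Which evacuation path a surviving object takes, per the logic above.
// "Copy later" means: reserve old space, queue (object, map), write a
// forwarding address, and let DoScavenge perform the actual copy.
enum EvacuationPath {
  kQueueAndCopyLater,  // may hold new-space pointers; must be re-swept
  kCopyImmediately,    // data-only; never revisited
  kStayInNewSpace,     // not promoted, or old-space allocation failed
};

EvacuationPath Classify(bool should_promote, bool allocation_ok,
                        bool is_large, bool is_fixed_array,
                        bool target_is_pointer_space) {
  if (!should_promote || !allocation_ok) return kStayInNewSpace;
  if (is_large) {
    // In large-object space only fixed arrays can carry intergenerational
    // references; everything else is migrated on the spot.
    return is_fixed_array ? kQueueAndCopyLater : kCopyImmediately;
  }
  return target_is_pointer_space ? kQueueAndCopyLater : kCopyImmediately;
}
```

The free-list-node header written over each deferred reservation keeps old space iterable while those words still contain garbage: any heap walker sees a correctly sized dead block until the queued copy lands.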
@@ -1404,8 +1243,6 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
-  reinterpret_cast<Map*>(result)->
-      set_scavenger(GetScavenger(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1422,7 +1259,6 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
-  map->set_scavenger(GetScavenger(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
@@ -3855,9 +3691,9 @@ bool Heap::IteratePointersInDirtyMapsRegion(
 }
 
 
-void Heap::IterateAndMarkPointersToFromSpace(Address start,
-                                             Address end,
-                                             ObjectSlotCallback callback) {
+void Heap::IterateAndMarkPointersToNewSpace(Address start,
+                                            Address end,
+                                            ObjectSlotCallback callback) {
   Address slot_address = start;
   Page* page = Page::FromAddress(start);
@@ -3865,7 +3701,7 @@ void Heap::IterateAndMarkPointersToNewSpace(Address start,
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InFromSpace(*slot)) {
+    if (Heap::InNewSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
       if (Heap::InNewSpace(*slot)) {
diff --git a/src/heap.h b/src/heap.h
index 1349e51199..df3ba0ea2a 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -774,12 +774,11 @@ class Heap : public AllStatic {
                                     DirtyRegionCallback visit_dirty_region,
                                     ObjectSlotCallback callback);
 
-  // Iterate pointers to from semispace of new space found in memory interval
-  // from start to end.
+  // Iterate pointers to new space found in memory interval from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToFromSpace(Address start,
-                                                Address end,
-                                                ObjectSlotCallback callback);
+  static void IterateAndMarkPointersToNewSpace(Address start,
+                                               Address end,
+                                               ObjectSlotCallback callback);
 
   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space was found.
@@ -986,8 +985,6 @@ class Heap : public AllStatic {
 
   static void RecordStats(HeapStats* stats);
 
-  static Scavenger GetScavenger(int instance_type, int instance_size);
-
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1235,7 +1232,17 @@ class Heap : public AllStatic {
     set_instanceof_cache_function(the_hole_value());
   }
 
+  // Helper function used by CopyObject to copy a source object to an
+  // allocated target object and update the forwarding pointer in the source
+  // object. Returns the target object.
+  static inline HeapObject* MigrateObject(HeapObject* source,
+                                          HeapObject* target,
+                                          int size);
+
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+  // Record the copy of an object in the NewSpace's statistics.
+  static void RecordCopiedObject(HeapObject* obj);
+
   // Record statistics before and after garbage collection.
   static void ReportStatisticsBeforeGC();
   static void ReportStatisticsAfterGC();
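The rename from IterateAndMarkPointersToFromSpace to IterateAndMarkPointersToNewSpace matches the new pipeline: a queued object is copied and scanned only after the to-space sweep, so by then its slots may legitimately point at either semispace, and the scan has to catch both. The loop's shape as a simplified sketch (toy signatures, not the V8 ones):

```cpp
// Toy model of the renamed slot-marking loop: scan a slot range, let the
// callback relocate whatever a slot points to, and re-test the slot
// afterwards -- only slots that STILL point into new space need to stay
// marked for the next scavenge.
using Slot = void**;

void IterateAndMark(Slot start, Slot end,
                    bool (*in_new_space)(void*),
                    void (*scavenge)(Slot),
                    int* marked_slots) {
  for (Slot slot = start; slot < end; ++slot) {
    if (in_new_space(*slot)) {
      scavenge(slot);        // may rewrite *slot to the object's new home
      if (in_new_space(*slot)) {
        ++*marked_slots;     // stand-in for the real page dirty-mark update
      }
    }
  }
}
```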
diff --git a/src/objects-inl.h b/src/objects-inl.h
index e764e39fcc..0e45550845 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -2060,23 +2060,6 @@ void ExternalFloatArray::set(int index, float value) {
   ptr[index] = value;
 }
 
-inline Scavenger Map::scavenger() {
-  Scavenger callback = reinterpret_cast<Scavenger>(
-      READ_INTPTR_FIELD(this, kIterateBodyCallbackOffset));
-
-  ASSERT(callback == Heap::GetScavenger(instance_type(),
-                                        instance_size()));
-
-  return callback;
-}
-
-inline void Map::set_scavenger(Scavenger callback) {
-  ASSERT(!reinterpret_cast<Object*>(
-      reinterpret_cast<intptr_t>(callback))->IsHeapObject());
-  WRITE_INTPTR_FIELD(this,
-                     kIterateBodyCallbackOffset,
-                     reinterpret_cast<intptr_t>(callback));
-}
 
 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
diff --git a/src/objects.cc b/src/objects.cc
index 10c549c6b6..e79a5505c9 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2190,8 +2190,6 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
   int new_instance_size = map()->instance_size() - instance_size_delta;
   new_map->set_inobject_properties(0);
   new_map->set_instance_size(new_instance_size);
-  new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
-                                            new_map->instance_size()));
   Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                              instance_size_delta);
 }
diff --git a/src/objects.h b/src/objects.h
index 8dfc75aa75..4a7dee6a83 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2899,7 +2899,6 @@ class Code: public HeapObject {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };
 
-typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
 
 // All heap objects have a Map that describes their structure.
 // A Map contains information about:
@@ -3101,13 +3100,6 @@ class Map: public HeapObject {
   void MapVerify();
 #endif
 
-  inline Scavenger scavenger();
-  inline void set_scavenger(Scavenger callback);
-
-  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
-    scavenger()(this, slot, obj);
-  }
-
   static const int kMaxPreAllocatedPropertyFields = 255;
 
   // Layout description.
@@ -3118,8 +3110,7 @@ class Map: public HeapObject {
   static const int kInstanceDescriptorsOffset =
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
-  static const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
-  static const int kPadStart = kIterateBodyCallbackOffset + kPointerSize;
+  static const int kPadStart = kCodeCacheOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);
 
   // Layout of pointer fields.  Heap iteration code relies on them
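A side effect worth noting from the objects.h hunk: dropping kIterateBodyCallbackOffset shrinks every Map by one pointer, modulo MAP_POINTER_ALIGN padding. The arithmetic, using an illustrative base offset rather than the real one:

```cpp
#include <cstdio>

int main() {
  const int kPointerSize = sizeof(void*);
  const int kCodeCacheOffset = 0;  // illustrative; the real offset is larger
  // Before: one extra pointer slot held the per-map scavenger callback.
  const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
  const int kPadStartBefore = kIterateBodyCallbackOffset + kPointerSize;
  // After: padding starts right after the code cache.
  const int kPadStartAfter = kCodeCacheOffset + kPointerSize;
  std::printf("unaligned Map size shrinks by %d bytes\n",
              kPadStartBefore - kPadStartAfter);
  return 0;
}
```

Removing the field is also what lets the serialize.cc hunk below go away: with no process-local function pointer stored in the map, a deserialized Map needs no post-read fixup.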
diff --git a/src/serialize.cc b/src/serialize.cc
index e8aed5496f..a6a516a76d 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -673,14 +673,6 @@ void Deserializer::ReadObject(int space_number,
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
-
-  if (space == Heap::map_space()) {
-    ASSERT(size == Map::kSize);
-    HeapObject* obj = HeapObject::FromAddress(address);
-    Map* map = reinterpret_cast<Map*>(obj);
-    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
-                                          map->instance_size()));
-  }
 }