Revert revisions 5041 and 5042 introducing virtual scavenge
behavior. It breaks debug builds with snapshots on my machine.

TBR=vegorov@chromium.org

Review URL: http://codereview.chromium.org/2983001

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5046 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
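For context, the "virtual scavenge" mechanism being reverted cached a per-Map
evacuation callback (the Scavenger typedef deleted from objects.h below), so
the scavenge loop could make one indirect call per object instead of
re-deriving type and size on every visit; the serialize.cc hunk suggests the
callback, being a raw code address, had to be recomputed after
deserialization, which is presumably where debug builds with snapshots went
wrong. A minimal self-contained sketch of that dispatch shape follows -- the
types here are simplified stand-ins, not V8's real declarations:

    #include <cstdio>

    // Hypothetical stand-ins for V8's Map and HeapObject; only the dispatch
    // shape of the reverted patch is illustrated here.
    struct Map;
    struct HeapObject { Map* map; };

    // Shape of the typedef this revert removes from objects.h:
    typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);

    struct Map {
      Scavenger scavenger;  // chosen once per type, cached in the map
    };

    // Two of the specialized evacuation routines the reverted code selected
    // from (instance_type, instance_size); bodies are placeholders here.
    static void EvacuateDataObject(Map*, HeapObject**, HeapObject* object) {
      std::printf("copy %p, no body scan needed\n", static_cast<void*>(object));
    }

    static void EvacuatePointerObject(Map*, HeapObject**, HeapObject* object) {
      std::printf("copy %p and queue it for pointer visiting\n",
                  static_cast<void*>(object));
    }

    int main() {
      Map data_map = { &EvacuateDataObject };
      Map pointer_map = { &EvacuatePointerObject };
      HeapObject a = { &data_map };
      HeapObject b = { &pointer_map };
      HeapObject* slot = &a;
      // The scavenge loop became one indirect call per object:
      a.map->scavenger(a.map, &slot, &a);
      slot = &b;
      b.map->scavenger(b.map, &slot, &b);
    }

The revert below returns to computing the dispatch inside
Heap::ScavengeObjectSlow, with no code pointer stored in the map.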
@@ -812,9 +812,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
-    initial_map->set_scavenger(
-        Heap::GetScavenger(initial_map->instance_type(),
-                           initial_map->instance_size()));
   }

   { // -- J S O N
@@ -277,8 +277,6 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
-  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
-                                         copy->instance_size()));
   return copy;
 }

src/heap.cc
@@ -799,34 +799,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };


-// A queue of objects promoted during scavenge. Each object is accompanied
-// by it's size to avoid dereferencing a map pointer for scanning.
+// A queue of pointers and maps of to-be-promoted objects during a
+// scavenge collection.
 class PromotionQueue {
  public:
   void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
+    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
   }

   bool is_empty() { return front_ <= rear_; }

-  void insert(HeapObject* target, int size) {
-    *(--rear_) = reinterpret_cast<intptr_t>(target);
-    *(--rear_) = size;
+  void insert(HeapObject* object, Map* map) {
+    *(--rear_) = object;
+    *(--rear_) = map;
     // Assert no overflow into live objects.
     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
   }

-  void remove(HeapObject** target, int* size) {
-    *target = reinterpret_cast<HeapObject*>(*(--front_));
-    *size = static_cast<int>(*(--front_));
+  void remove(HeapObject** object, Map** map) {
+    *object = *(--front_);
+    *map = Map::cast(*(--front_));
     // Assert no underflow.
     ASSERT(front_ >= rear_);
   }

  private:
   // The front of the queue is higher in memory than the rear.
-  intptr_t* front_;
-  intptr_t* rear_;
+  HeapObject** front_;
+  HeapObject** rear_;
 };


@@ -1041,26 +1041,31 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
   // queue is empty.
   while (new_space_front < new_space_.top()) {
     HeapObject* object = HeapObject::FromAddress(new_space_front);
-    Map* map = object->map();
-    int size = object->SizeFromMap(map);
-    object->IterateBody(map->instance_type(), size, scavenge_visitor);
-    new_space_front += size;
+    object->Iterate(scavenge_visitor);
+    new_space_front += object->Size();
   }

   // Promote and process all the to-be-promoted objects.
   while (!promotion_queue.is_empty()) {
-    HeapObject* target;
-    int size;
-    promotion_queue.remove(&target, &size);
+    HeapObject* source;
+    Map* map;
+    promotion_queue.remove(&source, &map);
+    // Copy the from-space object to its new location (given by the
+    // forwarding address) and fix its map.
+    HeapObject* target = source->map_word().ToForwardingAddress();
+    int size = source->SizeFromMap(map);
+    CopyBlock(target->address(), source->address(), size);
+    target->set_map(map);

-    // Promoted object might be already partially visited
-    // during dirty regions iteration. Thus we search specificly
-    // for pointers to from semispace instead of looking for pointers
-    // to new space.
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+    // Update NewSpace stats if necessary.
+    RecordCopiedObject(target);
+#endif
+    // Visit the newly copied object for pointers to new space.
     ASSERT(!target->IsMap());
-    IterateAndMarkPointersToFromSpace(target->address(),
-                                      target->address() + size,
-                                      &ScavengePointer);
+    IterateAndMarkPointersToNewSpace(target->address(),
+                                     target->address() + size,
+                                     &ScavengePointer);
   }

   // Take another spin if there are now unswept objects in new space
@@ -1072,7 +1077,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,


 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-static void RecordCopiedObject(HeapObject* obj) {
+void Heap::RecordCopiedObject(HeapObject* obj) {
   bool should_record = false;
 #ifdef DEBUG
   should_record = FLAG_heap_stats;
@@ -1081,24 +1086,22 @@ static void RecordCopiedObject(HeapObject* obj) {
   should_record = should_record || FLAG_log_gc;
 #endif
   if (should_record) {
-    if (Heap::new_space()->Contains(obj)) {
-      Heap::new_space()->RecordAllocation(obj);
+    if (new_space_.Contains(obj)) {
+      new_space_.RecordAllocation(obj);
     } else {
-      Heap::new_space()->RecordPromotion(obj);
+      new_space_.RecordPromotion(obj);
     }
   }
 }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)


-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object. Returns the target object.
-inline static HeapObject* MigrateObject(HeapObject* source,
-                                        HeapObject* target,
-                                        int size) {
+HeapObject* Heap::MigrateObject(HeapObject* source,
+                                HeapObject* target,
+                                int size) {
   // Copy the content of source to target.
-  Heap::CopyBlock(target->address(), source->address(), size);
+  CopyBlock(target->address(), source->address(), size);

   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
@@ -1112,272 +1115,13 @@ inline static HeapObject* MigrateObject(HeapObject* source,
 }


-enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-
-
-template<ObjectContents object_contents, SizeRestriction size_restriction>
-static inline void EvacuateObject(Map* map,
-                                  HeapObject** slot,
-                                  HeapObject* object,
-                                  int object_size) {
-  ASSERT((size_restriction != SMALL) ||
-         (object_size <= Page::kMaxHeapObjectSize));
-  ASSERT(object->Size() == object_size);
-
-  if (Heap::ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-
-    if ((size_restriction != SMALL) &&
-        (object_size > Page::kMaxHeapObjectSize)) {
-      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
-    } else {
-      if (object_contents == DATA_OBJECT) {
-        result = Heap::old_data_space()->AllocateRaw(object_size);
-      } else {
-        result = Heap::old_pointer_space()->AllocateRaw(object_size);
-      }
-    }
-
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      *slot = MigrateObject(object, target, object_size);
-
-      if (object_contents == POINTER_OBJECT) {
-        promotion_queue.insert(target, object_size);
-      }
-
-      Heap::tracer()->increment_promoted_objects_size(object_size);
-      return;
-    }
-  }
-  Object* result = Heap::new_space()->AllocateRaw(object_size);
-  ASSERT(!result->IsFailure());
-  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
-  return;
-}
-
-
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  const int object_size = object_size_in_words << kPointerSizeLog2;
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  int object_size = map->instance_size();
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateFixedArray(Map* map,
-                                      HeapObject** slot,
-                                      HeapObject* object) {
-  int object_size = FixedArray::cast(object)->FixedArraySize();
-  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateByteArray(Map* map,
-                                     HeapObject** slot,
-                                     HeapObject* object) {
-  int object_size = ByteArray::cast(object)->ByteArraySize();
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static Scavenger GetScavengerForSize(int object_size,
-                                     ObjectContents object_contents) {
-  ASSERT(IsAligned(object_size, kPointerSize));
-  ASSERT(object_size < Page::kMaxHeapObjectSize);
-
-  switch (object_size >> kPointerSizeLog2) {
-#define CASE(n)                                             \
-    case n:                                                 \
-      if (object_contents == DATA_OBJECT) {                 \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
-      } else {                                              \
-        return static_cast<Scavenger>(                      \
-            &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
-      }
-
-    CASE(1);
-    CASE(2);
-    CASE(3);
-    CASE(4);
-    CASE(5);
-    CASE(6);
-    CASE(7);
-    CASE(8);
-    CASE(9);
-    CASE(10);
-    CASE(11);
-    CASE(12);
-    CASE(13);
-    CASE(14);
-    CASE(15);
-    CASE(16);
-    default:
-      if (object_contents == DATA_OBJECT) {
-        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
-      } else {
-        return static_cast<Scavenger>(
-            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
-      }
-
-#undef CASE
-  }
-}
-
-
-static inline void EvacuateSeqAsciiString(Map* map,
-                                          HeapObject** slot,
-                                          HeapObject* object) {
-  int object_size = SeqAsciiString::cast(object)->
-      SeqAsciiStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateSeqTwoByteString(Map* map,
-                                            HeapObject** slot,
-                                            HeapObject* object) {
-  int object_size = SeqTwoByteString::cast(object)->
-      SeqTwoByteStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline bool IsShortcutCandidate(int type) {
-  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
-}
-
-
-static inline void EvacuateShortcutCandidate(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  ASSERT(IsShortcutCandidate(map->instance_type()));
-
-  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
-    HeapObject* first =
-        HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-    *slot = first;
-
-    if (!Heap::InNewSpace(first)) {
-      object->set_map_word(MapWord::FromForwardingAddress(first));
-      return;
-    }
-
-    MapWord first_word = first->map_word();
-    if (first_word.IsForwardingAddress()) {
-      HeapObject* target = first_word.ToForwardingAddress();
-
-      *slot = target;
-      object->set_map_word(MapWord::FromForwardingAddress(target));
-      return;
-    }
-
-    first->map()->Scavenge(slot, first);
-    object->set_map_word(MapWord::FromForwardingAddress(*slot));
-    return;
-  }
-
-  int object_size = ConsString::kSize;
-  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
-}
-
-
-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
-  if (instance_type < FIRST_NONSTRING_TYPE) {
-    switch (instance_type & kStringRepresentationMask) {
-      case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
-          return &EvacuateSeqAsciiString;
-        } else {
-          return &EvacuateSeqTwoByteString;
-        }
-
-      case kConsStringTag:
-        if (IsShortcutCandidate(instance_type)) {
-          return &EvacuateShortcutCandidate;
-        } else {
-          ASSERT(instance_size == ConsString::kSize);
-          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
-        }
-
-      case kExternalStringTag:
-        ASSERT(instance_size == ExternalString::kSize);
-        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
-    }
-    UNREACHABLE();
-  }
-
-  switch (instance_type) {
-    case BYTE_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
-
-    case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
-
-    case JS_OBJECT_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_VALUE_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:
-    case JS_FUNCTION_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_BUILTINS_OBJECT_TYPE:
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
-    case ODDBALL_TYPE:
-      return NULL;
-
-    case PROXY_TYPE:
-      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
-
-    case MAP_TYPE:
-      return NULL;
-
-    case CODE_TYPE:
-      return NULL;
-
-    case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      return NULL;
-
-    case HEAP_NUMBER_TYPE:
-    case FILLER_TYPE:
-    case PIXEL_ARRAY_TYPE:
-    case EXTERNAL_BYTE_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
-    case EXTERNAL_SHORT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
-    case EXTERNAL_INT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
-    case EXTERNAL_FLOAT_ARRAY_TYPE:
-      return GetScavengerForSize(instance_size, DATA_OBJECT);
-
-    case SHARED_FUNCTION_INFO_TYPE:
-      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
-                                 POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
-        case NAME##_TYPE:
-      STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
+static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
+  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
+  ASSERT(object->map() == map);
+  InstanceType type = map->instance_type();
+  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
+  ASSERT(object->IsString() && !object->IsSymbol());
+  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
+}


@@ -1385,8 +1129,103 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
   ASSERT(InFromSpace(object));
   MapWord first_word = object->map_word();
   ASSERT(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  map->Scavenge(p, object);
+
+  // Optimization: Bypass flattened ConsString objects.
+  if (IsShortcutCandidate(object, first_word.ToMap())) {
+    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
+    *p = object;
+    // After patching *p we have to repeat the checks that object is in the
+    // active semispace of the young generation and not already copied.
+    if (!InNewSpace(object)) return;
+    first_word = object->map_word();
+    if (first_word.IsForwardingAddress()) {
+      *p = first_word.ToForwardingAddress();
+      return;
+    }
+  }
+
+  int object_size = object->SizeFromMap(first_word.ToMap());
+  // We rely on live objects in new space to be at least two pointers,
+  // so we can store the from-space address and map pointer of promoted
+  // objects in the to space.
+  ASSERT(object_size >= 2 * kPointerSize);
+
+  // If the object should be promoted, we try to copy it to old space.
+  if (ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+    if (object_size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+
+        if (object->IsFixedArray()) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later. Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // In large object space only fixed arrays might possibly contain
+          // intergenerational references.
+          // All other objects can be copied immediately and not revisited.
+          *p = MigrateObject(object, target, object_size);
+        }
+
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
+    } else {
+      OldSpace* target_space = Heap::TargetSpace(object);
+      ASSERT(target_space == Heap::old_pointer_space_ ||
+             target_space == Heap::old_data_space_);
+      result = target_space->AllocateRaw(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        if (target_space == Heap::old_pointer_space_) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later. Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
+        } else {
+          // Objects promoted to the data space can be copied immediately
+          // and not revisited---we will never sweep that space for
+          // pointers and the copied objects do not contain pointers to
+          // new space objects.
+          *p = MigrateObject(object, target, object_size);
+#ifdef DEBUG
+          VerifyNonPointerSpacePointersVisitor v;
+          (*p)->Iterate(&v);
+#endif
+        }
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
+    }
+  }
+  // The object should remain in new space or the old space allocation failed.
+  Object* result = new_space_.AllocateRaw(object_size);
+  // Failed allocation at this point is utterly unexpected.
+  ASSERT(!result->IsFailure());
+  *p = MigrateObject(object, HeapObject::cast(result), object_size);
 }


@@ -1404,8 +1243,6 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
-  reinterpret_cast<Map*>(result)->
-      set_scavenger(GetScavenger(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1422,7 +1259,6 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
-  map->set_scavenger(GetScavenger(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
@@ -3855,9 +3691,9 @@ bool Heap::IteratePointersInDirtyMapsRegion(
 }


-void Heap::IterateAndMarkPointersToFromSpace(Address start,
-                                             Address end,
-                                             ObjectSlotCallback callback) {
+void Heap::IterateAndMarkPointersToNewSpace(Address start,
+                                            Address end,
+                                            ObjectSlotCallback callback) {
   Address slot_address = start;
   Page* page = Page::FromAddress(start);

@@ -3865,7 +3701,7 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,

   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InFromSpace(*slot)) {
+    if (Heap::InNewSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
       if (Heap::InNewSpace(*slot)) {
src/heap.h
@@ -774,12 +774,11 @@ class Heap : public AllStatic {
                                      DirtyRegionCallback visit_dirty_region,
                                      ObjectSlotCallback callback);

-  // Iterate pointers to from semispace of new space found in memory interval
-  // from start to end.
+  // Iterate pointers to new space found in memory interval from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToFromSpace(Address start,
-                                                Address end,
-                                                ObjectSlotCallback callback);
+  static void IterateAndMarkPointersToNewSpace(Address start,
+                                               Address end,
+                                               ObjectSlotCallback callback);

   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space was found.
@@ -986,8 +985,6 @@ class Heap : public AllStatic {

   static void RecordStats(HeapStats* stats);

-  static Scavenger GetScavenger(int instance_type, int instance_size);
-
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1235,7 +1232,17 @@ class Heap : public AllStatic {
     set_instanceof_cache_function(the_hole_value());
   }

+  // Helper function used by CopyObject to copy a source object to an
+  // allocated target object and update the forwarding pointer in the source
+  // object. Returns the target object.
+  static inline HeapObject* MigrateObject(HeapObject* source,
+                                          HeapObject* target,
+                                          int size);
+
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+  // Record the copy of an object in the NewSpace's statistics.
+  static void RecordCopiedObject(HeapObject* obj);
+
   // Record statistics before and after garbage collection.
   static void ReportStatisticsBeforeGC();
   static void ReportStatisticsAfterGC();
@@ -2060,23 +2060,6 @@ void ExternalFloatArray::set(int index, float value) {
   ptr[index] = value;
 }

-inline Scavenger Map::scavenger() {
-  Scavenger callback = reinterpret_cast<Scavenger>(
-      READ_INTPTR_FIELD(this, kIterateBodyCallbackOffset));
-
-  ASSERT(callback == Heap::GetScavenger(instance_type(),
-                                        instance_size()));
-
-  return callback;
-}
-
-inline void Map::set_scavenger(Scavenger callback) {
-  ASSERT(!reinterpret_cast<Object*>(
-             reinterpret_cast<intptr_t>(callback))->IsHeapObject());
-  WRITE_INTPTR_FIELD(this,
-                     kIterateBodyCallbackOffset,
-                     reinterpret_cast<intptr_t>(callback));
-}

 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
@@ -2190,8 +2190,6 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
     int new_instance_size = map()->instance_size() - instance_size_delta;
     new_map->set_inobject_properties(0);
     new_map->set_instance_size(new_instance_size);
-    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
-                                              new_map->instance_size()));
     Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                                instance_size_delta);
   }
@@ -2899,7 +2899,6 @@ class Code: public HeapObject {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };

-typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);

 // All heap objects have a Map that describes their structure.
 // A Map contains information about:
@@ -3101,13 +3100,6 @@ class Map: public HeapObject {
   void MapVerify();
 #endif

-  inline Scavenger scavenger();
-  inline void set_scavenger(Scavenger callback);
-
-  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
-    scavenger()(this, slot, obj);
-  }
-
   static const int kMaxPreAllocatedPropertyFields = 255;

   // Layout description.
@@ -3118,8 +3110,7 @@ class Map: public HeapObject {
   static const int kInstanceDescriptorsOffset =
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
-  static const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
-  static const int kPadStart = kIterateBodyCallbackOffset + kPointerSize;
+  static const int kPadStart = kCodeCacheOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);

   // Layout of pointer fields. Heap iteration code relies on them
@@ -673,14 +673,6 @@ void Deserializer::ReadObject(int space_number,
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
-
-  if (space == Heap::map_space()) {
-    ASSERT(size == Map::kSize);
-    HeapObject* obj = HeapObject::FromAddress(address);
-    Map* map = reinterpret_cast<Map*>(obj);
-    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
-                                          map->instance_size()));
-  }
 }


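A closing note on the restored PromotionQueue protocol (first heap.cc hunk
above): entries are (object pointer, map pointer) pairs written at the high
end of to-space, growing downward toward the allocation pointer, with front_
trailing rear_ so pairs come back out in insertion order. A self-contained
sketch of that protocol under simplified stand-in types (Obj and the raw
void** backing store are illustrative, not V8 code):

    #include <cassert>

    // Simplified stand-in for a heap object; the real queue stores
    // HeapObject* and Map* (see the PromotionQueue diff above).
    struct Obj { int size_in_words; };

    class PromotionQueue {
     public:
      void Initialize(void** high_water) { front_ = rear_ = high_water; }
      bool is_empty() const { return front_ <= rear_; }
      void insert(Obj* object, void* map) {  // grows downward in memory
        *(--rear_) = object;
        *(--rear_) = map;
      }
      void remove(Obj** object, void** map) {  // pops oldest entry first
        *object = static_cast<Obj*>(*(--front_));
        *map = *(--front_);
        assert(front_ >= rear_);  // no underflow past the insertion point
      }
     private:
      void** front_;  // higher in memory than rear_
      void** rear_;
    };

    int main() {
      void* backing[16];
      PromotionQueue q;
      q.Initialize(backing + 16);  // start at the high end of the buffer
      Obj a = {2}, b = {3};
      q.insert(&a, nullptr);
      q.insert(&b, nullptr);
      Obj* o;
      void* m;
      q.remove(&o, &m);  // front_ chases rear_ downward: &a comes out first
      assert(o == &a);
      q.remove(&o, &m);
      assert(o == &b);
      assert(q.is_empty());
    }

Storing the map pointer alongside the object is what lets the promoted
object's body be copied and its map restored later, after the forwarding
address has been written over the from-space map word; this is why the
restored code asserts that live new-space objects are at least two pointers
in size.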