Merge ObjectIterator::has_next and ObjectIterator::next methods.

This reduces the chances of improper usage; see
http://code.google.com/p/v8/issues/detail?id=586 for more details.

BUG=586

Review URL: http://codereview.chromium.org/555072

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3696 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: antonm@chromium.org
Date:   2010-01-25 22:53:18 +00:00
Commit: 29af9c54a4 (parent 921540510c)

14 changed files with 118 additions and 171 deletions
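For orientation, here is a minimal sketch of the caller-side change this patch applies throughout the tree. The Process() helper is hypothetical; only the loop shape is taken from the patch:

  static void VisitHeap() {
    HeapIterator iterator;
    // Old protocol: has_next() had side effects and had to be called before
    // every next() (see bug 586), which was easy to get wrong:
    //   while (iterator.has_next()) Process(iterator.next());
    // New protocol: next() returns NULL once the heap is exhausted, so a
    // single loop suffices and there is no call order to get wrong.
    for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
      Process(obj);  // hypothetical per-object callback
    }
  }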

src/debug.cc

@@ -1695,9 +1695,7 @@ void Debug::CreateScriptCache() {
// Scan heap for Script objects.
int count = 0;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (obj->IsScript() && Script::cast(obj)->HasValidSource()) {
script_cache_->Add(Handle<Script>(Script::cast(obj)));
count++;

src/heap-profiler.cc

@@ -625,8 +625,7 @@ void HeapProfiler::WriteSample() {
ConstructorHeapProfile js_cons_profile;
RetainerHeapProfile js_retainer_profile;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
CollectStats(obj, info);
js_cons_profile.CollectStats(obj);
js_retainer_profile.CollectStats(obj);

src/heap.cc

@@ -327,7 +327,7 @@ void Heap::GarbageCollectionPrologue() {
int Heap::SizeOfObjects() {
int total = 0;
AllSpaces spaces;
while (Space* space = spaces.next()) {
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
total += space->Size();
}
return total;
@@ -732,13 +732,14 @@ static void VerifyNonPointerSpacePointers() {
// do not expect them.
VerifyNonPointerSpacePointersVisitor v;
HeapObjectIterator code_it(Heap::code_space());
while (code_it.has_next()) {
HeapObject* object = code_it.next();
for (HeapObject* object = code_it.next();
object != NULL; object = code_it.next())
object->Iterate(&v);
}
HeapObjectIterator data_it(Heap::old_data_space());
while (data_it.has_next()) data_it.next()->Iterate(&v);
for (HeapObject* object = data_it.next();
object != NULL; object = data_it.next())
object->Iterate(&v);
}
#endif
@@ -804,8 +805,8 @@ void Heap::Scavenge() {
// Copy objects reachable from cells by scavenging cell values directly.
HeapObjectIterator cell_iterator(cell_space_);
while (cell_iterator.has_next()) {
HeapObject* cell = cell_iterator.next();
for (HeapObject* cell = cell_iterator.next();
cell != NULL; cell = cell_iterator.next()) {
if (cell->IsJSGlobalPropertyCell()) {
Address value_address =
reinterpret_cast<Address>(cell) +
@@ -1013,13 +1014,15 @@ void Heap::RebuildRSets() {
void Heap::RebuildRSets(PagedSpace* space) {
HeapObjectIterator it(space);
while (it.has_next()) Heap::UpdateRSet(it.next());
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
Heap::UpdateRSet(obj);
}
void Heap::RebuildRSets(LargeObjectSpace* space) {
LargeObjectIterator it(space);
while (it.has_next()) Heap::UpdateRSet(it.next());
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
Heap::UpdateRSet(obj);
}
@@ -3106,7 +3109,8 @@ void Heap::Print() {
if (!HasBeenSetup()) return;
Top::PrintStack();
AllSpaces spaces;
while (Space* space = spaces.next()) space->Print();
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Print();
}
@@ -3648,7 +3652,8 @@ void Heap::TearDown() {
void Heap::Shrink() {
// Try to shrink all paged spaces.
PagedSpaces spaces;
while (PagedSpace* space = spaces.next()) space->Shrink();
for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
space->Shrink();
}
@@ -3657,7 +3662,8 @@ void Heap::Shrink() {
void Heap::Protect() {
if (HasBeenSetup()) {
AllSpaces spaces;
while (Space* space = spaces.next()) space->Protect();
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Protect();
}
}
@@ -3665,7 +3671,8 @@ void Heap::Protect() {
void Heap::Unprotect() {
if (HasBeenSetup()) {
AllSpaces spaces;
while (Space* space = spaces.next()) space->Unprotect();
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Unprotect();
}
}
@@ -3837,34 +3844,25 @@ void HeapIterator::Shutdown() {
}
bool HeapIterator::has_next() {
HeapObject* HeapIterator::next() {
// No iterator means we are done.
if (object_iterator_ == NULL) return false;
if (object_iterator_ == NULL) return NULL;
if (object_iterator_->has_next_object()) {
if (HeapObject* obj = object_iterator_->next_object()) {
// If the current iterator has more objects we are fine.
return true;
return obj;
} else {
// Go through the spaces looking for one that has objects.
while (space_iterator_->has_next()) {
object_iterator_ = space_iterator_->next();
if (object_iterator_->has_next_object()) {
return true;
if (HeapObject* obj = object_iterator_->next_object()) {
return obj;
}
}
}
// Done with the last space.
object_iterator_ = NULL;
return false;
}
HeapObject* HeapIterator::next() {
if (has_next()) {
return object_iterator_->next_object();
} else {
return NULL;
}
return NULL;
}

src/heap.h

@@ -1290,7 +1290,6 @@ class HeapIterator BASE_EMBEDDED {
explicit HeapIterator();
virtual ~HeapIterator();
bool has_next();
HeapObject* next();
void reset();

src/log.cc

@@ -1237,9 +1237,7 @@ static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis) {
AssertNoAllocation no_alloc;
int compiled_funcs_count = 0;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (!obj->IsSharedFunctionInfo()) continue;
SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
if (sfi->is_compiled()
@@ -1352,9 +1350,7 @@ void Logger::LogCompiledFunctions() {
void Logger::LogFunctionObjects() {
AssertNoAllocation no_alloc;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (!obj->IsJSFunction()) continue;
JSFunction* jsf = JSFunction::cast(obj);
if (!jsf->is_compiled()) continue;
@@ -1366,9 +1362,7 @@ void Logger::LogFunctionObjects() {
void Logger::LogAccessorCallbacks() {
AssertNoAllocation no_alloc;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (!obj->IsAccessorInfo()) continue;
AccessorInfo* ai = AccessorInfo::cast(obj);
if (!ai->name()->IsString()) continue;

src/mark-compact.cc

@@ -129,7 +129,8 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
#endif
PagedSpaces spaces;
while (PagedSpace* space = spaces.next()) {
for (PagedSpace* space = spaces.next();
space != NULL; space = spaces.next()) {
space->PrepareForMarkCompact(compacting_collection_);
}
@@ -172,7 +173,7 @@ void MarkCompactCollector::Finish() {
int old_gen_used = 0;
OldSpaces spaces;
while (OldSpace* space = spaces.next()) {
for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
old_gen_recoverable += space->Waste() + space->AvailableFree();
old_gen_used += space->Size();
}
@@ -475,8 +476,8 @@ void MarkCompactCollector::MarkDescriptorArray(
void MarkCompactCollector::CreateBackPointers() {
HeapObjectIterator iterator(Heap::map_space());
while (iterator.has_next()) {
Object* next_object = iterator.next();
for (HeapObject* next_object = iterator.next();
next_object != NULL; next_object = iterator.next()) {
if (next_object->IsMap()) { // Could also be ByteArray on free list.
Map* map = Map::cast(next_object);
if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
@@ -509,8 +510,7 @@ static void ScanOverflowedObjects(T* it) {
// so that we don't waste effort pointlessly scanning for objects.
ASSERT(!marking_stack.is_full());
while (it->has_next()) {
HeapObject* object = it->next();
for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
if (object->IsOverflowed()) {
object->ClearOverflow();
ASSERT(object->IsMarked());
@@ -793,8 +793,9 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
// scan the descriptor arrays of those maps, not all maps.
// All of these actions are carried out only on maps of JSObjects
// and related subtypes.
while (map_iterator.has_next()) {
Map* map = reinterpret_cast<Map*>(map_iterator.next());
for (HeapObject* obj = map_iterator.next();
obj != NULL; obj = map_iterator.next()) {
Map* map = reinterpret_cast<Map*>(obj);
if (!map->IsMarked() && map->IsByteArray()) continue;
ASSERT(SafeIsMap(map));
@@ -1282,8 +1283,7 @@ class MapCompact {
MapIterator it;
HeapObject* o = it.next();
for (; o != first_map_to_evacuate_; o = it.next()) {
it.has_next(); // Must be called for side-effects, see bug 586.
ASSERT(it.has_next());
ASSERT(o != NULL);
Map* map = reinterpret_cast<Map*>(o);
ASSERT(!map->IsMarked());
ASSERT(!map->IsOverflowed());
@@ -1309,10 +1309,8 @@ class MapCompact {
void UpdateMapPointersInLargeObjectSpace() {
LargeObjectIterator it(Heap::lo_space());
while (true) {
if (!it.has_next()) break;
UpdateMapPointersInObject(it.next());
}
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
UpdateMapPointersInObject(obj);
}
void Finish() {
@@ -1355,9 +1353,8 @@ class MapCompact {
static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
while (true) {
it->has_next(); // Must be called for side-effects, see bug 586.
ASSERT(it->has_next());
HeapObject* next = it->next();
ASSERT(next != NULL);
if (next == last)
return NULL;
ASSERT(!next->IsOverflowed());
@@ -1446,8 +1443,9 @@ class MapCompact {
if (!FLAG_enable_slow_asserts)
return;
while (map_to_evacuate_it_.has_next())
ASSERT(FreeListNode::IsFreeListNode(map_to_evacuate_it_.next()));
for (HeapObject* obj = map_to_evacuate_it_.next();
obj != NULL; obj = map_to_evacuate_it_.next())
ASSERT(FreeListNode::IsFreeListNode(obj));
}
#endif
};
@@ -1480,7 +1478,8 @@ void MarkCompactCollector::SweepSpaces() {
map_compact.FinishMapSpace();
PagedSpaces spaces;
while (PagedSpace* space = spaces.next()) {
for (PagedSpace* space = spaces.next();
space != NULL; space = spaces.next()) {
if (space == Heap::map_space()) continue;
map_compact.UpdateMapPointersInPagedSpace(space);
}
@@ -1655,7 +1654,8 @@ void MarkCompactCollector::UpdatePointers() {
// Large objects do not move, the map word can be updated directly.
LargeObjectIterator it(Heap::lo_space());
while (it.has_next()) UpdatePointersInNewObject(it.next());
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
UpdatePointersInNewObject(obj);
USE(live_maps);
USE(live_pointer_olds);
@@ -1819,7 +1819,8 @@ void MarkCompactCollector::RelocateObjects() {
Page::set_rset_state(Page::IN_USE);
#endif
PagedSpaces spaces;
while (PagedSpace* space = spaces.next()) space->MCCommitRelocationInfo();
for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
space->MCCommitRelocationInfo();
}

src/runtime.cc

@@ -7212,9 +7212,8 @@ Object* Runtime::FindSharedFunctionInfoInScript(Handle<Script> script,
Handle<SharedFunctionInfo> last;
while (!done) {
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
for (HeapObject* obj = iterator.next();
obj != NULL; obj = iterator.next()) {
if (obj->IsSharedFunctionInfo()) {
Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(obj));
if (shared->script() == *script) {
@@ -7670,10 +7669,10 @@ static int DebugReferencedBy(JSObject* target,
int count = 0;
JSObject* last = NULL;
HeapIterator iterator;
while (iterator.has_next() &&
HeapObject* heap_obj = NULL;
while (((heap_obj = iterator.next()) != NULL) &&
(max_references == 0 || count < max_references)) {
// Only look at all JSObjects.
HeapObject* heap_obj = iterator.next();
if (heap_obj->IsJSObject()) {
// Skip context extension objects and argument arrays as these are
// checked in the context of functions using them.
@@ -7783,10 +7782,10 @@ static int DebugConstructedBy(JSFunction* constructor, int max_references,
// Iterate the heap.
int count = 0;
HeapIterator iterator;
while (iterator.has_next() &&
HeapObject* heap_obj = NULL;
while (((heap_obj = iterator.next()) != NULL) &&
(max_references == 0 || count < max_references)) {
// Only look at all JSObjects.
HeapObject* heap_obj = iterator.next();
if (heap_obj->IsJSObject()) {
JSObject* obj = JSObject::cast(heap_obj);
if (obj->map()->constructor() == constructor) {
@@ -7934,8 +7933,8 @@ static Handle<Object> Runtime_GetScriptFromScriptName(
// script data.
Handle<Script> script;
HeapIterator iterator;
while (script.is_null() && iterator.has_next()) {
HeapObject* obj = iterator.next();
HeapObject* obj = NULL;
while (script.is_null() && ((obj = iterator.next()) != NULL)) {
// If a script is found check if it has the script data requested.
if (obj->IsScript()) {
if (Script::cast(obj)->name()->IsString()) {

src/spaces-inl.h

@@ -35,32 +35,6 @@ namespace v8 {
namespace internal {
// -----------------------------------------------------------------------------
// HeapObjectIterator
bool HeapObjectIterator::has_next() {
if (cur_addr_ < cur_limit_) {
return true; // common case
}
ASSERT(cur_addr_ == cur_limit_);
return HasNextInNextPage(); // slow path
}
HeapObject* HeapObjectIterator::next() {
ASSERT(has_next());
HeapObject* obj = HeapObject::FromAddress(cur_addr_);
int obj_size = (size_func_ == NULL) ? obj->Size() : size_func_(obj);
ASSERT_OBJECT_SIZE(obj_size);
cur_addr_ += obj_size;
ASSERT(cur_addr_ <= cur_limit_);
return obj;
}
// -----------------------------------------------------------------------------
// PageIterator

src/spaces.cc

@@ -82,8 +82,8 @@ void HeapObjectIterator::Initialize(Address cur, Address end,
}
bool HeapObjectIterator::HasNextInNextPage() {
if (cur_addr_ == end_addr_) return false;
HeapObject* HeapObjectIterator::FromNextPage() {
if (cur_addr_ == end_addr_) return NULL;
Page* cur_page = Page::FromAllocationTop(cur_addr_);
cur_page = cur_page->next_page();
@@ -92,12 +92,12 @@ bool HeapObjectIterator::HasNextInNextPage() {
cur_addr_ = cur_page->ObjectAreaStart();
cur_limit_ = (cur_page == end_page_) ? end_addr_ : cur_page->AllocationTop();
if (cur_addr_ == end_addr_) return false;
if (cur_addr_ == end_addr_) return NULL;
ASSERT(cur_addr_ < cur_limit_);
#ifdef DEBUG
Verify();
#endif
return true;
return FromCurrentPage();
}
@@ -1437,7 +1437,8 @@ void NewSpace::ClearHistograms() {
void NewSpace::CollectStatistics() {
ClearHistograms();
SemiSpaceIterator it(this);
while (it.has_next()) RecordAllocation(it.next());
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
RecordAllocation(obj);
}
@@ -2054,8 +2055,7 @@ static void CollectCommentStatistics(RelocIterator* it) {
// - by code comment
void PagedSpace::CollectCodeStatistics() {
HeapObjectIterator obj_it(this);
while (obj_it.has_next()) {
HeapObject* obj = obj_it.next();
for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
if (obj->IsCode()) {
Code* code = Code::cast(obj);
code_kind_statistics[code->kind()] += code->Size();
@@ -2157,7 +2157,8 @@ void OldSpace::ReportStatistics() {
ClearHistograms();
HeapObjectIterator obj_it(this);
while (obj_it.has_next()) { CollectHistogramInfo(obj_it.next()); }
for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next())
CollectHistogramInfo(obj);
ReportHistogram(true);
}
@@ -2393,7 +2394,8 @@ void FixedSpace::ReportStatistics() {
ClearHistograms();
HeapObjectIterator obj_it(this);
while (obj_it.has_next()) { CollectHistogramInfo(obj_it.next()); }
for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next())
CollectHistogramInfo(obj);
ReportHistogram(false);
}
@@ -2462,7 +2464,8 @@ LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space,
HeapObject* LargeObjectIterator::next() {
ASSERT(has_next());
if (current_ == NULL) return NULL;
HeapObject* object = current_->GetObject();
current_ = current_->next();
return object;
@@ -2639,8 +2642,7 @@ void LargeObjectSpace::ClearRSet() {
ASSERT(Page::is_rset_in_use());
LargeObjectIterator it(this);
while (it.has_next()) {
HeapObject* object = it.next();
for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
// We only have code, sequential strings, or fixed arrays in large
// object space, and only fixed arrays need remembered set support.
if (object->IsFixedArray()) {
@@ -2668,11 +2670,10 @@ void LargeObjectSpace::IterateRSet(ObjectSlotCallback copy_object_func) {
30);
LargeObjectIterator it(this);
while (it.has_next()) {
for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
// We only have code, sequential strings, or fixed arrays in large
// object space, and only fixed arrays can possibly contain pointers to
// the young generation.
HeapObject* object = it.next();
if (object->IsFixedArray()) {
// Iterate the normal page remembered set range.
Page* page = Page::FromAddress(object->address());
@@ -2798,8 +2799,8 @@ void LargeObjectSpace::Verify() {
void LargeObjectSpace::Print() {
LargeObjectIterator it(this);
while (it.has_next()) {
it.next()->Print();
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
obj->Print();
}
}
@@ -2809,9 +2810,9 @@ void LargeObjectSpace::ReportStatistics() {
int num_objects = 0;
ClearHistograms();
LargeObjectIterator it(this);
while (it.has_next()) {
for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
num_objects++;
CollectHistogramInfo(it.next());
CollectHistogramInfo(obj);
}
PrintF(" number of objects %d\n", num_objects);
@@ -2821,8 +2822,7 @@ void LargeObjectSpace::ReportStatistics() {
void LargeObjectSpace::CollectCodeStatistics() {
LargeObjectIterator obj_it(this);
while (obj_it.has_next()) {
HeapObject* obj = obj_it.next();
for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) {
if (obj->IsCode()) {
Code* code = Code::cast(obj);
code_kind_statistics[code->kind()] += code->Size();
@@ -2833,8 +2833,7 @@ void LargeObjectSpace::CollectCodeStatistics() {
void LargeObjectSpace::PrintRSet() {
LargeObjectIterator it(this);
while (it.has_next()) {
HeapObject* object = it.next();
for (HeapObject* object = it.next(); object != NULL; object = it.next()) {
if (object->IsFixedArray()) {
Page* page = Page::FromAddress(object->address());

src/spaces.h

@@ -597,15 +597,14 @@ class MemoryAllocator : public AllStatic {
// Interface for heap object iterator to be implemented by all object space
// object iterators.
//
// NOTE: The space-specific object iterators also implement their own has_next()
// and next() methods, which are used to avoid using virtual functions when
// NOTE: The space-specific object iterators also implement their own next()
// method, which is used to avoid using virtual functions when
// iterating a specific space.
class ObjectIterator : public Malloced {
public:
virtual ~ObjectIterator() { }
virtual bool has_next_object() = 0;
virtual HeapObject* next_object() = 0;
};
@@ -645,11 +644,11 @@ class HeapObjectIterator: public ObjectIterator {
Address start,
HeapObjectCallback size_func);
inline bool has_next();
inline HeapObject* next();
inline HeapObject* next() {
return (cur_addr_ < cur_limit_) ? FromCurrentPage() : FromNextPage();
}
// implementation of ObjectIterator.
virtual bool has_next_object() { return has_next(); }
virtual HeapObject* next_object() { return next(); }
private:
@@ -659,9 +658,21 @@ class HeapObjectIterator: public ObjectIterator {
HeapObjectCallback size_func_; // size function
Page* end_page_; // caches the page of the end address
// Slow path of has_next, checks whether there are more objects in
// the next page.
bool HasNextInNextPage();
HeapObject* FromCurrentPage() {
ASSERT(cur_addr_ < cur_limit_);
HeapObject* obj = HeapObject::FromAddress(cur_addr_);
int obj_size = (size_func_ == NULL) ? obj->Size() : size_func_(obj);
ASSERT_OBJECT_SIZE(obj_size);
cur_addr_ += obj_size;
ASSERT(cur_addr_ <= cur_limit_);
return obj;
}
// Slow path of next, goes into the next page.
HeapObject* FromNextPage();
// Initializes fields.
void Initialize(Address start, Address end, HeapObjectCallback size_func);
@@ -1206,10 +1217,8 @@ class SemiSpaceIterator : public ObjectIterator {
SemiSpaceIterator(NewSpace* space, HeapObjectCallback size_func);
SemiSpaceIterator(NewSpace* space, Address start);
bool has_next() {return current_ < limit_; }
HeapObject* next() {
ASSERT(has_next());
if (current_ == limit_) return NULL;
HeapObject* object = HeapObject::FromAddress(current_);
int size = (size_func_ == NULL) ? object->Size() : size_func_(object);
@@ -1219,7 +1228,6 @@ class SemiSpaceIterator : public ObjectIterator {
}
// Implementation of the ObjectIterator functions.
virtual bool has_next_object() { return has_next(); }
virtual HeapObject* next_object() { return next(); }
private:
@@ -1800,11 +1808,9 @@ class MapSpace : public FixedSpace {
int pages_left = live_maps / kMapsPerPage;
PageIterator it(this, PageIterator::ALL_PAGES);
while (pages_left-- > 0) {
it.has_next(); // Must be called for side-effects, see bug 586.
ASSERT(it.has_next());
it.next()->ClearRSet();
}
it.has_next(); // Must be called for side-effects, see bug 586.
ASSERT(it.has_next());
Page* top_page = it.next();
top_page->ClearRSet();
@@ -2054,11 +2060,9 @@ class LargeObjectIterator: public ObjectIterator {
explicit LargeObjectIterator(LargeObjectSpace* space);
LargeObjectIterator(LargeObjectSpace* space, HeapObjectCallback size_func);
bool has_next() { return current_ != NULL; }
HeapObject* next();
// implementation of ObjectIterator.
virtual bool has_next_object() { return has_next(); }
virtual HeapObject* next_object() { return next(); }
private:
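
The spaces.h hunks above define the new iterator contract: each concrete iterator keeps a non-virtual next() whose common case is inlined (FromCurrentPage), falls back to an out-of-line slow path when the current page is exhausted (FromNextPage), and returns NULL at the very end; the virtual next_object() simply forwards to it. Below is a self-contained sketch of that shape, with illustrative names (Obj, ObjectIteratorSketch, ChunkedIteratorSketch) and a std::vector-backed "space" standing in for V8's pages:

  #include <cstddef>
  #include <vector>

  struct Obj { int value; };

  class ObjectIteratorSketch {
   public:
    virtual ~ObjectIteratorSketch() {}
    virtual Obj* next_object() = 0;  // NULL signals exhaustion, as in the new API.
  };

  class ChunkedIteratorSketch : public ObjectIteratorSketch {
   public:
    explicit ChunkedIteratorSketch(std::vector<std::vector<Obj> >* chunks)
        : chunks_(chunks), chunk_(0), index_(0) {}

    // Non-virtual fast path: stay inside the current chunk when possible.
    Obj* next() {
      if (chunk_ < chunks_->size() && index_ < (*chunks_)[chunk_].size())
        return &(*chunks_)[chunk_][index_++];
      return FromNextChunk();  // slow path; may return NULL
    }

    // ObjectIterator implementation just forwards to the non-virtual version.
    virtual Obj* next_object() { return next(); }

   private:
    // Slow path: advance to the next non-empty chunk or report exhaustion.
    Obj* FromNextChunk() {
      index_ = 0;
      while (++chunk_ < chunks_->size()) {
        if (!(*chunks_)[chunk_].empty()) return &(*chunks_)[chunk_][index_++];
      }
      return NULL;
    }

    std::vector<std::vector<Obj> >* chunks_;
    size_t chunk_;
    size_t index_;
  };

Callers then use the same for (Obj* o = it.next(); o != NULL; o = it.next()) pattern shown throughout this patch.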

test/cctest/test-api.cc

@@ -6236,8 +6236,16 @@ THREADED_TEST(LockUnlockLock) {
}
static int GetSurvivingGlobalObjectsCount() {
static int GetGlobalObjectsCount() {
int count = 0;
v8::internal::HeapIterator it;
for (i::HeapObject* object = it.next(); object != NULL; object = it.next())
if (object->IsJSGlobalObject()) count++;
return count;
}
static int GetSurvivingGlobalObjectsCount() {
// We need to collect all garbage twice to be sure that everything
// has been collected. This is because inline caches are cleared in
// the first garbage collection but some of the maps have already
@@ -6245,13 +6253,7 @@ static int GetSurvivingGlobalObjectsCount() {
// collected until the second garbage collection.
v8::internal::Heap::CollectAllGarbage(false);
v8::internal::Heap::CollectAllGarbage(false);
v8::internal::HeapIterator it;
while (it.has_next()) {
v8::internal::HeapObject* object = it.next();
if (object->IsJSGlobalObject()) {
count++;
}
}
int count = GetGlobalObjectsCount();
#ifdef DEBUG
if (count > 0) v8::internal::Heap::TracePathToGlobal();
#endif
@@ -8621,17 +8623,6 @@ THREADED_TEST(SpaghettiStackReThrow) {
}
static int GetGlobalObjectsCount() {
int count = 0;
v8::internal::HeapIterator it;
while (it.has_next()) {
v8::internal::HeapObject* object = it.next();
if (object->IsJSGlobalObject()) count++;
}
return count;
}
TEST(Regress528) {
v8::V8::Initialize();

test/cctest/test-debug.cc

@@ -413,9 +413,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
// Iterate the heap and check that there are no debugger-related objects left.
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
CHECK(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
CHECK(!obj->IsDebugInfo());
CHECK(!obj->IsBreakPointInfo());

test/cctest/test-heap-profiler.cc

@@ -64,10 +64,8 @@ TEST(ConstructorProfile) {
ConstructorHeapProfileTestHelper cons_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
while (iterator.has_next()) {
i::HeapObject* obj = iterator.next();
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
cons_profile.CollectStats(obj);
}
CHECK_EQ(0, cons_profile.f_count());
cons_profile.PrintStats();
CHECK_EQ(2, cons_profile.f_count());
@@ -375,10 +373,8 @@ TEST(RetainerProfile) {
RetainerHeapProfile ret_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
while (iterator.has_next()) {
i::HeapObject* obj = iterator.next();
for (i::HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next())
ret_profile.CollectStats(obj);
}
RetainerProfilePrinter printer;
ret_profile.DebugPrintStats(&printer);
const char* retainers_of_a = printer.GetRetainers("A");

test/cctest/test-heap.cc

@@ -746,16 +746,13 @@ static int ObjectsFoundInHeap(Handle<Object> objs[], int size) {
// Count the number of objects found in the heap.
int found_count = 0;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
CHECK(obj != NULL);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
for (int i = 0; i < size; i++) {
if (*objs[i] == obj) {
found_count++;
}
}
}
CHECK(!iterator.has_next());
return found_count;
}