[heap] Do not invoke GC to make heap iterable.

Remove kMakeHeapIterableMask since the heap is always iterable.

BUG=chromium:580959
LOG=n

Review-Url: https://codereview.chromium.org/1961373003
Cr-Commit-Position: refs/heads/master@{#36333}
commit 7c1cac4888 (parent 3fef34e023)
Author: hpayer, 2016-05-18 11:02:11 -07:00; committed by Commit bot
9 changed files with 14 additions and 50 deletions
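
The recurring pattern in the hunks below: call sites that triggered a GC purely to make the heap iterable (Logger, Debug) drop the collection outright, while call sites that still want a full GC for other reasons (heap snapshots) pass Heap::kAbortIncrementalMarkingMask, the flag that kMakeHeapIterableMask was an alias for. A minimal before/after sketch, not taken from the diff ("why" is a placeholder reason string):

    // Before this CL: force a full GC so that a HeapIterator constructed
    // afterwards visits every live object.
    heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "why");
    HeapIterator iterator(heap);

    // After this CL: the heap is always iterable; keep a GC only for its
    // other effects, e.g. aborting in-progress incremental marking.
    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask, "why");
    HeapIterator iterator(heap);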

diff --git a/src/debug/debug.cc b/src/debug/debug.cc

@@ -1319,10 +1319,6 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
     }
   }

-  // Make sure we abort incremental marking.
-  isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                                      "prepare for break points");
-
   bool is_interpreted = shared->HasBytecodeArray();

   {

diff --git a/src/heap/heap.cc b/src/heap/heap.cc

@@ -893,7 +893,8 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   isolate()->ClearSerializerData();
-  set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
+  set_current_gc_flags(kAbortIncrementalMarkingMask |
+                       kReduceMemoryFootprintMask);
   isolate_->compilation_cache()->Clear();
   const int kMaxNumberOfAttempts = 7;
   const int kMinNumberOfAttempts = 2;
@@ -4045,22 +4046,10 @@ AllocationResult Heap::AllocateStruct(InstanceType type) {
 }

-bool Heap::IsHeapIterable() {
-  // TODO(hpayer): This function is not correct. Allocation folding in old
-  // space breaks the iterability.
-  return new_space_top_after_last_gc_ == new_space()->top();
-}
-
 void Heap::MakeHeapIterable() {
-  DCHECK(AllowHeapAllocation::IsAllowed());
-  if (!IsHeapIterable()) {
-    CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
-  }
   if (mark_compact_collector()->sweeping_in_progress()) {
     mark_compact_collector()->EnsureSweepingCompleted();
   }
-  DCHECK(IsHeapIterable());
 }
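
After this hunk, Heap::MakeHeapIterable no longer allocates or triggers a collection; it only waits for concurrent sweeping to finish. The resulting function, reconstructed from the kept lines above:

    void Heap::MakeHeapIterable() {
      // Iteration only requires that concurrent sweeping has finished;
      // no garbage collection is needed anymore.
      if (mark_compact_collector()->sweeping_in_progress()) {
        mark_compact_collector()->EnsureSweepingCompleted();
      }
    }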
@@ -4605,10 +4594,7 @@ void Heap::Verify() {
   CHECK(HasBeenSetUp());
   HandleScope scope(isolate());

-  if (mark_compact_collector()->sweeping_in_progress()) {
-    // We have to wait here for the sweeper threads to have an iterable heap.
-    mark_compact_collector()->EnsureSweepingCompleted();
-  }
+  MakeHeapIterable();

   VerifyPointersVisitor visitor;
   IterateRoots(&visitor, VISIT_ONLY_STRONG);

diff --git a/src/heap/heap.h b/src/heap/heap.h

@@ -541,9 +541,6 @@ class Heap {
   static const int kAbortIncrementalMarkingMask = 2;
   static const int kFinalizeIncrementalMarkingMask = 4;

-  // Making the heap iterable requires us to abort incremental marking.
-  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
-
   // The roots that have an index less than this are always in old space.
   static const int kOldSpaceRoots = 0x20;
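
Since the removed alias was defined as kAbortIncrementalMarkingMask, rewriting the remaining flag-passing call sites is purely mechanical and does not change GC behavior. Sketch of the equivalence (not a line from the diff; "why" is a placeholder):

    // kMakeHeapIterableMask == kAbortIncrementalMarkingMask == 2, so:
    heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "why");         // old
    heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask, "why");  // new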
@@ -664,9 +661,6 @@ class Heap {
   // Converts the given boolean condition to JavaScript boolean value.
   inline Oddball* ToBoolean(bool condition);

-  // Check whether the heap is currently iterable.
-  bool IsHeapIterable();
-
   // Notify the heap that a context has been disposed.
   int NotifyContextDisposed(bool dependant_context);
@@ -1033,9 +1027,7 @@ class Heap {
       AllocationSpace space, const char* gc_reason = NULL,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

-  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
-  // non-zero, then the slower precise sweeper is used, which leaves the heap
-  // in a state where we can iterate over the heap visiting all objects.
+  // Performs a full garbage collection.
   void CollectAllGarbage(
       int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
@@ -1537,7 +1529,7 @@ class Heap {
   void EnsureFillerObjectAtTop();

   // Ensure that we have swept all spaces in such a way that we can iterate
-  // over all objects. May cause a GC.
+  // over all objects.
   void MakeHeapIterable();

   // Performs garbage collection operation.

diff --git a/src/log.cc b/src/log.cc

@@ -1565,8 +1565,6 @@ void Logger::LogCodeObject(Object* object) {

 void Logger::LogCodeObjects() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                          "Logger::LogCodeObjects");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
@@ -1650,8 +1648,6 @@ void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared,

 void Logger::LogCompiledFunctions() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                          "Logger::LogCompiledFunctions");
   HandleScope scope(isolate_);
   const int compiled_funcs_count = EnumerateCompiledFunctions(heap, NULL, NULL);
   ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
@@ -1670,8 +1666,6 @@ void Logger::LogCompiledFunctions() {

 void Logger::LogAccessorCallbacks() {
   Heap* heap = isolate_->heap();
-  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                          "Logger::LogAccessorCallbacks");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {

diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc

@@ -478,8 +478,8 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
            entries_map_.occupancy());
   }
-  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                           "HeapObjectsMap::UpdateHeapObjectsMap");
+  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
+                           "HeapObjectsMap::UpdateHeapObjectsMap");
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
        obj != NULL;
@@ -2505,12 +2505,10 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
   // full GC is reachable from the root when computing dominators.
   // This is not true for weakly reachable objects.
   // As a temporary solution we call GC twice.
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
+                           "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
+                           "HeapSnapshotGenerator::GenerateSnapshot");

 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;

diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc

@@ -14001,7 +14001,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
   // been marked at that point. Therefore some of the maps are not
   // collected until the second garbage collection.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+  CcTest::heap()->CollectAllGarbage();
   int count = GetGlobalObjectsCount();
 #ifdef DEBUG
   if (count != expected) CcTest::heap()->TracePathToGlobal();

diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc

@@ -378,7 +378,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
   // Collect garbage to ensure weak handles are cleared.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+  CcTest::heap()->CollectAllGarbage();

   // Iterate the heap and check that there are no debugger related objects left.
   HeapIterator iterator(CcTest::heap());
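
The iteration idiom that this test and the Logger hunks above rely on, as one self-contained sketch (identifiers as in the V8 sources of this era; the plain CollectAllGarbage() is optional and kept only to clear weak handles):

    Heap* heap = CcTest::heap();
    heap->CollectAllGarbage();         // no iterability flag needed anymore
    HeapIterator iterator(heap);
    DisallowHeapAllocation no_gc;      // heap iteration must not allocate
    for (HeapObject* obj = iterator.next(); obj != NULL;
         obj = iterator.next()) {
      // inspect obj here
    }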

diff --git a/test/cctest/test-log.cc b/test/cctest/test-log.cc

@@ -485,8 +485,6 @@ TEST(EquivalenceOfLoggingAndTraversal) {
       " (function a(j) { return function b() { return j; } })(100);\n"
       "})(this);");
   logger->StopProfiler();
-  reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
-      i::Heap::kMakeHeapIterableMask);
   logger->StringEvent("test-logging-done", "");

   // Iterate heap to find compiled functions, will write to log.

diff --git a/test/mjsunit/debug-referenced-by.js b/test/mjsunit/debug-referenced-by.js

@@ -87,7 +87,7 @@ assertEquals(6, mirror.referencedBy().length);
 // Adds a reference when set.
 h("x_ = a");
 var x = mirror.referencedBy();
-assertEquals(7, mirror.referencedBy().length);
+assertTrue(mirror.referencedBy().length <= 8);
 // Removes a reference when cleared.
 h("x_ = null");
 assertEquals(6, mirror.referencedBy().length);