Revert of [heap] Do not invoke GC to make heap iterable. (patchset #5 id:80001 of https://codereview.chromium.org/1961373003/ )

Reason for revert:
Breaks https://build.chromium.org/p/client.v8/builders/V8%20Linux%20-%20gc%20stress/builds/3551

Original issue's description:
> [heap] Do not invoke GC to make heap iterable.
>
> Remove kMakeHeapIterableMask since the heap is always iterable.
>
> BUG=chromium:580959
> LOG=n
>
> Committed: https://crrev.com/7c1cac4888a248fda3fa6de3624f32a6babb37e9
> Cr-Commit-Position: refs/heads/master@{#36333}

TBR=ulan@chromium.org,yangguo@chromium.org,hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:580959

Review-Url: https://codereview.chromium.org/1987363002
Cr-Commit-Position: refs/heads/master@{#36335}
Authored by machenbach on 2016-05-18 12:22:05 -07:00; committed by Commit bot
parent 768e9c509b, commit 0aa3707dc4
9 changed files with 50 additions and 14 deletions

@@ -1319,6 +1319,10 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
     }
   }
+  // Make sure we abort incremental marking.
+  isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                                      "prepare for break points");
+
   bool is_interpreted = shared->HasBytecodeArray();
   {
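
Note: the restored comment lines up with the heap.h hunk further down, where
kMakeHeapIterableMask is defined as an alias of kAbortIncrementalMarkingMask,
so this single call both aborts incremental marking and leaves the heap
iterable. A one-line sketch of that relationship (the static_assert is
illustrative, not part of this change):

    // Holds by definition once the heap.h hunk below is applied.
    static_assert(Heap::kMakeHeapIterableMask == Heap::kAbortIncrementalMarkingMask,
                  "making the heap iterable requires aborting incremental marking");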

@@ -893,8 +893,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   isolate()->ClearSerializerData();
-  set_current_gc_flags(kAbortIncrementalMarkingMask |
-                       kReduceMemoryFootprintMask);
+  set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
   isolate_->compilation_cache()->Clear();
   const int kMaxNumberOfAttempts = 7;
   const int kMinNumberOfAttempts = 2;
@@ -4046,10 +4045,22 @@ AllocationResult Heap::AllocateStruct(InstanceType type) {
 }
 
 
+bool Heap::IsHeapIterable() {
+  // TODO(hpayer): This function is not correct. Allocation folding in old
+  // space breaks the iterability.
+  return new_space_top_after_last_gc_ == new_space()->top();
+}
+
+
 void Heap::MakeHeapIterable() {
+  DCHECK(AllowHeapAllocation::IsAllowed());
+  if (!IsHeapIterable()) {
+    CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
+  }
   if (mark_compact_collector()->sweeping_in_progress()) {
     mark_compact_collector()->EnsureSweepingCompleted();
   }
+  DCHECK(IsHeapIterable());
 }
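
Note: the restored MakeHeapIterable() is the GC-on-demand contract the rest
of this revert depends on: a cheap check (has new-space top moved since the
last GC?) decides whether a kMakeHeapIterableMask collection is needed, and
concurrent sweeping is always drained before anyone iterates. A minimal
caller-side sketch of that contract; WalkHeap is a hypothetical helper, not
V8 code:

    // Sketch: what a caller may assume after MakeHeapIterable() returns.
    void WalkHeap(Heap* heap) {
      heap->MakeHeapIterable();      // may trigger a full GC (see heap.h below)
      HeapIterator iterator(heap);
      DisallowHeapAllocation no_gc;  // no allocation, hence no GC, while iterating
      for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
        // every live object is visited; no unswept pages or stale free space
      }
    }
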
@@ -4594,7 +4605,10 @@ void Heap::Verify() {
   CHECK(HasBeenSetUp());
   HandleScope scope(isolate());
 
-  MakeHeapIterable();
+  if (mark_compact_collector()->sweeping_in_progress()) {
+    // We have to wait here for the sweeper threads to have an iterable heap.
+    mark_compact_collector()->EnsureSweepingCompleted();
+  }
 
   VerifyPointersVisitor visitor;
   IterateRoots(&visitor, VISIT_ONLY_STRONG);

@@ -541,6 +541,9 @@ class Heap {
   static const int kAbortIncrementalMarkingMask = 2;
   static const int kFinalizeIncrementalMarkingMask = 4;
 
+  // Making the heap iterable requires us to abort incremental marking.
+  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
+
   // The roots that have an index less than this are always in old space.
   static const int kOldSpaceRoots = 0x20;
@@ -661,6 +664,9 @@ class Heap {
   // Converts the given boolean condition to JavaScript boolean value.
   inline Oddball* ToBoolean(bool condition);
 
+  // Check whether the heap is currently iterable.
+  bool IsHeapIterable();
+
   // Notify the heap that a context has been disposed.
   int NotifyContextDisposed(bool dependant_context);
@@ -1027,7 +1033,9 @@ class Heap {
       AllocationSpace space, const char* gc_reason = NULL,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
-  // Performs a full garbage collection.
+  // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
+  // non-zero, then the slower precise sweeper is used, which leaves the heap
+  // in a state where we can iterate over the heap visiting all objects.
   void CollectAllGarbage(
       int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
@@ -1529,7 +1537,7 @@ class Heap {
   void EnsureFillerObjectAtTop();
 
   // Ensure that we have swept all spaces in such a way that we can iterate
-  // over all objects.
+  // over all objects. May cause a GC.
   void MakeHeapIterable();
 
   // Performs garbage collection operation.
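
Note: these masks are plain bit flags (kAbortIncrementalMarkingMask = 2,
kFinalizeIncrementalMarkingMask = 4, and kMakeHeapIterableMask aliasing the
former), so call sites combine them with bitwise OR, exactly as the
CollectAllAvailableGarbage hunk above does. A small illustration; the call
itself is hypothetical, but the flag arithmetic follows from the constants in
this hunk:

    // Request an iterable heap and a reduced footprint in one collection.
    int flags = Heap::kMakeHeapIterableMask | Heap::kReduceMemoryFootprintMask;
    heap->CollectAllGarbage(flags, "example: iterable + compact");

    // "(flags & kMakeHeapIterableMask) is non-zero" in the comment above is
    // just this mask test:
    bool wants_iterable = (flags & Heap::kMakeHeapIterableMask) != 0;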

@@ -1565,6 +1565,8 @@ void Logger::LogCodeObject(Object* object) {
 
 void Logger::LogCodeObjects() {
   Heap* heap = isolate_->heap();
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogCodeObjects");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
@@ -1648,6 +1650,8 @@ void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared,
 
 void Logger::LogCompiledFunctions() {
   Heap* heap = isolate_->heap();
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogCompiledFunctions");
   HandleScope scope(isolate_);
   const int compiled_funcs_count = EnumerateCompiledFunctions(heap, NULL, NULL);
   ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
@@ -1666,6 +1670,8 @@ void Logger::LogCompiledFunctions() {
 
 void Logger::LogAccessorCallbacks() {
   Heap* heap = isolate_->heap();
+  heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                          "Logger::LogAccessorCallbacks");
   HeapIterator iterator(heap);
   DisallowHeapAllocation no_gc;
   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
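
Note: all three Logger call sites share one shape: force an iterable heap,
then scan it inside a DisallowHeapAllocation scope so the walk itself cannot
allocate and thereby trigger a GC that would invalidate the iterator. A
compressed sketch of that shape; LogX is a stand-in name, not a real Logger
method:

    // Common shape of LogCodeObjects / LogCompiledFunctions / LogAccessorCallbacks.
    void LogX(Isolate* isolate) {
      Heap* heap = isolate->heap();
      heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "Logger::LogX");
      HeapIterator iterator(heap);
      DisallowHeapAllocation no_gc;  // assert-no-allocation scope guard
      for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
        // filter for the object kind this particular logger reports
      }
    }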

@@ -478,8 +478,8 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
            entries_map_.occupancy());
   }
-  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
-                           "HeapObjectsMap::UpdateHeapObjectsMap");
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                           "HeapObjectsMap::UpdateHeapObjectsMap");
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
        obj != NULL;
@@ -2505,10 +2505,12 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
   // full GC is reachable from the root when computing dominators.
   // This is not true for weakly reachable objects.
   // As a temporary solution we call GC twice.
-  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
-                           "HeapSnapshotGenerator::GenerateSnapshot");
-  heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
-                           "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(
+      Heap::kMakeHeapIterableMask,
+      "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(
+      Heap::kMakeHeapIterableMask,
+      "HeapSnapshotGenerator::GenerateSnapshot");
 
 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;
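
Note: the "call GC twice" comment is doing real work here. The first
collection runs weak callbacks and clears weak references; objects that were
kept alive only through those weak references become unreachable, and the
second collection reclaims them, so the snapshot's dominator computation only
sees root-reachable objects. Sketched with the calls from this hunk (the
reason strings here are illustrative, not the real ones):

    heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, "snapshot: pass 1");
    // Weak refs cleared by pass 1 may have been the only thing keeping some
    // objects alive; pass 2 actually reclaims them.
    heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, "snapshot: pass 2");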

@@ -14001,7 +14001,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
   // been marked at that point. Therefore some of the maps are not
   // collected until the second garbage collection.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage();
+  CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
   int count = GetGlobalObjectsCount();
 #ifdef DEBUG
   if (count != expected) CcTest::heap()->TracePathToGlobal();

@@ -378,7 +378,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
   // Collect garbage to ensure weak handles are cleared.
   CcTest::heap()->CollectAllGarbage();
-  CcTest::heap()->CollectAllGarbage();
+  CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
 
   // Iterate the heap and check that there are no debugger related objects left.
   HeapIterator iterator(CcTest::heap());

@@ -485,6 +485,8 @@ TEST(EquivalenceOfLoggingAndTraversal) {
       " (function a(j) { return function b() { return j; } })(100);\n"
      "})(this);");
   logger->StopProfiler();
+  reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
+      i::Heap::kMakeHeapIterableMask);
   logger->StringEvent("test-logging-done", "");
 
   // Iterate heap to find compiled functions, will write to log.

@@ -87,7 +87,7 @@ assertEquals(6, mirror.referencedBy().length);
 // Adds a reference when set.
 h("x_ = a");
 var x = mirror.referencedBy();
-assertTrue(mirror.referencedBy().length <= 8);
+assertEquals(7, mirror.referencedBy().length);
 // Removes a reference when cleared.
 h("x_ = null");
 assertEquals(6, mirror.referencedBy().length);