[heap] Fix stress marking observer and remove --black-allocation

The main fix is to ensure that the recently allocated object is marked
black in StressMarkingObserver::Step. Otherwise, the concurrent marker
can observe an uninitialized white object in the old generation.

This patch also removes the --black-allocation flag.
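
For context, the race looks roughly like the sketch below — illustrative
standalone C++ with made-up names (Object, kWhite/kBlack, published), not
V8 code. The observer step can start black allocation before the mutator
has initialized the object it just allocated; if that object stays white,
the concurrent marker may visit it and read uninitialized memory, so the
fix publishes the object as black up front:

  // Illustrative sketch only: hypothetical names, not V8's API.
  #include <atomic>
  #include <cassert>
  #include <thread>

  enum Color { kWhite, kBlack };

  struct Object {
    std::atomic<Color> color{kWhite};
    int field = -1;  // -1 stands in for uninitialized memory.
  };

  int main() {
    Object obj;
    std::atomic<bool> published{false};

    // Concurrent marker: visits any published object that is still white.
    std::thread marker([&] {
      while (!published.load(std::memory_order_acquire)) {}
      if (obj.color.load(std::memory_order_acquire) == kWhite) {
        // Without the fix this can read the uninitialized field.
        assert(obj.field != -1 && "marker saw an uninitialized white object");
      }
    });

    // Mutator (the fixed path): blacken the object *before* it becomes
    // visible to the marker, then initialize it at leisure.
    obj.color.store(kBlack, std::memory_order_release);
    published.store(true, std::memory_order_release);
    obj.field = 42;

    marker.join();
    return 0;
  }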

Bug: v8:8676
Change-Id: Iba8f00330eabc4847eaef2cd3dfb2884d62a48b4
Reviewed-on: https://chromium-review.googlesource.com/c/1425915
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59002}


@@ -725,7 +725,6 @@ DEFINE_INT(ephemeron_fixpoint_iterations, 10,
            "number of fixpoint iterations it takes to switch to linear "
            "ephemeron algorithm")
 DEFINE_BOOL(trace_concurrent_marking, false, "trace concurrent marking")
-DEFINE_BOOL(black_allocation, true, "use black allocation")
 DEFINE_BOOL(concurrent_store_buffer, true,
             "use concurrent store buffer processing")
 DEFINE_BOOL(concurrent_sweeping, true, "use concurrent sweeping")


@@ -40,19 +40,8 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
       heap->isolate(),
       RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
   incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
-  if (incremental_marking_.black_allocation() && addr != kNullAddress) {
-    // AdvanceIncrementalMarkingOnAllocation can start black allocation.
-    // Ensure that the new object is marked black.
-    HeapObject object = HeapObject::FromAddress(addr);
-    if (incremental_marking_.marking_state()->IsWhite(object) &&
-        !(Heap::InNewSpace(object) || heap->new_lo_space()->Contains(object))) {
-      if (heap->IsLargeObject(object)) {
-        incremental_marking_.marking_state()->WhiteToBlack(object);
-      } else {
-        Page::FromAddress(addr)->CreateBlackArea(addr, addr + size);
-      }
-    }
-  }
+  // AdvanceIncrementalMarkingOnAllocation can start incremental marking.
+  incremental_marking_.EnsureBlackAllocated(addr, size);
 }

 IncrementalMarking::IncrementalMarking(
@@ -368,13 +357,7 @@ void IncrementalMarking::StartMarking() {
   heap_->isolate()->compilation_cache()->MarkCompactPrologue();

-#ifdef V8_CONCURRENT_MARKING
-  // The write-barrier does not check the color of the source object.
-  // Start black allocation earlier to ensure faster marking progress.
-  if (!black_allocation_) {
-    StartBlackAllocation();
-  }
-#endif
+  StartBlackAllocation();

   // Mark strong roots grey.
   IncrementalMarkingRootMarkingVisitor visitor(this);
@@ -391,7 +374,6 @@ void IncrementalMarking::StartMarking() {
 }

 void IncrementalMarking::StartBlackAllocation() {
-  DCHECK(FLAG_black_allocation);
   DCHECK(!black_allocation_);
   DCHECK(IsMarking());
   black_allocation_ = true;
@@ -405,7 +387,6 @@ void IncrementalMarking::StartBlackAllocation() {
 }

 void IncrementalMarking::PauseBlackAllocation() {
-  DCHECK(FLAG_black_allocation);
   DCHECK(IsMarking());
   heap()->old_space()->UnmarkLinearAllocationArea();
   heap()->map_space()->UnmarkLinearAllocationArea();
@@ -427,6 +408,22 @@ void IncrementalMarking::FinishBlackAllocation() {
   }
 }

+void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
+  if (black_allocation() && allocated != kNullAddress) {
+    HeapObject object = HeapObject::FromAddress(allocated);
+    if (marking_state()->IsWhite(object) &&
+        !(Heap::InNewSpace(object) ||
+          heap_->new_lo_space()->Contains(object))) {
+      if (heap_->IsLargeObject(object)) {
+        marking_state()->WhiteToBlack(object);
+      } else {
+        Page::FromAddress(allocated)->CreateBlackArea(allocated,
+                                                      allocated + size);
+      }
+    }
+  }
+}
+
 void IncrementalMarking::MarkRoots() {
   DCHECK(!finalize_marking_completed_);
   DCHECK(IsMarking());
@@ -516,13 +513,6 @@ void IncrementalMarking::FinalizeIncrementally() {
   finalize_marking_completed_ = true;

-  if (FLAG_black_allocation && !heap()->ShouldReduceMemory() &&
-      !black_allocation_) {
-    // TODO(hpayer): Move to an earlier point as soon as we make faster marking
-    // progress.
-    StartBlackAllocation();
-  }
-
   if (FLAG_trace_incremental_marking) {
     double end = heap_->MonotonicallyIncreasingTimeInMs();
     double delta = end - start;
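
A note on the two branches in EnsureBlackAllocated above: a large object
gets its single mark bit flipped white-to-black, while a normal paged
allocation blackens the whole region [allocated, allocated + size) so the
object is born black. The toy model below (assumed one mark bit per 8-byte
word; V8's real bitmap layout differs) shows the CreateBlackArea idea:

  #include <cassert>
  #include <cstdint>
  #include <vector>

  constexpr uintptr_t kWordSize = 8;  // assumed allocation granularity

  // Toy stand-in for a page's marking bitmap: false == white, true == black.
  struct ToyPage {
    uintptr_t base;
    std::vector<bool> marks;  // one bit per word
    ToyPage(uintptr_t base, size_t words) : base(base), marks(words, false) {}

    // Blacken every word in [start, end); objects allocated inside a black
    // area need no further marking work.
    void CreateBlackArea(uintptr_t start, uintptr_t end) {
      for (uintptr_t a = start; a < end; a += kWordSize)
        marks[(a - base) / kWordSize] = true;
    }
    bool IsBlack(uintptr_t addr) const {
      return marks[(addr - base) / kWordSize];
    }
  };

  int main() {
    ToyPage page(/*base=*/0x10000, /*words=*/512);
    page.CreateBlackArea(0x10040, 0x10040 + 64);  // a fresh 64-byte object
    assert(page.IsBlack(0x10040) && page.IsBlack(0x10040 + 56));
    assert(!page.IsBlack(0x10000));
    return 0;
  }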


@@ -244,6 +244,10 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void Deactivate();

+  // Ensures that the given region is black allocated if it is in the old
+  // generation.
+  void EnsureBlackAllocated(Address allocated, size_t size);
+
  private:
   class Observer : public AllocationObserver {
    public:


@@ -437,12 +437,12 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
 #endif
   HeapObject heap_obj;
   if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
-    DCHECK_IMPLIES(
-        heap()->incremental_marking()->black_allocation(),
-        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
     AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                    heap_obj->address(), size_in_bytes);
     StartNextInlineAllocationStep();
+    DCHECK_IMPLIES(
+        heap()->incremental_marking()->black_allocation(),
+        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
   }
   return result;
 }


@@ -3,6 +3,7 @@
 // found in the LICENSE file.

 #include "src/heap/stress-marking-observer.h"
+#include "src/heap/incremental-marking.h"

 namespace v8 {
 namespace internal {
@@ -15,6 +16,7 @@ void StressMarkingObserver::Step(int bytes_allocated, Address soon_object,
                                  size_t size) {
   heap_.StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags,
                                                           kNoGCCallbackFlags);
+  heap_.incremental_marking()->EnsureBlackAllocated(soon_object, size);
 }

 }  // namespace internal


@@ -5485,7 +5485,6 @@ TEST(LiveBytes) {

 TEST(Regress615489) {
   if (!FLAG_incremental_marking) return;
-  FLAG_black_allocation = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -5589,7 +5588,6 @@ TEST(Regress631969) {

 TEST(LeftTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
-  FLAG_black_allocation = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -5629,7 +5627,6 @@ TEST(LeftTrimFixedArrayInBlackArea) {

 TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
-  FLAG_black_allocation = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -5696,7 +5693,6 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {

 TEST(ContinuousRightTrimFixedArrayInBlackArea) {
   if (!FLAG_incremental_marking) return;
-  FLAG_black_allocation = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();
@@ -6105,7 +6101,6 @@ TEST(Regress6800LargeObject) {

 HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
   if (!FLAG_incremental_marking) return;
   ManualGCScope manual_gc_scope;
-  FLAG_black_allocation = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   Heap* heap = CcTest::heap();


@@ -1794,7 +1794,6 @@ TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
   FLAG_always_opt = false;
   const char* filter_flag = "--turbo-filter=NOTHING";
   FlagList::SetFlagsFromString(filter_flag, StrLength(filter_flag));
-  FLAG_black_allocation = true;
   FLAG_manual_evacuation_candidates_selection = true;
   LocalContext context;