cppgc: Avoid double-accounting live bytes through Steele barrier

The barrier just re-added a black object to the worklist (making it
gray again), which resulted in double-accounting of live bytes.

Trace directly, as the barrier is not widely used.

Bug: chromium:1056170
Change-Id: I06a55c13f6e82652ad1939a12c4e23f3a3ebd3fd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2904212
Auto-Submit: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Omer Katz <omerkatz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74668}
This commit is contained in:
Michael Lippautz 2021-05-18 23:43:43 +02:00 committed by V8 LUCI CQ
parent 7ae8f4fbe3
commit 0052640ef2
8 changed files with 78 additions and 10 deletions

View File

@ -149,6 +149,20 @@ void ConcurrentMarkingTask::ProcessWorklists(
return;
}
if (!DrainWorklistWithYielding(
job_delegate, concurrent_marking_state,
concurrent_marker_.incremental_marking_schedule(),
concurrent_marking_state.retrace_marked_objects_worklist(),
[&concurrent_marking_visitor](HeapObjectHeader* header) {
BasePage::FromPayload(header)->SynchronizedLoad();
// Retracing does not increment marked bytes as the object has
// already been processed before.
DynamicallyTraceMarkedObject<AccessMode::kAtomic>(
concurrent_marking_visitor, *header);
})) {
return;
}
{
StatsCollector::DisabledConcurrentScope stats_scope(
concurrent_marker_.heap().stats_collector(),

View File

@ -496,6 +496,17 @@ bool MarkerBase::ProcessWorklistsWithDeadline(
})) {
return false;
}
if (!DrainWorklistWithBytesAndTimeDeadline(
mutator_marking_state_, marked_bytes_deadline, time_deadline,
mutator_marking_state_.retrace_marked_objects_worklist(),
[this](HeapObjectHeader* header) {
// Retracing does not increment marked bytes as the object has
// already been processed before.
DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor(),
*header);
})) {
return false;
}
}
{

View File

@ -58,6 +58,11 @@ class V8_EXPORT_PRIVATE MarkerBase {
IsForcedGC is_forced_gc = IsForcedGC::kNotForced;
};
enum class WriteBarrierType {
kDijkstra,
kSteele,
};
virtual ~MarkerBase();
MarkerBase(const MarkerBase&) = delete;
@ -95,6 +100,8 @@ class V8_EXPORT_PRIVATE MarkerBase {
void ProcessWeakness();
inline void WriteBarrierForInConstructionObject(HeapObjectHeader&);
template <WriteBarrierType type>
inline void WriteBarrierForObject(HeapObjectHeader&);
HeapBase& heap() { return heap_; }
@ -220,8 +227,16 @@ void MarkerBase::WriteBarrierForInConstructionObject(HeapObjectHeader& header) {
.Push<AccessMode::kAtomic>(&header);
}
template <MarkerBase::WriteBarrierType type>
void MarkerBase::WriteBarrierForObject(HeapObjectHeader& header) {
mutator_marking_state_.write_barrier_worklist().Push(&header);
switch (type) {
case MarkerBase::WriteBarrierType::kDijkstra:
mutator_marking_state_.write_barrier_worklist().Push(&header);
break;
case MarkerBase::WriteBarrierType::kSteele:
mutator_marking_state_.retrace_marked_objects_worklist().Push(&header);
break;
}
}
} // namespace internal

View File

@ -99,6 +99,10 @@ class MarkingStateBase {
MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
return weak_containers_worklist_;
}
MarkingWorklists::RetraceMarkedObjectsWorklist::Local&
retrace_marked_objects_worklist() {
return retrace_marked_objects_worklist_;
}
CompactionWorklists::MovableReferencesWorklist::Local*
movable_slots_worklist() {
@ -138,6 +142,8 @@ class MarkingStateBase {
MarkingWorklists::EphemeronPairsWorklist::Local
ephemeron_pairs_for_processing_worklist_;
MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
MarkingWorklists::RetraceMarkedObjectsWorklist::Local
retrace_marked_objects_worklist_;
// Existence of the worklist (|movable_slot_worklist_| != nullptr) denotes
// that compaction is currently enabled and slots must be recorded.
std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
@ -149,8 +155,7 @@ class MarkingStateBase {
MarkingStateBase::MarkingStateBase(HeapBase& heap,
MarkingWorklists& marking_worklists,
CompactionWorklists* compaction_worklists)
:
heap_(heap),
: heap_(heap),
marking_worklist_(marking_worklists.marking_worklist()),
not_fully_constructed_worklist_(
*marking_worklists.not_fully_constructed_worklist()),
@ -164,7 +169,9 @@ MarkingStateBase::MarkingStateBase(HeapBase& heap,
marking_worklists.discovered_ephemeron_pairs_worklist()),
ephemeron_pairs_for_processing_worklist_(
marking_worklists.ephemeron_pairs_for_processing_worklist()),
weak_containers_worklist_(*marking_worklists.weak_containers_worklist()) {
weak_containers_worklist_(*marking_worklists.weak_containers_worklist()),
retrace_marked_objects_worklist_(
marking_worklists.retrace_marked_objects_worklist()) {
if (compaction_worklists) {
movable_slots_worklist_ =
std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
@ -354,9 +361,7 @@ void MutatorMarkingState::ReTraceMarkedWeakContainer(cppgc::Visitor& visitor,
HeapObjectHeader& header) {
DCHECK(weak_containers_worklist_.Contains(&header));
recently_retraced_weak_containers_.Insert(&header);
// Don't push to the marking worklist to avoid double accounting of marked
// bytes as the container is already accounted for.
header.Trace(&visitor);
retrace_marked_objects_worklist().Push(&header);
}
void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {

View File

@ -19,6 +19,7 @@ void MarkingWorklists::ClearForTesting() {
concurrent_marking_bailout_worklist_.Clear();
discovered_ephemeron_pairs_worklist_.Clear();
ephemeron_pairs_for_processing_worklist_.Clear();
retrace_marked_objects_worklist_.Clear();
}
MarkingWorklists::ExternalMarkingWorklist::~ExternalMarkingWorklist() {

View File

@ -84,6 +84,8 @@ class MarkingWorklists {
using EphemeronPairsWorklist =
heap::base::Worklist<EphemeronPairItem, 64 /* local entries */>;
using WeakContainersWorklist = ExternalMarkingWorklist;
using RetraceMarkedObjectsWorklist =
heap::base::Worklist<HeapObjectHeader*, 16 /* local entries */>;
MarkingWorklist* marking_worklist() { return &marking_worklist_; }
NotFullyConstructedWorklist* not_fully_constructed_worklist() {
@ -111,6 +113,9 @@ class MarkingWorklists {
WeakContainersWorklist* weak_containers_worklist() {
return &weak_containers_worklist_;
}
RetraceMarkedObjectsWorklist* retrace_marked_objects_worklist() {
return &retrace_marked_objects_worklist_;
}
void ClearForTesting();
@ -125,6 +130,7 @@ class MarkingWorklists {
EphemeronPairsWorklist discovered_ephemeron_pairs_worklist_;
EphemeronPairsWorklist ephemeron_pairs_for_processing_worklist_;
WeakContainersWorklist weak_containers_worklist_;
RetraceMarkedObjectsWorklist retrace_marked_objects_worklist_;
};
template <>

View File

@ -25,6 +25,7 @@ AtomicEntryFlag WriteBarrier::incremental_or_concurrent_marking_flag_;
namespace {
template <MarkerBase::WriteBarrierType type>
void ProcessMarkValue(HeapObjectHeader& header, MarkerBase* marker,
const void* value) {
#if defined(CPPGC_CAGED_HEAP)
@ -46,7 +47,7 @@ void ProcessMarkValue(HeapObjectHeader& header, MarkerBase* marker,
return;
}
marker->WriteBarrierForObject(header);
marker->WriteBarrierForObject<type>(header);
}
} // namespace
@ -73,7 +74,8 @@ void WriteBarrier::DijkstraMarkingBarrierSlow(const void* value) {
const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
if (!header.TryMarkAtomic()) return;
ProcessMarkValue(header, heap->marker(), value);
ProcessMarkValue<MarkerBase::WriteBarrierType::kDijkstra>(
header, heap->marker(), value);
}
// static
@ -117,7 +119,8 @@ void WriteBarrier::SteeleMarkingBarrierSlow(const void* value) {
const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
if (!header.IsMarked<AccessMode::kAtomic>()) return;
ProcessMarkValue(header, heap->marker(), value);
ProcessMarkValue<MarkerBase::WriteBarrierType::kSteele>(
header, heap->marker(), value);
}
#if defined(CPPGC_YOUNG_GENERATION)

View File

@ -50,6 +50,9 @@ class V8_NODISCARD ExpectWriteBarrierFires final
marker->MutatorMarkingStateForTesting().marking_worklist()),
write_barrier_worklist_(
marker->MutatorMarkingStateForTesting().write_barrier_worklist()),
retrace_marked_objects_worklist_(
marker->MutatorMarkingStateForTesting()
.retrace_marked_objects_worklist()),
objects_(objects) {
EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
@ -76,6 +79,14 @@ class V8_NODISCARD ExpectWriteBarrierFires final
if (pos != objects_.end()) objects_.erase(pos);
}
}
{
HeapObjectHeader* item;
while (retrace_marked_objects_worklist_.Pop(&item)) {
auto pos =
std::find(objects_.begin(), objects_.end(), item->ObjectStart());
if (pos != objects_.end()) objects_.erase(pos);
}
}
EXPECT_TRUE(objects_.empty());
for (auto* header : headers_) {
EXPECT_TRUE(header->IsMarked());
@ -88,6 +99,8 @@ class V8_NODISCARD ExpectWriteBarrierFires final
private:
MarkingWorklists::MarkingWorklist::Local& marking_worklist_;
MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist_;
MarkingWorklists::RetraceMarkedObjectsWorklist::Local&
retrace_marked_objects_worklist_;
std::vector<void*> objects_;
std::vector<HeapObjectHeader*> headers_;
};