[heap] Incremental marking simplifications
- Remove dead code.
- Remove `was_activated_`.

Bug: v8:12775
Change-Id: Ie54b24f21a8789dc815ab7a96ce4a074e3644342
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3726300
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#81423}
Parent: a1da14588a
Commit: e67caa3bab
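The pattern behind both bullet points: was_activated_ only mirrored the marker's state machine, so WasActivated() can give way to IsRunning() (the negation of IsStopped()), CanBeActivated() becomes CanBeStarted(), and the Epilogue() that existed only to clear the flag disappears. A minimal sketch of the idea, with hypothetical names rather than V8's real classes:

// Minimal sketch, not V8 code: derive predicates from the state machine
// instead of tracking a separate was_activated_ flag.
#include <cassert>

class Marker {
 public:
  enum class State { kStopped, kMarking, kComplete };

  bool IsStopped() const { return state_ == State::kStopped; }
  bool IsRunning() const { return !IsStopped(); }  // Replaces WasActivated().

  void Start() {
    assert(IsStopped());
    state_ = State::kMarking;  // No extra flag to set here...
  }
  void Stop() { state_ = State::kStopped; }  // ...or to clear here.

 private:
  State state_ = State::kStopped;
};

int main() {
  Marker m;
  assert(!m.IsRunning());
  m.Start();
  assert(m.IsRunning());
  m.Stop();
  return 0;
}

With the flag gone, the assignment in Start() and the Epilogue() that reset it both fall away, which is most of the dead code the diff below removes.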
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1701,7 +1701,7 @@ void Heap::ReportExternalMemoryPressure() {
     return;
   }
   if (incremental_marking()->IsStopped()) {
-    if (incremental_marking()->CanBeActivated()) {
+    if (incremental_marking()->CanBeStarted()) {
       StartIncrementalMarking(GCFlagsForIncrementalMarking(),
                               GarbageCollectionReason::kExternalMemoryPressure,
                               kGCCallbackFlagsForExternalMemory);
@@ -2058,8 +2058,8 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
 }

 void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
-  if (!incremental_marking()->IsStopped() ||
-      !incremental_marking()->CanBeActivated()) {
+  if (incremental_marking()->IsRunning() ||
+      !incremental_marking()->CanBeStarted()) {
     return;
   }

@@ -2248,7 +2248,7 @@ size_t Heap::PerformGarbageCollection(

   // If incremental marking has been activated, the full GC cycle has already
   // started, so don't start a new one.
-  if (!incremental_marking_->WasActivated()) {
+  if (!incremental_marking_->IsRunning()) {
     tracer()->StartCycle(collector, gc_reason, collector_reason,
                          GCTracer::MarkingType::kAtomic);
   }
@@ -2256,7 +2256,7 @@ size_t Heap::PerformGarbageCollection(

   tracer()->StartAtomicPause();
   if (!Heap::IsYoungGenerationCollector(collector) &&
-      incremental_marking_->WasActivated()) {
+      incremental_marking_->IsRunning()) {
     tracer()->UpdateCurrentEvent(gc_reason, collector_reason);
   }

@@ -2396,7 +2396,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   v8::Isolate::Scope isolate_scope(reinterpret_cast<v8::Isolate*>(isolate()));

   tracer()->StartObservablePause();
-  DCHECK(!incremental_marking_->WasActivated());
+  DCHECK(incremental_marking_->IsStopped());
   DCHECK_NOT_NULL(isolate()->global_safepoint());

   isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
@@ -2649,8 +2649,6 @@ void Heap::MarkCompactEpilogue() {
   SetGCState(NOT_IN_GC);

   isolate_->counters()->objs_since_last_full()->Set(0);
-
-  incremental_marking()->Epilogue();
 }

 void Heap::MarkCompactPrologue() {
@@ -3459,10 +3457,8 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
   Address old_start = object.address();
   Address new_start = old_start + bytes_to_trim;

-  if (incremental_marking()->IsMarking()) {
-    incremental_marking()->NotifyLeftTrimming(
-        object, HeapObject::FromAddress(new_start));
-  }
+  incremental_marking()->NotifyLeftTrimming(object,
+                                            HeapObject::FromAddress(new_start));

 #ifdef DEBUG
   if (MayContainRecordedSlots(object)) {
@@ -5525,7 +5521,7 @@ double Heap::PercentToGlobalMemoryLimit() {
 Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
   // Code using an AlwaysAllocateScope assumes that the GC state does not
   // change; that implies that no marking steps must be performed.
-  if (!incremental_marking()->CanBeActivated() || always_allocate()) {
+  if (!incremental_marking()->CanBeStarted() || always_allocate()) {
     // Incremental marking is disabled or it is too early to start.
     return IncrementalMarkingLimit::kNoLimit;
   }
@@ -6028,12 +6024,9 @@ void Heap::RegisterExternallyReferencedObject(Address* location) {
   }
   HeapObject heap_object = HeapObject::cast(object);
   DCHECK(IsValidHeapObject(this, heap_object));
-  if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
-    incremental_marking()->WhiteToGreyAndPush(heap_object);
-  } else {
-    DCHECK(mark_compact_collector()->in_use());
-    mark_compact_collector()->MarkExternallyReferencedObject(heap_object);
-  }
+  DCHECK(incremental_marking()->IsMarking() ||
+         mark_compact_collector()->in_use());
+  mark_compact_collector()->MarkExternallyReferencedObject(heap_object);
 }

 void Heap::StartTearDown() {
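The RegisterExternallyReferencedObject hunk above collapses a two-way branch into a single call guarded by an assertion over the invariant both branches relied on. A simplified sketch of that shape, with stand-in types instead of the real Heap and MarkCompactCollector:

// Sketch with hypothetical types, not V8 code: assert the invariant the
// old branches encoded, then take the single remaining path.
#include <cassert>
#include <cstdio>

struct Collector {
  bool in_use = true;
  void MarkExternallyReferenced(int object) {
    std::printf("marked %d\n", object);
  }
};

void RegisterExternallyReferenced(Collector& collector, bool marker_running,
                                  int object) {
  // The invariant both old branches relied on, now checked in one place.
  assert(marker_running || collector.in_use);
  collector.MarkExternallyReferenced(object);
}

int main() {
  Collector collector;
  RegisterExternallyReferenced(collector, /*marker_running=*/false, 42);
  return 0;
}

LeftTrimFixedArray gets the same treatment from the other side: the call site drops its IsMarking() check because NotifyLeftTrimming now guards itself (see the incremental-marking.cc hunk below).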
--- a/src/heap/incremental-marking-inl.h
+++ b/src/heap/incremental-marking-inl.h
@@ -5,12 +5,9 @@
 #ifndef V8_HEAP_INCREMENTAL_MARKING_INL_H_
 #define V8_HEAP_INCREMENTAL_MARKING_INL_H_

-#include "src/heap/incremental-marking.h"
-
-#include "src/execution/isolate.h"
-#include "src/heap/mark-compact-inl.h"
-#include "src/objects/maybe-object.h"
-#include "src/objects/objects-inl.h"
+#include "src/heap/heap-inl.h"
+#include "src/heap/incremental-marking.h"

 namespace v8 {
 namespace internal {
@@ -33,14 +30,6 @@ void IncrementalMarking::TransferColor(HeapObject from, HeapObject to) {
   }
 }

-bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
-  if (marking_state()->WhiteToGrey(obj)) {
-    local_marking_worklists()->Push(obj);
-    return true;
-  }
-  return false;
-}
-
 void IncrementalMarking::RestartIfNotMarking() {
   if (state_ == COMPLETE) {
     state_ = MARKING;
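WhiteToGreyAndPush leaves the inline header for incremental-marking.cc (next file), which is what lets this header drop the mark-compact and objects includes. A sketch of the de-inlining idea as one self-contained file, with hypothetical types standing in for the marking state and worklist:

// Sketch, hypothetical types: move a definition out of the header so the
// header sheds the includes the body needed. Shown as one file for brevity.
#include <cstddef>
#include <vector>

// -- would live in marker.h: declaration only, no heavy includes needed. ----
class Marker {
 public:
  bool WhiteToGreyAndPush(std::size_t obj);

 private:
  std::vector<std::size_t> worklist_;
  std::vector<bool> grey_ = std::vector<bool>(100, false);
};

// -- would live in marker.cc, next to the includes its body requires. -------
bool Marker::WhiteToGreyAndPush(std::size_t obj) {
  if (!grey_[obj]) {
    grey_[obj] = true;         // White -> grey succeeds exactly once.
    worklist_.push_back(obj);  // Queue the object for later processing.
    return true;
  }
  return false;
}

int main() {
  Marker m;
  bool first = m.WhiteToGreyAndPush(1);   // true: transitioned.
  bool second = m.WhiteToGreyAndPush(1);  // false: already grey.
  return (first && !second) ? 0 : 1;
}

An out-of-line definition trades a possible call overhead for a smaller header footprint; for a function that no longer needs to be visible to outside callers, the include savings win.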
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -79,14 +79,16 @@ void IncrementalMarking::MarkBlackBackground(HeapObject obj, int object_size) {
 }

 void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
-  DCHECK(IsMarking());
+  if (!IsMarking()) return;

   DCHECK(MemoryChunk::FromHeapObject(from)->SweepingDone());
   DCHECK_EQ(MemoryChunk::FromHeapObject(from), MemoryChunk::FromHeapObject(to));
   DCHECK_NE(from, to);

   MarkBit new_mark_bit = marking_state()->MarkBitFrom(to);

-  if (black_allocation() && Marking::IsBlack<kAtomicity>(new_mark_bit)) {
+  if (black_allocation() &&
+      Marking::IsBlack<AccessMode::ATOMIC>(new_mark_bit)) {
     // Nothing to do if the object is in black area.
     return;
   }
@@ -96,19 +98,17 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject from, HeapObject to) {
   if (from.address() + kTaggedSize == to.address()) {
     // The old and the new markbits overlap. The |to| object has the
     // grey color. To make it black, we need to set the second bit.
-    DCHECK(new_mark_bit.Get<kAtomicity>());
-    new_mark_bit.Next().Set<kAtomicity>();
+    DCHECK(new_mark_bit.Get<AccessMode::ATOMIC>());
+    new_mark_bit.Next().Set<AccessMode::ATOMIC>();
   } else {
-    bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
+    bool success = Marking::WhiteToBlack<AccessMode::ATOMIC>(new_mark_bit);
     DCHECK(success);
     USE(success);
   }
   DCHECK(marking_state()->IsBlack(to));
 }

-bool IncrementalMarking::WasActivated() { return was_activated_; }
-
-bool IncrementalMarking::CanBeActivated() {
+bool IncrementalMarking::CanBeStarted() const {
   // Only start incremental marking in a safe state:
   // 1) when incremental marking is turned on
   // 2) when we are currently not in a GC, and
@@ -173,7 +173,6 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   scheduled_bytes_to_mark_ = 0;
   schedule_update_time_ms_ = start_time_ms_;
   bytes_marked_concurrently_ = 0;
-  was_activated_ = true;

   StartMarking();

@@ -182,7 +181,16 @@ void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
     incremental_marking_job()->Start(heap_);
   }

-class IncrementalMarkingRootMarkingVisitor final : public RootVisitor {
+bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
+  if (marking_state()->WhiteToGrey(obj)) {
+    local_marking_worklists()->Push(obj);
+    return true;
+  }
+  return false;
+}
+
+class IncrementalMarking::IncrementalMarkingRootMarkingVisitor final
+    : public RootVisitor {
  public:
   explicit IncrementalMarkingRootMarkingVisitor(Heap* heap)
       : heap_(heap), incremental_marking_(heap->incremental_marking()) {}
@@ -221,22 +229,18 @@ class IncrementalMarkingRootMarkingVisitor final : public RootVisitor {
   IncrementalMarking* const incremental_marking_;
 };

-namespace {
-
-void MarkRoots(Heap* heap) {
-  IncrementalMarkingRootMarkingVisitor visitor(heap);
+void IncrementalMarking::MarkRoots() {
+  IncrementalMarkingRootMarkingVisitor visitor(heap_);
   CodePageHeaderModificationScope rwx_write_scope(
       "Marking of builtins table entries require write access to Code page "
       "header");
-  heap->IterateRoots(
+  heap_->IterateRoots(
       &visitor,
       base::EnumSet<SkipRoot>{SkipRoot::kStack, SkipRoot::kMainThreadHandles,
                               SkipRoot::kWeak});
 }

-}  // namespace
-
-void IncrementalMarking::MarkRootsForTesting() { MarkRoots(heap_); }
+void IncrementalMarking::MarkRootsForTesting() { MarkRoots(); }

 void IncrementalMarking::StartMarking() {
   if (heap_->isolate()->serializer_enabled()) {
@@ -281,7 +285,7 @@ void IncrementalMarking::StartMarking() {

   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_ROOTS);
-    MarkRoots(heap_);
+    MarkRoots();
   }

   if (FLAG_concurrent_marking && !heap_->IsTearingDown()) {
@@ -618,12 +622,6 @@ void IncrementalMarking::MarkingComplete(CompletionAction action) {
   }
 }

-void IncrementalMarking::Epilogue() {
-  DCHECK(IsStopped());
-
-  was_activated_ = false;
-}
-
 bool IncrementalMarking::ShouldDoEmbedderStep() {
   return state_ == MARKING && FLAG_incremental_marking_wrappers &&
          heap_->local_embedder_heap_tracer()->InUse();
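Two semantic details in the hunks above: NotifyLeftTrimming turns its DCHECK(IsMarking()) precondition into an early return so callers may invoke it unconditionally, and the root-marking visitor plus MarkRoots move from an anonymous namespace into the class itself. A sketch of the guard change, with simplified signatures rather than the real ones:

// Sketch, not V8 code: a runtime guard inside the callee replaces a
// debug-only precondition that every caller had to check for itself.
#include <cassert>

enum class State { kStopped, kMarking };

struct Marker {
  State state = State::kStopped;
  bool IsMarking() const { return state == State::kMarking; }

  void NotifyLeftTrimming(int from, int to) {
    if (!IsMarking()) return;  // Runtime guard replaces the DCHECK.
    assert(from != to);
    // ... transfer mark bits from the old to the new object start ...
  }
};

int main() {
  Marker m;
  m.NotifyLeftTrimming(0, 8);  // Now safe even while marking is stopped.
  return 0;
}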
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -46,7 +46,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   class V8_NODISCARD PauseBlackAllocationScope {
    public:
     explicit PauseBlackAllocationScope(IncrementalMarking* marking)
-        : marking_(marking), paused_(false) {
+        : marking_(marking) {
       if (marking_->black_allocation()) {
         paused_ = true;
         marking_->PauseBlackAllocation();
@@ -61,7 +61,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {

    private:
     IncrementalMarking* marking_;
-    bool paused_;
+    bool paused_ = false;
   };

   // It's hard to know how much work the incremental marker should do to make
@@ -85,30 +85,28 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   static constexpr size_t kEmbedderActivationThreshold = 0;
 #endif

-  static const AccessMode kAtomicity = AccessMode::ATOMIC;
+  V8_INLINE void TransferColor(HeapObject from, HeapObject to);
+
+  V8_INLINE void RestartIfNotMarking();

   IncrementalMarking(Heap* heap, WeakObjects* weak_objects);

   MarkingState* marking_state() { return &marking_state_; }

   AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; }

   NonAtomicMarkingState* non_atomic_marking_state() {
     return &non_atomic_marking_state_;
   }

   void NotifyLeftTrimming(HeapObject from, HeapObject to);

-  V8_INLINE void TransferColor(HeapObject from, HeapObject to);
-
   bool IsStopped() const { return state() == STOPPED; }
+  bool IsRunning() const { return !IsStopped(); }
   bool IsMarking() const { return state() >= MARKING; }
   bool IsComplete() const { return state() == COMPLETE; }

   bool CollectionRequested() const { return collection_requested_; }

-  bool CanBeActivated();
-  bool WasActivated();
+  bool CanBeStarted() const;

   void Start(GarbageCollectionReason gc_reason);
   // Returns true if incremental marking was running and false otherwise.
@@ -117,10 +115,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void UpdateMarkingWorklistAfterYoungGenGC();
   void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);

-  void MarkingComplete(CompletionAction action);
-
-  void Epilogue();
-
   // Performs incremental marking steps and returns before the deadline_in_ms is
   // reached. It may return earlier if the marker is already ahead of the
   // marking schedule, which is indicated with StepResult::kDone.
@@ -128,22 +122,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
                           CompletionAction completion_action,
                           StepOrigin step_origin);

   void FinalizeSweeping();
   bool ContinueConcurrentSweeping();
   void SupportConcurrentSweeping();

   StepResult Step(double max_step_size_in_ms, CompletionAction action,
                   StepOrigin step_origin);

-  bool ShouldDoEmbedderStep();
-  StepResult EmbedderStep(double expected_duration_ms, double* duration_ms);
-
-  V8_INLINE void RestartIfNotMarking();
-
-  // Returns true if the function succeeds in transitioning the object
-  // from white to grey.
-  V8_INLINE bool WhiteToGreyAndPush(HeapObject obj);
-
   // This function is used to color the object black before it undergoes an
   // unsafe layout change. This is a part of synchronization protocol with
   // the concurrent marker.
@@ -173,8 +154,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
     return collector_->local_marking_worklists();
   }

-  void Deactivate();
-
   // Ensures that the given region is black allocated if it is in the old
   // generation.
   void EnsureBlackAllocated(Address allocated, size_t size);
@@ -189,6 +168,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   void MarkRootsForTesting();

  private:
+  class IncrementalMarkingRootMarkingVisitor;
+
   class Observer : public AllocationObserver {
    public:
     Observer(IncrementalMarking* incremental_marking, intptr_t step_size)
@@ -203,6 +184,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {

   void StartMarking();

+  bool ShouldDoEmbedderStep();
+  StepResult EmbedderStep(double expected_duration_ms, double* duration_ms);
+
   void StartBlackAllocation();
   void PauseBlackAllocation();
   void FinishBlackAllocation();
@@ -232,8 +216,15 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   // bytes and already marked bytes.
   size_t ComputeStepSizeInBytes(StepOrigin step_origin);

+  void MarkingComplete(CompletionAction action);
+  void MarkRoots();
+
   void AdvanceOnAllocation();

+  // Returns true if the function succeeds in transitioning the object
+  // from white to grey.
+  bool WhiteToGreyAndPush(HeapObject obj);
+
   State state() const {
     DCHECK_IMPLIES(state_ != STOPPED, FLAG_incremental_marking);
     return state_;
@@ -269,7 +260,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
   std::atomic<State> state_;

   bool is_compacting_ = false;
-  bool was_activated_ = false;
   bool black_allocation_ = false;
   bool collection_requested_ = false;
   IncrementalMarkingJob incremental_marking_job_;
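On the header side, PauseBlackAllocationScope moves paused_ to a default member initializer, keeping the constructor's init-list down to the members that actually vary per construction. A compact sketch of the scope, with a stand-in Marking type rather than the real class:

// Sketch, hypothetical types: a default member initializer gives the flag
// one source of truth, independent of how many constructors exist.
struct Marking {
  bool black_allocation = false;
  void PauseBlackAllocation() { black_allocation = false; }
  void StartBlackAllocation() { black_allocation = true; }
};

class PauseBlackAllocationScope {
 public:
  explicit PauseBlackAllocationScope(Marking* marking) : marking_(marking) {
    if (marking_->black_allocation) {
      paused_ = true;
      marking_->PauseBlackAllocation();
    }
  }
  ~PauseBlackAllocationScope() {
    if (paused_) marking_->StartBlackAllocation();
  }

 private:
  Marking* marking_;
  bool paused_ = false;  // Initialized here instead of in the init-list.
};

int main() {
  Marking marking;
  marking.StartBlackAllocation();
  {
    PauseBlackAllocationScope scope(&marking);  // Paused here...
  }                                             // ...restored here.
  return marking.black_allocation ? 0 : 1;
}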
--- a/src/heap/memory-reducer.cc
+++ b/src/heap/memory-reducer.cc
@@ -58,7 +58,7 @@ void MemoryReducer::TimerTask::RunInternal() {
       low_allocation_rate || optimize_for_memory;
   event.can_start_incremental_gc =
       heap->incremental_marking()->IsStopped() &&
-      (heap->incremental_marking()->CanBeActivated() || optimize_for_memory);
+      (heap->incremental_marking()->CanBeStarted() || optimize_for_memory);
   event.committed_memory = heap->CommittedOldGenerationMemory();
   memory_reducer_->NotifyTimer(event);
 }
--- a/src/heap/scavenger-inl.h
+++ b/src/heap/scavenger-inl.h
@@ -5,12 +5,15 @@
 #ifndef V8_HEAP_SCAVENGER_INL_H_
 #define V8_HEAP_SCAVENGER_INL_H_

+#include "src/codegen/assembler-inl.h"
 #include "src/heap/evacuation-allocator-inl.h"
 #include "src/heap/incremental-marking-inl.h"
 #include "src/heap/memory-chunk.h"
+#include "src/heap/new-spaces.h"
+#include "src/heap/objects-visiting-inl.h"
 #include "src/heap/scavenger.h"
 #include "src/objects/map.h"
 #include "src/objects/objects-body-descriptors-inl.h"
 #include "src/objects/objects-inl.h"
 #include "src/objects/slots-inl.h"
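The added includes here are presumably the flip side of the slimmed-down incremental-marking-inl.h above: scavenger-inl.h includes that header, loses whatever it had been receiving transitively through it, and now has to spell out its own dependencies directly.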