[heap] Fix GcTracer scopes for GC jobs
Bug: v8:11181
Change-Id: I8ca8b7249ef660874da761c11f192ffd06748ff5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2558219
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72096}
This commit is contained in:
parent
3466daa7e4
commit
d09d32bc52
@@ -370,8 +370,17 @@ class ConcurrentMarking::JobTask : public v8::JobTask {
|
||||
|
||||
// v8::JobTask overrides.
|
||||
void Run(JobDelegate* delegate) override {
|
||||
concurrent_marking_->Run(delegate, bytecode_flush_mode_,
|
||||
mark_compact_epoch_, is_forced_gc_);
|
||||
if (delegate->IsJoiningThread()) {
|
||||
// TRACE_GC is not needed here because the caller opens the right scope.
|
||||
concurrent_marking_->Run(delegate, bytecode_flush_mode_,
|
||||
mark_compact_epoch_, is_forced_gc_);
|
||||
} else {
|
||||
TRACE_GC1(concurrent_marking_->heap_->tracer(),
|
||||
GCTracer::Scope::MC_BACKGROUND_MARKING,
|
||||
ThreadKind::kBackground);
|
||||
concurrent_marking_->Run(delegate, bytecode_flush_mode_,
|
||||
mark_compact_epoch_, is_forced_gc_);
|
||||
}
|
||||
}
|
||||
|
||||
size_t GetMaxConcurrency(size_t worker_count) const override {
|
||||
@@ -404,8 +413,6 @@ ConcurrentMarking::ConcurrentMarking(Heap* heap,
|
||||
void ConcurrentMarking::Run(JobDelegate* delegate,
|
||||
BytecodeFlushMode bytecode_flush_mode,
|
||||
unsigned mark_compact_epoch, bool is_forced_gc) {
|
||||
TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_BACKGROUND_MARKING,
|
||||
ThreadKind::kBackground);
|
||||
size_t kBytesUntilInterruptCheck = 64 * KB;
|
||||
int kObjectsUntilInterrupCheck = 1000;
|
||||
uint8_t task_id = delegate->GetTaskId() + 1;
|
||||
|
@@ -593,6 +593,7 @@ void GCTracer::PrintNVP() const {
|
||||
"scavenge.sweep_array_buffers=%.2f "
|
||||
"background.scavenge.parallel=%.2f "
|
||||
"background.unmapper=%.2f "
|
||||
"unmapper=%.2f "
|
||||
"incremental.steps_count=%d "
|
||||
"incremental.steps_took=%.1f "
|
||||
"scavenge_throughput=%.f "
|
||||
@@ -636,6 +637,7 @@ void GCTracer::PrintNVP() const {
|
||||
current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
|
||||
current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
|
||||
current_.scopes[Scope::BACKGROUND_UNMAPPER],
|
||||
current_.scopes[Scope::UNMAPPER],
|
||||
current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
|
||||
.steps,
|
||||
current_.scopes[Scope::MC_INCREMENTAL],
|
||||
@@ -678,6 +680,7 @@ void GCTracer::PrintNVP() const {
|
||||
"background.evacuate.copy=%.2f "
|
||||
"background.evacuate.update_pointers=%.2f "
|
||||
"background.unmapper=%.2f "
|
||||
"unmapper=%.2f "
|
||||
"update_marking_deque=%.2f "
|
||||
"reset_liveness=%.2f\n",
|
||||
duration, spent_in_mutator, "mmc", current_.reduce_memory,
|
||||
@@ -702,6 +705,7 @@ void GCTracer::PrintNVP() const {
|
||||
current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
|
||||
current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
|
||||
current_.scopes[Scope::BACKGROUND_UNMAPPER],
|
||||
current_.scopes[Scope::UNMAPPER],
|
||||
current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
|
||||
current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
|
||||
break;
|
||||
@@ -784,6 +788,7 @@ void GCTracer::PrintNVP() const {
|
||||
"background.evacuate.copy=%.1f "
|
||||
"background.evacuate.update_pointers=%.1f "
|
||||
"background.unmapper=%.1f "
|
||||
"unmapper=%.1f "
|
||||
"total_size_before=%zu "
|
||||
"total_size_after=%zu "
|
||||
"holes_size_before=%zu "
|
||||
@@ -882,9 +887,10 @@ void GCTracer::PrintNVP() const {
|
||||
current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
|
||||
current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
|
||||
current_.scopes[Scope::BACKGROUND_UNMAPPER],
|
||||
current_.start_object_size, current_.end_object_size,
|
||||
current_.start_holes_size, current_.end_holes_size,
|
||||
allocated_since_last_gc, heap_->promoted_objects_size(),
|
||||
current_.scopes[Scope::UNMAPPER], current_.start_object_size,
|
||||
current_.end_object_size, current_.start_holes_size,
|
||||
current_.end_holes_size, allocated_since_last_gc,
|
||||
heap_->promoted_objects_size(),
|
||||
heap_->semi_space_copied_object_size(),
|
||||
heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
|
||||
heap_->nodes_promoted_, heap_->promotion_ratio_,
|
||||
|
@@ -163,12 +163,14 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
|
||||
UnmapFreeMemoryJob& operator=(const UnmapFreeMemoryJob&) = delete;
|
||||
|
||||
void Run(JobDelegate* delegate) override {
|
||||
TRACE_GC1(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
|
||||
ThreadKind::kBackground);
|
||||
unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>(
|
||||
delegate);
|
||||
if (FLAG_trace_unmapper) {
|
||||
PrintIsolate(unmapper_->heap_->isolate(), "UnmapFreeMemoryTask Done\n");
|
||||
if (delegate->IsJoiningThread()) {
|
||||
TRACE_GC(tracer_, GCTracer::Scope::UNMAPPER);
|
||||
RunImpl(delegate);
|
||||
|
||||
} else {
|
||||
TRACE_GC1(tracer_, GCTracer::Scope::BACKGROUND_UNMAPPER,
|
||||
ThreadKind::kBackground);
|
||||
RunImpl(delegate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,6 +184,13 @@ class MemoryAllocator::Unmapper::UnmapFreeMemoryJob : public JobTask {
|
||||
}
|
||||
|
||||
private:
|
||||
void RunImpl(JobDelegate* delegate) {
|
||||
unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>(
|
||||
delegate);
|
||||
if (FLAG_trace_unmapper) {
|
||||
PrintIsolate(unmapper_->heap_->isolate(), "UnmapFreeMemoryTask Done\n");
|
||||
}
|
||||
}
|
||||
Unmapper* const unmapper_;
|
||||
GCTracer* const tracer_;
|
||||
};
|
||||
|
@@ -509,7 +509,8 @@
|
||||
F(SCAVENGER_SCAVENGE_WEAK) \
|
||||
F(SCAVENGER_SCAVENGE_FINALIZE) \
|
||||
F(SCAVENGER_SWEEP_ARRAY_BUFFERS) \
|
||||
F(STOP_THE_WORLD)
|
||||
F(STOP_THE_WORLD) \
|
||||
F(UNMAPPER)
|
||||
|
||||
#define TRACER_BACKGROUND_SCOPES(F) \
|
||||
F(BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP) \
|
||||
|
@@ -7325,6 +7325,23 @@ TEST(Regress10900) {
|
||||
CcTest::CollectAllAvailableGarbage();
|
||||
}
|
||||
|
||||
TEST(Regress11181) {
|
||||
FLAG_always_compact = true;
|
||||
CcTest::InitializeVM();
|
||||
TracingFlags::runtime_stats.store(
|
||||
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE,
|
||||
std::memory_order_relaxed);
|
||||
v8::HandleScope scope(CcTest::isolate());
|
||||
const char* source =
|
||||
"let roots = [];"
|
||||
"for (let i = 0; i < 100; i++) roots.push(new Array(1000).fill(0));"
|
||||
"roots.push(new Array(1000000).fill(0));"
|
||||
"roots;";
|
||||
CompileRun(source);
|
||||
CcTest::CollectAllAvailableGarbage();
|
||||
TracingFlags::runtime_stats.store(0, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
} // namespace heap
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
@@ -813,6 +813,20 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) {
|
||||
EXPECT_EQ(kCustomCallbackTime * 4013, counter2()->time().InMicroseconds());
|
||||
}
|
||||
|
||||
TEST_F(RuntimeCallStatsTest, GarbageCollection) {
|
||||
FLAG_expose_gc = true;
|
||||
v8::Isolate* isolate = v8_isolate();
|
||||
RunJS(
|
||||
"let root = [];"
|
||||
"for (let i = 0; i < 10; i++) root.push((new Array(1000)).fill(0));"
|
||||
"root.push((new Array(1000000)).fill(0));"
|
||||
"((new Array(1000000)).fill(0));");
|
||||
isolate->RequestGarbageCollectionForTesting(
|
||||
v8::Isolate::kFullGarbageCollection);
|
||||
isolate->RequestGarbageCollectionForTesting(
|
||||
v8::Isolate::kFullGarbageCollection);
|
||||
}
|
||||
|
||||
TEST_F(SnapshotNativeCounterTest, StringAddNative) {
|
||||
RunJS("let s = 'hello, ' + 'world!'");
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user