[heap] Reland improvements for embedder tracing

Add a path into embedder tracing on allocation. This is safe as Blink
is not allowed to call into V8 during object construction.

This is a reland of caed2cc033. Also relands the cleanups of ce02d86bf2.

Bug: chromium:843903
Change-Id: Ic89792fe68337c540a1a93629aee2e92b8774ab2
Reviewed-on: https://chromium-review.googlesource.com/c/1350992
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57847}
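For orientation, the "remote tracer" driven throughout this change is an embedder-supplied implementation of v8::EmbedderHeapTracer. Below is a minimal sketch of what such an embedder-side tracer looks like, assuming roughly the API surface of this V8 revision; exact virtual signatures vary across V8 versions, and ExampleTracer with its bookkeeping is illustrative, not part of this commit:

```cpp
#include <utility>
#include <vector>

#include "include/v8.h"

// Hypothetical embedder-side tracer; names and bookkeeping are illustrative.
class ExampleTracer final : public v8::EmbedderHeapTracer {
 public:
  // V8 hands over batches of (embedder field 0, embedder field 1) pairs that
  // LocalEmbedderHeapTracer::ProcessingScope extracted from API wrappers.
  void RegisterV8References(
      const std::vector<std::pair<void*, void*>>& refs) override {
    pending_.insert(pending_.end(), refs.begin(), refs.end());
  }

  void TracePrologue() override { pending_.clear(); }

  bool AdvanceTracing(double deadline_in_ms) override {
    // Mark embedder objects reachable from the registered wrappers until the
    // deadline passes; return true when no work is left.
    while (!pending_.empty() /* && !DeadlinePassed(deadline_in_ms) */) {
      pending_.pop_back();  // Stand-in for actual embedder marking work.
    }
    return pending_.empty();
  }

  bool IsTracingDone() override { return pending_.empty(); }
  void TraceEpilogue() override {}
  void EnterFinalPause(EmbedderStackState) override {}

 private:
  std::vector<std::pair<void*, void*>> pending_;
};
```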
--- a/src/heap/embedder-tracing.cc
+++ b/src/heap/embedder-tracing.cc
@@ -5,22 +5,31 @@
 #include "src/heap/embedder-tracing.h"
 
 #include "src/base/logging.h"
+#include "src/objects/embedder-data-slot.h"
+#include "src/objects/js-objects-inl.h"
 
 namespace v8 {
 namespace internal {
 
+void LocalEmbedderHeapTracer::SetRemoteTracer(EmbedderHeapTracer* tracer) {
+  if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
+
+  remote_tracer_ = tracer;
+  if (remote_tracer_)
+    remote_tracer_->isolate_ = reinterpret_cast<v8::Isolate*>(isolate_);
+}
+
 void LocalEmbedderHeapTracer::TracePrologue() {
   if (!InUse()) return;
 
-  CHECK(cached_wrappers_to_trace_.empty());
   num_v8_marking_worklist_was_empty_ = 0;
+  embedder_worklist_empty_ = false;
   remote_tracer_->TracePrologue();
 }
 
 void LocalEmbedderHeapTracer::TraceEpilogue() {
   if (!InUse()) return;
 
-  CHECK(cached_wrappers_to_trace_.empty());
   remote_tracer_->TraceEpilogue();
 }
@@ -36,30 +45,11 @@ void LocalEmbedderHeapTracer::EnterFinalPause() {
 bool LocalEmbedderHeapTracer::Trace(double deadline) {
   if (!InUse()) return true;
 
-  DCHECK_EQ(0, NumberOfCachedWrappersToTrace());
   return remote_tracer_->AdvanceTracing(deadline);
 }
 
 bool LocalEmbedderHeapTracer::IsRemoteTracingDone() {
-  return (InUse()) ? cached_wrappers_to_trace_.empty() &&
-                         remote_tracer_->IsTracingDone()
-                   : true;
-}
-
-void LocalEmbedderHeapTracer::RegisterWrappersWithRemoteTracer() {
-  if (!InUse()) return;
-
-  if (cached_wrappers_to_trace_.empty()) {
-    return;
-  }
-
-  remote_tracer_->RegisterV8References(cached_wrappers_to_trace_);
-  cached_wrappers_to_trace_.clear();
-}
-
-bool LocalEmbedderHeapTracer::RequiresImmediateWrapperProcessing() {
-  const size_t kTooManyWrappers = 16000;
-  return cached_wrappers_to_trace_.size() > kTooManyWrappers;
+  return !InUse() || remote_tracer_->IsTracingDone();
 }
 
 void LocalEmbedderHeapTracer::SetEmbedderStackStateForNextFinalization(
@@ -69,5 +59,45 @@ void LocalEmbedderHeapTracer::SetEmbedderStackStateForNextFinalization(
   embedder_stack_state_ = stack_state;
 }
 
+LocalEmbedderHeapTracer::ProcessingScope::ProcessingScope(
+    LocalEmbedderHeapTracer* tracer)
+    : tracer_(tracer) {
+  wrapper_cache_.reserve(kWrapperCacheSize);
+}
+
+LocalEmbedderHeapTracer::ProcessingScope::~ProcessingScope() {
+  if (!wrapper_cache_.empty()) {
+    tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
+  }
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::TracePossibleWrapper(
+    JSObject* js_object) {
+  DCHECK(js_object->IsApiWrapper());
+  if (js_object->GetEmbedderFieldCount() < 2) return;
+
+  void* pointer0;
+  void* pointer1;
+  if (EmbedderDataSlot(js_object, 0).ToAlignedPointer(&pointer0) && pointer0 &&
+      EmbedderDataSlot(js_object, 1).ToAlignedPointer(&pointer1)) {
+    wrapper_cache_.push_back({pointer0, pointer1});
+  }
+  FlushWrapperCacheIfFull();
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::FlushWrapperCacheIfFull() {
+  if (wrapper_cache_.size() == wrapper_cache_.capacity()) {
+    tracer_->remote_tracer()->RegisterV8References(std::move(wrapper_cache_));
+    wrapper_cache_.clear();
+    wrapper_cache_.reserve(kWrapperCacheSize);
+  }
+}
+
+void LocalEmbedderHeapTracer::ProcessingScope::AddWrapperInfoForTesting(
+    WrapperInfo info) {
+  wrapper_cache_.push_back(info);
+  FlushWrapperCacheIfFull();
+}
+
 }  // namespace internal
 }  // namespace v8
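A usage note on the scope above: wrapper pairs are buffered locally and handed to the remote tracer either when the buffer reaches kWrapperCacheSize or when the scope is destroyed, which also explains why FlushWrapperCacheIfFull re-reserves after the std::move. The same batching idea as a self-contained sketch with generic names (the Sink::Consume interface is an assumption of this sketch, not V8 API):

```cpp
#include <cstddef>
#include <utility>
#include <vector>

// Illustrative batching scope (not V8 API): collects items and hands them to
// a sink in fixed-size chunks, mirroring how ProcessingScope feeds
// EmbedderHeapTracer::RegisterV8References().
template <typename T, typename Sink>
class BatchingScope {
 public:
  BatchingScope(Sink* sink, size_t capacity)
      : sink_(sink), capacity_(capacity) {
    buffer_.reserve(capacity_);
  }

  ~BatchingScope() {
    // Flush the remainder on scope exit, as ~ProcessingScope() does.
    if (!buffer_.empty()) sink_->Consume(std::move(buffer_));
  }

  void Add(T item) {
    buffer_.push_back(std::move(item));
    if (buffer_.size() == capacity_) {
      sink_->Consume(std::move(buffer_));
      // A moved-from vector has unspecified capacity, so clear and re-reserve
      // before reuse.
      buffer_.clear();
      buffer_.reserve(capacity_);
    }
  }

 private:
  Sink* const sink_;
  const size_t capacity_;
  std::vector<T> buffer_;
};
```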
--- a/src/heap/embedder-tracing.h
+++ b/src/heap/embedder-tracing.h
@@ -13,10 +13,30 @@ namespace v8 {
 namespace internal {
 
 class Heap;
+class JSObject;
 
 class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
  public:
   typedef std::pair<void*, void*> WrapperInfo;
+  typedef std::vector<WrapperInfo> WrapperCache;
+
+  class V8_EXPORT_PRIVATE ProcessingScope {
+   public:
+    explicit ProcessingScope(LocalEmbedderHeapTracer* tracer);
+    ~ProcessingScope();
+
+    void TracePossibleWrapper(JSObject* js_object);
+
+    void AddWrapperInfoForTesting(WrapperInfo info);
+
+   private:
+    static constexpr size_t kWrapperCacheSize = 1000;
+
+    void FlushWrapperCacheIfFull();
+
+    LocalEmbedderHeapTracer* const tracer_;
+    WrapperCache wrapper_cache_;
+  };
 
   explicit LocalEmbedderHeapTracer(Isolate* isolate) : isolate_(isolate) {}
 
@@ -24,59 +44,45 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
     if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
   }
 
+  bool InUse() const { return remote_tracer_ != nullptr; }
   EmbedderHeapTracer* remote_tracer() const { return remote_tracer_; }
 
-  void SetRemoteTracer(EmbedderHeapTracer* tracer) {
-    if (remote_tracer_) remote_tracer_->isolate_ = nullptr;
-
-    remote_tracer_ = tracer;
-    if (remote_tracer_)
-      remote_tracer_->isolate_ = reinterpret_cast<v8::Isolate*>(isolate_);
-  }
-
-  bool InUse() const { return remote_tracer_ != nullptr; }
-
+  void SetRemoteTracer(EmbedderHeapTracer* tracer);
   void TracePrologue();
   void TraceEpilogue();
   void EnterFinalPause();
   bool Trace(double deadline);
   bool IsRemoteTracingDone();
 
-  size_t NumberOfCachedWrappersToTrace() {
-    return cached_wrappers_to_trace_.size();
-  }
-  void AddWrapperToTrace(WrapperInfo entry) {
-    cached_wrappers_to_trace_.push_back(entry);
-  }
-  void ClearCachedWrappersToTrace() { cached_wrappers_to_trace_.clear(); }
-  void RegisterWrappersWithRemoteTracer();
-
-  // In order to avoid running out of memory we force tracing wrappers if there
-  // are too many of them.
-  bool RequiresImmediateWrapperProcessing();
-
   void NotifyV8MarkingWorklistWasEmpty() {
     num_v8_marking_worklist_was_empty_++;
   }
+
   bool ShouldFinalizeIncrementalMarking() {
     static const size_t kMaxIncrementalFixpointRounds = 3;
     return !FLAG_incremental_marking_wrappers || !InUse() ||
-           IsRemoteTracingDone() ||
+           (IsRemoteTracingDone() && embedder_worklist_empty_) ||
           num_v8_marking_worklist_was_empty_ > kMaxIncrementalFixpointRounds;
   }
+
   void SetEmbedderStackStateForNextFinalization(
       EmbedderHeapTracer::EmbedderStackState stack_state);
 
- private:
-  typedef std::vector<WrapperInfo> WrapperCache;
+  void SetEmbedderWorklistEmpty(bool is_empty) {
+    embedder_worklist_empty_ = is_empty;
+  }
 
+ private:
   Isolate* const isolate_;
-  WrapperCache cached_wrappers_to_trace_;
   EmbedderHeapTracer* remote_tracer_ = nullptr;
 
   size_t num_v8_marking_worklist_was_empty_ = 0;
   EmbedderHeapTracer::EmbedderStackState embedder_stack_state_ =
       EmbedderHeapTracer::kUnknown;
+  // Indicates whether the embedder worklist was observed empty on the main
+  // thread. This is opportunistic as concurrent marking tasks may hold local
+  // segments of potential embedder fields to move to the main thread.
+  bool embedder_worklist_empty_ = false;
 
   friend class EmbedderStackStateScope;
 };
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -2964,9 +2964,6 @@ void Heap::RegisterDeserializedObjectsForBlackAllocation(
       }
     }
   }
-  // We potentially deserialized wrappers which require registering with the
-  // embedder as the marker will not find them.
-  local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
 
   // Large object space doesn't use reservations, so it needs custom handling.
   for (HeapObject* object : large_objects) {
@@ -4546,18 +4543,6 @@ EmbedderHeapTracer* Heap::GetEmbedderHeapTracer() const {
   return local_embedder_heap_tracer()->remote_tracer();
 }
 
-void Heap::TracePossibleWrapper(JSObject* js_object) {
-  DCHECK(js_object->IsApiWrapper());
-  if (js_object->GetEmbedderFieldCount() < 2) return;
-  void* pointer0;
-  void* pointer1;
-  if (EmbedderDataSlot(js_object, 0).ToAlignedPointer(&pointer0) && pointer0 &&
-      EmbedderDataSlot(js_object, 1).ToAlignedPointer(&pointer1)) {
-    local_embedder_heap_tracer()->AddWrapperToTrace(
-        std::pair<void*, void*>(pointer0, pointer1));
-  }
-}
-
 void Heap::RegisterExternallyReferencedObject(Address* location) {
   // The embedder is not aware of whether numbers are materialized as heap
   // objects or just passed around as Smis.
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -893,7 +893,6 @@ class Heap {
   void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
   EmbedderHeapTracer* GetEmbedderHeapTracer() const;
 
-  void TracePossibleWrapper(JSObject* js_object);
   void RegisterExternallyReferencedObject(Address* location);
   void SetEmbedderStackStateForNextFinalizaton(
       EmbedderHeapTracer::EmbedderStackState stack_state);
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -8,6 +8,7 @@
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/heap/concurrent-marking.h"
+#include "src/heap/embedder-tracing.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/heap-inl.h"
@@ -815,39 +816,35 @@ intptr_t IncrementalMarking::ProcessMarkingWorklist(
     int size = VisitObject(obj->map(), obj);
     bytes_processed += size - unscanned_bytes_of_large_object_;
   }
-  // Report all found wrappers to the embedder. This is necessary as the
-  // embedder could potentially invalidate wrappers as soon as V8 is done
-  // with its incremental marking processing. Any cached wrappers could
-  // result in broken pointers at this point.
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
   return bytes_processed;
 }
 
 void IncrementalMarking::EmbedderStep(double duration_ms) {
-  constexpr int kObjectsToProcessBeforeInterrupt = 100;
+  constexpr size_t kObjectsToProcessBeforeInterrupt = 500;
 
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_EMBEDDER_TRACING);
 
-  const double deadline =
-      heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
-
-  HeapObject* object;
-  int cnt = 0;
-  while (marking_worklist()->embedder()->Pop(0, &object)) {
-    heap_->TracePossibleWrapper(JSObject::cast(object));
-    if (++cnt == kObjectsToProcessBeforeInterrupt) {
-      cnt = 0;
-      if (heap_->MonotonicallyIncreasingTimeInMs() > deadline) {
-        break;
-      }
-    }
-  }
-
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
+  double deadline = heap_->MonotonicallyIncreasingTimeInMs() + duration_ms;
+  bool empty_worklist;
+  do {
+    {
+      LocalEmbedderHeapTracer::ProcessingScope scope(
+          heap_->local_embedder_heap_tracer());
+      HeapObject* object;
+      size_t cnt = 0;
+      empty_worklist = true;
+      while (marking_worklist()->embedder()->Pop(0, &object)) {
+        scope.TracePossibleWrapper(JSObject::cast(object));
+        if (++cnt == kObjectsToProcessBeforeInterrupt) {
+          cnt = 0;
+          empty_worklist = false;
+          break;
+        }
+      }
+    }
+    if (!heap_->local_embedder_heap_tracer()
+             ->ShouldFinalizeIncrementalMarking()) {
+      heap_->local_embedder_heap_tracer()->Trace(deadline);
+    }
+  } while (!empty_worklist &&
+           (heap_->MonotonicallyIncreasingTimeInMs() < deadline));
+  heap_->local_embedder_heap_tracer()->SetEmbedderWorklistEmpty(empty_worklist);
 }
 
 void IncrementalMarking::Hurry() {
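The new EmbedderStep drains the embedder worklist in chunks of kObjectsToProcessBeforeInterrupt objects, consulting the clock only between chunks so the time checks stay cheap, and lets the remote tracer make progress before finalization is considered. A generic, self-contained sketch of this chunked-deadline pattern (illustrative names, not V8 code):

```cpp
#include <chrono>
#include <cstddef>
#include <deque>

// Process items from a worklist until either it is empty or the deadline
// passes; the clock is consulted once per chunk rather than once per item.
template <typename Item, typename Fn>
bool DrainWithDeadline(std::deque<Item>* worklist, Fn process,
                       std::chrono::steady_clock::time_point deadline,
                       size_t chunk_size = 500) {
  while (!worklist->empty()) {
    for (size_t i = 0; i < chunk_size && !worklist->empty(); ++i) {
      process(worklist->front());
      worklist->pop_front();
    }
    if (std::chrono::steady_clock::now() >= deadline) break;
  }
  return worklist->empty();  // true when all work finished within the budget.
}
```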
@@ -957,6 +954,11 @@ void IncrementalMarking::Epilogue() {
   finalize_marking_completed_ = false;
 }
 
+bool IncrementalMarking::ShouldDoEmbedderStep() {
+  return state_ == MARKING && FLAG_incremental_marking_wrappers &&
+         heap_->local_embedder_heap_tracer()->InUse();
+}
+
 double IncrementalMarking::AdvanceIncrementalMarking(
     double deadline_in_ms, CompletionAction completion_action,
     StepOrigin step_origin) {
@@ -965,27 +967,22 @@ double IncrementalMarking::AdvanceIncrementalMarking(
   TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
   TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
   DCHECK(!IsStopped());
-  DCHECK_EQ(
-      0, heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
 
   double remaining_time_in_ms = 0.0;
-  intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
-      kStepSizeInMs,
-      heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-
-  const bool incremental_wrapper_tracing =
-      state_ == MARKING && FLAG_incremental_marking_wrappers &&
-      heap_->local_embedder_heap_tracer()->InUse();
   do {
-    if (incremental_wrapper_tracing && trace_wrappers_toggle_) {
+    if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
       EmbedderStep(kStepSizeInMs);
     } else {
+      const intptr_t step_size_in_bytes =
+          GCIdleTimeHandler::EstimateMarkingStepSize(
+              kStepSizeInMs,
+              heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
       Step(step_size_in_bytes, completion_action, step_origin);
     }
     trace_wrappers_toggle_ = !trace_wrappers_toggle_;
     remaining_time_in_ms =
         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
-  } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
+  } while (remaining_time_in_ms > kStepSizeInMs && !IsComplete() &&
           !marking_worklist()->IsEmpty());
   return remaining_time_in_ms;
 }
@@ -1037,49 +1034,54 @@ void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
     return;
   }
 
-  size_t bytes_to_process =
-      StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
+  HistogramTimerScope incremental_marking_scope(
+      heap_->isolate()->counters()->gc_incremental_marking());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
 
-  if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
-    HistogramTimerScope incremental_marking_scope(
-        heap_->isolate()->counters()->gc_incremental_marking());
-    TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
-    // The first step after Scavenge will see many allocated bytes.
-    // Cap the step size to distribute the marking work more uniformly.
-    size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
-        kMaxStepSizeInMs,
-        heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
-    bytes_to_process = Min(bytes_to_process, max_step_size);
-    size_t bytes_processed = 0;
-    if (FLAG_concurrent_marking) {
-      bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                             StepOrigin::kV8, WorklistToProcess::kBailout);
-      bytes_to_process = (bytes_processed >= bytes_to_process)
-                             ? 0
-                             : bytes_to_process - bytes_processed;
-      size_t current_bytes_marked_concurrently =
-          heap()->concurrent_marking()->TotalMarkedBytes();
-      // The concurrent_marking()->TotalMarkedBytes() is not monotonic for a
-      // short period of time when a concurrent marking task is finishing.
-      if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
-        bytes_marked_ahead_of_schedule_ +=
-            current_bytes_marked_concurrently - bytes_marked_concurrently_;
-        bytes_marked_concurrently_ = current_bytes_marked_concurrently;
-      }
-    }
-    if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
-      // Steps performed in tasks and concurrently have put us ahead of
-      // schedule. We skip processing of marking dequeue here and thus shift
-      // marking time from inside V8 to standalone tasks.
-      bytes_marked_ahead_of_schedule_ -= bytes_to_process;
-      bytes_processed += bytes_to_process;
-      bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
-    }
-    bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
-                            StepOrigin::kV8, WorklistToProcess::kAll);
-    bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+  if (ShouldDoEmbedderStep() && trace_wrappers_toggle_) {
+    EmbedderStep(kMaxStepSizeInMs);
+  } else {
+    size_t bytes_to_process =
+        StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
+    if (bytes_to_process >= IncrementalMarking::kMinStepSizeInBytes) {
+      // The first step after Scavenge will see many allocated bytes.
+      // Cap the step size to distribute the marking work more uniformly.
+      size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
+          kMaxStepSizeInMs,
+          heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
+      bytes_to_process = Min(bytes_to_process, max_step_size);
+      size_t bytes_processed = 0;
+      if (FLAG_concurrent_marking) {
+        bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                               StepOrigin::kV8, WorklistToProcess::kBailout);
+        bytes_to_process = (bytes_processed >= bytes_to_process)
+                               ? 0
+                               : bytes_to_process - bytes_processed;
+        size_t current_bytes_marked_concurrently =
+            heap()->concurrent_marking()->TotalMarkedBytes();
+        // The concurrent_marking()->TotalMarkedBytes() is not monotonic for a
+        // short period of time when a concurrent marking task is finishing.
+        if (current_bytes_marked_concurrently > bytes_marked_concurrently_) {
+          bytes_marked_ahead_of_schedule_ +=
+              current_bytes_marked_concurrently - bytes_marked_concurrently_;
+          bytes_marked_concurrently_ = current_bytes_marked_concurrently;
+        }
+      }
+      if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
+        // Steps performed in tasks and concurrently have put us ahead of
+        // schedule. We skip processing of marking dequeue here and thus shift
+        // marking time from inside V8 to standalone tasks.
+        bytes_marked_ahead_of_schedule_ -= bytes_to_process;
+        bytes_processed += bytes_to_process;
+        bytes_to_process = IncrementalMarking::kMinStepSizeInBytes;
+      }
+      bytes_processed += Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                              StepOrigin::kV8, WorklistToProcess::kAll);
+      bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+    }
   }
+  trace_wrappers_toggle_ = !trace_wrappers_toggle_;
 }
 
 size_t IncrementalMarking::Step(size_t bytes_to_process,
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -177,6 +177,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   size_t Step(size_t bytes_to_process, CompletionAction action,
               StepOrigin step_origin,
               WorklistToProcess worklist_to_process = WorklistToProcess::kAll);
+
+  bool ShouldDoEmbedderStep();
   void EmbedderStep(double duration);
 
   inline void RestartIfNotMarking();
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -74,7 +74,8 @@ V8_INLINE int
 MarkingVisitor<fixed_array_mode, retaining_path_mode,
                MarkingState>::VisitEmbedderTracingSubclass(Map map, T* object) {
   if (heap_->local_embedder_heap_tracer()->InUse()) {
-    heap_->TracePossibleWrapper(object);
+    marking_worklist()->embedder()->Push(MarkCompactCollectorBase::kMainThread,
+                                         object);
   }
   int size = T::BodyDescriptor::SizeOf(map, object);
   T::BodyDescriptor::IterateBody(map, object, size, this);
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1487,6 +1487,7 @@ void MarkCompactCollector::ProcessEphemeronsUntilFixpoint() {
 
     work_to_do = work_to_do || !marking_worklist()->IsEmpty() ||
                  heap()->concurrent_marking()->ephemeron_marked() ||
+                 !marking_worklist()->IsEmbedderEmpty() ||
                  !heap()->local_embedder_heap_tracer()->IsRemoteTracingDone();
     ++iterations;
   }
@@ -1614,11 +1615,14 @@ void MarkCompactCollector::ProcessEphemeronsLinear() {
 void MarkCompactCollector::PerformWrapperTracing() {
   if (heap_->local_embedder_heap_tracer()->InUse()) {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_TRACING);
-    HeapObject* object;
-    while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
-      heap_->TracePossibleWrapper(JSObject::cast(object));
+    {
+      LocalEmbedderHeapTracer::ProcessingScope scope(
+          heap_->local_embedder_heap_tracer());
+      HeapObject* object;
+      while (marking_worklist()->embedder()->Pop(kMainThread, &object)) {
+        scope.TracePossibleWrapper(JSObject::cast(object));
+      }
     }
-    heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
     heap_->local_embedder_heap_tracer()->Trace(
         std::numeric_limits<double>::infinity());
   }
@@ -1779,7 +1783,8 @@ void MarkCompactCollector::MarkLiveObjects() {
       // once.
       PerformWrapperTracing();
       ProcessMarkingWorklist();
-    } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone());
+    } while (!heap_->local_embedder_heap_tracer()->IsRemoteTracingDone() ||
+             !marking_worklist()->IsEmbedderEmpty());
     DCHECK(marking_worklist()->IsEmbedderEmpty());
     DCHECK(marking_worklist()->IsEmpty());
   }
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -235,6 +235,8 @@ enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY };
 // Base class for minor and full MC collectors.
 class MarkCompactCollectorBase {
  public:
+  static const int kMainThread = 0;
+
   virtual ~MarkCompactCollectorBase() = default;
 
   virtual void SetUp() = 0;
@@ -245,7 +247,6 @@ class MarkCompactCollectorBase {
   inline Isolate* isolate();
 
 protected:
-  static const int kMainThread = 0;
   explicit MarkCompactCollectorBase(Heap* heap)
       : heap_(heap), old_to_new_slots_(0) {}
--- a/src/heap/scavenger.cc
+++ b/src/heap/scavenger.cc
@@ -280,10 +280,6 @@ void ScavengerCollector::CollectGarbage() {
 
   // Update how much has survived scavenge.
   heap_->IncrementYoungSurvivorsCounter(heap_->SurvivedNewSpaceObjectSize());
-
-  // Scavenger may find new wrappers by iterating objects promoted onto a black
-  // page.
-  heap_->local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
 }
 
 void ScavengerCollector::HandleSurvivingNewLargeObjects() {
--- a/test/unittests/heap/embedder-tracing-unittest.cc
+++ b/test/unittests/heap/embedder-tracing-unittest.cc
@@ -83,6 +83,14 @@ TEST(LocalEmbedderHeapTracer, EnterFinalPauseForwards) {
   local_tracer.EnterFinalPause();
 }
 
+TEST(LocalEmbedderHeapTracer, IsRemoteTracingDoneForwards) {
+  StrictMock<MockEmbedderHeapTracer> remote_tracer;
+  LocalEmbedderHeapTracer local_tracer(nullptr);
+  local_tracer.SetRemoteTracer(&remote_tracer);
+  EXPECT_CALL(remote_tracer, IsTracingDone());
+  local_tracer.IsRemoteTracingDone();
+}
+
 TEST(LocalEmbedderHeapTracer, EnterFinalPauseDefaultStackStateUnkown) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(nullptr);
@@ -152,52 +160,19 @@ TEST(LocalEmbedderHeapTracer, IsRemoteTracingDoneIncludesRemote) {
   local_tracer.IsRemoteTracingDone();
 }
 
-TEST(LocalEmbedderHeapTracer, NumberOfCachedWrappersToTraceExcludesRemote) {
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.NumberOfCachedWrappersToTrace();
-}
-
-TEST(LocalEmbedderHeapTracer, RegisterWrappersWithRemoteTracer) {
+TEST(LocalEmbedderHeapTracer, RegisterV8ReferencesWithRemoteTracer) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(nullptr);
   local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+  {
+    LocalEmbedderHeapTracer::ProcessingScope scope(&local_tracer);
+    scope.AddWrapperInfoForTesting(CreateWrapperInfo());
+    EXPECT_CALL(remote_tracer, RegisterV8References(_));
+  }
   EXPECT_CALL(remote_tracer, IsTracingDone()).WillOnce(Return(false));
   EXPECT_FALSE(local_tracer.IsRemoteTracingDone());
 }
 
-TEST(LocalEmbedderHeapTracer, TraceFinishes) {
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_CALL(remote_tracer, AdvanceTracing(_)).WillOnce(Return(true));
-  EXPECT_TRUE(local_tracer.Trace(std::numeric_limits<double>::infinity()));
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
-}
-
-TEST(LocalEmbedderHeapTracer, TraceDoesNotFinish) {
-  StrictMock<MockEmbedderHeapTracer> remote_tracer;
-  LocalEmbedderHeapTracer local_tracer(nullptr);
-  local_tracer.SetRemoteTracer(&remote_tracer);
-  local_tracer.AddWrapperToTrace(CreateWrapperInfo());
-  EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
-  EXPECT_CALL(remote_tracer, RegisterV8References(_));
-  local_tracer.RegisterWrappersWithRemoteTracer();
-  EXPECT_CALL(remote_tracer, AdvanceTracing(_)).WillOnce(Return(false));
-  EXPECT_FALSE(local_tracer.Trace(1.0));
-  EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
-}
-
 TEST_F(LocalEmbedderHeapTracerWithIsolate, SetRemoteTracerSetsIsolate) {
   StrictMock<MockEmbedderHeapTracer> remote_tracer;
   LocalEmbedderHeapTracer local_tracer(isolate());