[heap] Introduce enum of garbage collection reasons.

Callers of the Heap::CollectGarbage* functions now need to specify the reason as an enum value instead of a free-form string. A subsequent CL will add a stats counter for the GC reason.

BUG=
Review-Url: https://codereview.chromium.org/2310143002
Cr-Commit-Position: refs/heads/master@{#39239}
Parent: cab765fe50
Commit: 1b26611ce9
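At a glance, the API change repeated across every hunk below: call sites that previously passed an arbitrary string now pass a value of the new GarbageCollectionReason enum. A minimal before/after sketch, with both lines taken verbatim from the statistics-extension hunk in this CL:

    // Before this CL: the reason was a free-form string, which cannot be
    // aggregated into a stats counter.
    heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");

    // After this CL: the reason is an enum value declared in src/heap/heap.h,
    // which the announced follow-up CL can bucket into a counter.
    heap->CollectAllGarbage(Heap::kNoGCFlags,
                            GarbageCollectionReason::kCountersExtension);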
src/api.cc (13 changed lines)

@@ -512,7 +512,8 @@ StartupData SnapshotCreator::CreateBlob(
     // If we don't do this then we end up with a stray root pointing at the
     // context even after we have disposed of the context.
-    isolate->heap()->CollectAllAvailableGarbage("mksnapshot");
+    isolate->heap()->CollectAllAvailableGarbage(
+        i::GarbageCollectionReason::kSnapshotCreator);
     isolate->heap()->CompactWeakFixedArrays();

     i::DisallowHeapAllocation no_gc_from_here_on;

@@ -7477,8 +7478,7 @@ Local<Integer> v8::Integer::NewFromUnsigned(Isolate* isolate, uint32_t value) {
 void Isolate::ReportExternalAllocationLimitReached() {
   i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
   if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
-  heap->ReportExternalMemoryPressure(
-      "external memory allocation limit reached.");
+  heap->ReportExternalMemoryPressure();
 }

@@ -7638,13 +7638,13 @@ void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) {
   CHECK(i::FLAG_expose_gc);
   if (type == kMinorGarbageCollection) {
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectGarbage(
-        i::NEW_SPACE, "Isolate::RequestGarbageCollection",
+        i::NEW_SPACE, i::GarbageCollectionReason::kTesting,
         kGCCallbackFlagForced);
   } else {
     DCHECK_EQ(kFullGarbageCollection, type);
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage(
         i::Heap::kAbortIncrementalMarkingMask,
-        "Isolate::RequestGarbageCollection", kGCCallbackFlagForced);
+        i::GarbageCollectionReason::kTesting, kGCCallbackFlagForced);
   }
 }

@@ -8073,7 +8073,8 @@ void Isolate::LowMemoryNotification() {
     i::HistogramTimerScope idle_notification_scope(
         isolate->counters()->gc_low_memory_notification());
     TRACE_EVENT0("v8", "V8.GCLowMemoryNotification");
-    isolate->heap()->CollectAllAvailableGarbage("low memory notification");
+    isolate->heap()->CollectAllAvailableGarbage(
+        i::GarbageCollectionReason::kLowMemoryNotification);
   }
 }
@@ -1282,7 +1282,7 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {

   // Make sure we abort incremental marking.
   isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                                      "prepare for break points");
+                                      GarbageCollectionReason::kDebugger);

   DCHECK(shared->is_compiled());
   bool baseline_exists = shared->HasBaselineCode();

@@ -1599,7 +1599,8 @@ void Debug::ClearMirrorCache() {

 Handle<FixedArray> Debug::GetLoadedScripts() {
-  isolate_->heap()->CollectAllGarbage();
+  isolate_->heap()->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                                      GarbageCollectionReason::kDebugger);
   Factory* factory = isolate_->factory();
   if (!factory->script_list()->IsWeakFixedArray()) {
     return factory->empty_fixed_array();
@@ -67,7 +67,8 @@ void StatisticsExtension::GetCounters(
       args[0]
           ->BooleanValue(args.GetIsolate()->GetCurrentContext())
           .FromMaybe(false)) {
-    heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
+    heap->CollectAllGarbage(Heap::kNoGCFlags,
+                            GarbageCollectionReason::kCountersExtension);
   }
 }
@@ -37,13 +37,15 @@ namespace internal {
   RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                                \
   /* Two GCs before panicking. In newspace will almost always succeed. */  \
   for (int __i__ = 0; __i__ < 2; __i__++) {                                \
-    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),         \
-                                      "allocation failure");               \
+    (ISOLATE)->heap()->CollectGarbage(                                     \
+        __allocation__.RetrySpace(),                                       \
+        GarbageCollectionReason::kAllocationFailure);                      \
     __allocation__ = FUNCTION_CALL;                                        \
     RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                              \
   }                                                                        \
   (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();       \
-  (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");         \
+  (ISOLATE)->heap()->CollectAllAvailableGarbage(                           \
+      GarbageCollectionReason::kLastResort);                               \
   {                                                                        \
     AlwaysAllocateScope __scope__(ISOLATE);                                \
     __allocation__ = FUNCTION_CALL;                                        \

@@ -54,7 +56,6 @@ namespace internal {
     return Handle<TYPE>();                                                 \
   } while (false)

-
 template<typename T>
 Handle<T> Factory::New(Handle<Map> map, AllocationSpace space) {
   CALL_HEAP_FUNCTION(
@@ -57,7 +57,7 @@ const char* GCTracer::Scope::Name(ScopeId id) {
   return "(unknown)";
 }

-GCTracer::Event::Event(Type type, const char* gc_reason,
+GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                        const char* collector_reason)
     : type(type),
       gc_reason(gc_reason),

@@ -110,7 +110,7 @@ const char* GCTracer::Event::TypeName(bool short_name) const {

 GCTracer::GCTracer(Heap* heap)
     : heap_(heap),
-      current_(Event::START, nullptr, nullptr),
+      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
       previous_(current_),
       previous_incremental_mark_compactor_event_(current_),
      cumulative_incremental_marking_bytes_(0),

@@ -130,7 +130,7 @@ GCTracer::GCTracer(Heap* heap)
 }

 void GCTracer::ResetForTesting() {
-  current_ = Event(Event::START, NULL, NULL);
+  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
   previous_ = previous_incremental_mark_compactor_event_ = current_;
   cumulative_incremental_marking_bytes_ = 0.0;

@@ -162,7 +162,8 @@ void GCTracer::ResetForTesting() {
   start_counter_ = 0;
 }

-void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
+void GCTracer::Start(GarbageCollector collector,
+                     GarbageCollectionReason gc_reason,
                      const char* collector_reason) {
   start_counter_++;
   if (start_counter_ != 1) return;

@@ -411,7 +412,6 @@ void GCTracer::Output(const char* format, ...) const {
   heap_->AddToRingBuffer(buffer.start());
 }

-
 void GCTracer::Print() const {
   double duration = current_.end_time - current_.start_time;
   const size_t kIncrementalStatsSize = 128;

@@ -443,7 +443,7 @@ void GCTracer::Print() const {
       static_cast<double>(current_.end_object_size) / MB,
       static_cast<double>(current_.end_memory_size) / MB, duration,
       TotalExternalTime(), incremental_buffer,
-      current_.gc_reason != nullptr ? current_.gc_reason : "",
+      Heap::GarbageCollectionReasonToString(current_.gc_reason),
       current_.collector_reason != nullptr ? current_.collector_reason : "");
 }
@@ -193,7 +193,8 @@ class GCTracer {
       START = 3
     };

-    Event(Type type, const char* gc_reason, const char* collector_reason);
+    Event(Type type, GarbageCollectionReason gc_reason,
+          const char* collector_reason);

     // Returns a string describing the event type.
     const char* TypeName(bool short_name) const;

@@ -201,7 +202,7 @@ class GCTracer {
     // Type of event
     Type type;

-    const char* gc_reason;
+    GarbageCollectionReason gc_reason;
     const char* collector_reason;

     // Timestamp set in the constructor.

@@ -271,7 +272,7 @@ class GCTracer {
   explicit GCTracer(Heap* heap);

   // Start collecting data.
-  void Start(GarbageCollector collector, const char* gc_reason,
+  void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
              const char* collector_reason);

   // Stop collecting data and print results.
src/heap/heap.cc (156 changed lines)

@@ -716,8 +716,8 @@ void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
       site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
 }

-bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
+bool Heap::CollectGarbage(AllocationSpace space,
+                          GarbageCollectionReason gc_reason,
                           const v8::GCCallbackFlags callbackFlags) {
   const char* collector_reason = NULL;
   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
@@ -790,14 +790,16 @@ void Heap::HandleGCRequest() {
   } else if (incremental_marking()->request_type() ==
              IncrementalMarking::COMPLETE_MARKING) {
     incremental_marking()->reset_request_type();
-    CollectAllGarbage(current_gc_flags_, "GC interrupt",
+    CollectAllGarbage(current_gc_flags_,
+                      GarbageCollectionReason::kFinalizeMarkingViaStackGuard,
                       current_gc_callback_flags_);
   } else if (incremental_marking()->request_type() ==
                  IncrementalMarking::FINALIZATION &&
             incremental_marking()->IsMarking() &&
             !incremental_marking()->finalize_marking_completed()) {
     incremental_marking()->reset_request_type();
-    FinalizeIncrementalMarking("GC interrupt: finalize incremental marking");
+    FinalizeIncrementalMarking(
+        GarbageCollectionReason::kFinalizeMarkingViaStackGuard);
   }
 }

@@ -806,8 +808,7 @@ void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) {
   scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated);
 }

-
-void Heap::FinalizeIncrementalMarking(const char* gc_reason) {
+void Heap::FinalizeIncrementalMarking(GarbageCollectionReason gc_reason) {
   if (FLAG_trace_incremental_marking) {
     isolate()->PrintWithTimestamp("[IncrementalMarking] (%s).\n", gc_reason);
   }

@@ -857,7 +858,7 @@ HistogramTimer* Heap::GCTypeTimer(GarbageCollector collector) {
   }
 }

-void Heap::CollectAllGarbage(int flags, const char* gc_reason,
+void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
                              const v8::GCCallbackFlags gc_callback_flags) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not

@@ -867,8 +868,7 @@ void Heap::CollectAllGarbage(int flags, const char* gc_reason,
   set_current_gc_flags(kNoGCFlags);
 }

-
-void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
+void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.

@@ -902,12 +902,12 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
   UncommitFromSpace();
 }

-
-void Heap::ReportExternalMemoryPressure(const char* gc_reason) {
+void Heap::ReportExternalMemoryPressure() {
   if (external_memory_ >
       (external_memory_at_last_mark_compact_ + external_memory_hard_limit())) {
     CollectAllGarbage(
-        kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask, gc_reason,
+        kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask,
+        GarbageCollectionReason::kExternalMemoryPressure,
         static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage |
                                      kGCCallbackFlagCollectAllExternalMemory));
     return;

@@ -915,13 +915,13 @@ void Heap::ReportExternalMemoryPressure(const char* gc_reason) {
   if (incremental_marking()->IsStopped()) {
     if (incremental_marking()->CanBeActivated()) {
       StartIncrementalMarking(
-          i::Heap::kNoGCFlags,
+          i::Heap::kNoGCFlags, GarbageCollectionReason::kExternalMemoryPressure,
           static_cast<GCCallbackFlags>(
               kGCCallbackFlagSynchronousPhantomCallbackProcessing |
-              kGCCallbackFlagCollectAllExternalMemory),
-          gc_reason);
+              kGCCallbackFlagCollectAllExternalMemory));
     } else {
-      CollectAllGarbage(i::Heap::kNoGCFlags, gc_reason,
+      CollectAllGarbage(i::Heap::kNoGCFlags,
+                        GarbageCollectionReason::kExternalMemoryPressure,
                         kGCCallbackFlagSynchronousPhantomCallbackProcessing);
     }
   } else {

@@ -955,8 +955,8 @@ void Heap::EnsureFillerObjectAtTop() {
   }
 }

-bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
+bool Heap::CollectGarbage(GarbageCollector collector,
+                          GarbageCollectionReason gc_reason,
                           const char* collector_reason,
                           const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.

@@ -1055,8 +1055,8 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
   // Start incremental marking for the next cycle. The heap snapshot
   // generator needs incremental marking to stay off after it aborted.
   if (!ShouldAbortIncrementalMarking()) {
-    StartIncrementalMarkingIfNeeded(kNoGCFlags, kNoGCCallbackFlags,
-                                    "GC epilogue");
+    StartIncrementalMarkingIfAllocationLimitIsReached(kNoGCFlags,
+                                                      kNoGCCallbackFlags);
   }

   return next_gc_likely_to_collect_more;

@@ -1082,28 +1082,28 @@ int Heap::NotifyContextDisposed(bool dependant_context) {
   return ++contexts_disposed_;
 }

 void Heap::StartIncrementalMarking(int gc_flags,
-                                   const GCCallbackFlags gc_callback_flags,
-                                   const char* reason) {
+                                   GarbageCollectionReason gc_reason,
+                                   GCCallbackFlags gc_callback_flags) {
   DCHECK(incremental_marking()->IsStopped());
   set_current_gc_flags(gc_flags);
   current_gc_callback_flags_ = gc_callback_flags;
-  incremental_marking()->Start(reason);
+  incremental_marking()->Start(gc_reason);
 }

-void Heap::StartIncrementalMarkingIfNeeded(
-    int gc_flags, const GCCallbackFlags gc_callback_flags, const char* reason) {
+void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
+    int gc_flags, const GCCallbackFlags gc_callback_flags) {
   if (incremental_marking()->IsStopped() &&
       incremental_marking()->ShouldActivateEvenWithoutIdleNotification()) {
-    StartIncrementalMarking(gc_flags, gc_callback_flags, reason);
+    StartIncrementalMarking(gc_flags, GarbageCollectionReason::kAllocationLimit,
+                            gc_callback_flags);
   }
 }

-void Heap::StartIdleIncrementalMarking() {
+void Heap::StartIdleIncrementalMarking(GarbageCollectionReason gc_reason) {
   gc_idle_time_handler_->ResetNoProgressCounter();
-  StartIncrementalMarking(kReduceMemoryFootprintMask, kNoGCCallbackFlags,
-                          "idle");
+  StartIncrementalMarking(kReduceMemoryFootprintMask, gc_reason,
+                          kNoGCCallbackFlags);
 }

@@ -1212,17 +1212,15 @@ bool Heap::ReserveSpace(Reservation* reservations, List<Address>* maps) {
     }
     if (perform_gc) {
       if (space == NEW_SPACE) {
-        CollectGarbage(NEW_SPACE, "failed to reserve space in the new space");
+        CollectGarbage(NEW_SPACE, GarbageCollectionReason::kDeserializer);
       } else {
         if (counter > 1) {
           CollectAllGarbage(
               kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
-              "failed to reserve space in paged or large "
-              "object space, trying to reduce memory footprint");
+              GarbageCollectionReason::kDeserializer);
         } else {
-          CollectAllGarbage(
-              kAbortIncrementalMarkingMask,
-              "failed to reserve space in paged or large object space");
+          CollectAllGarbage(kAbortIncrementalMarkingMask,
+                            GarbageCollectionReason::kDeserializer);
         }
       }
       gc_performed = true;

@@ -4076,7 +4074,8 @@ bool Heap::IsHeapIterable() {
 void Heap::MakeHeapIterable() {
   DCHECK(AllowHeapAllocation::IsAllowed());
   if (!IsHeapIterable()) {
-    CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
+    CollectAllGarbage(kMakeHeapIterableMask,
+                      GarbageCollectionReason::kMakeHeapIterable);
   }
   if (mark_compact_collector()->sweeping_in_progress()) {
     mark_compact_collector()->EnsureSweepingCompleted();

@@ -4211,20 +4210,21 @@ bool Heap::MarkingDequesAreEmpty() {
               ->NumberOfWrappersToTrace() == 0));
 }

-void Heap::FinalizeIncrementalMarkingIfComplete(const char* comment) {
+void Heap::FinalizeIncrementalMarkingIfComplete(
+    GarbageCollectionReason gc_reason) {
   if (incremental_marking()->IsMarking() &&
       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
        (!incremental_marking()->finalize_marking_completed() &&
        MarkingDequesAreEmpty()))) {
-    FinalizeIncrementalMarking(comment);
+    FinalizeIncrementalMarking(gc_reason);
   } else if (incremental_marking()->IsComplete() ||
              (mark_compact_collector()->marking_deque()->IsEmpty())) {
-    CollectAllGarbage(current_gc_flags_, comment);
+    CollectAllGarbage(current_gc_flags_, gc_reason);
   }
 }

-bool Heap::TryFinalizeIdleIncrementalMarking(double idle_time_in_ms) {
+bool Heap::TryFinalizeIdleIncrementalMarking(
+    double idle_time_in_ms, GarbageCollectionReason gc_reason) {
   size_t size_of_objects = static_cast<size_t>(SizeOfObjects());
   double final_incremental_mark_compact_speed_in_bytes_per_ms =
       tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();

@@ -4233,16 +4233,14 @@ bool Heap::TryFinalizeIdleIncrementalMarking(double idle_time_in_ms) {
        MarkingDequesAreEmpty() &&
       gc_idle_time_handler_->ShouldDoOverApproximateWeakClosure(
            idle_time_in_ms))) {
-    FinalizeIncrementalMarking(
-        "Idle notification: finalize incremental marking");
+    FinalizeIncrementalMarking(gc_reason);
    return true;
   } else if (incremental_marking()->IsComplete() ||
              (MarkingDequesAreEmpty() &&
              gc_idle_time_handler_->ShouldDoFinalIncrementalMarkCompact(
                   idle_time_in_ms, size_of_objects,
                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
-    CollectAllGarbage(current_gc_flags_,
-                      "idle notification: finalize incremental marking");
+    CollectAllGarbage(current_gc_flags_, gc_reason);
    return true;
   }
   return false;

@@ -4317,7 +4315,7 @@ bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
       DCHECK(contexts_disposed_ > 0);
       HistogramTimerScope scope(isolate_->counters()->gc_context());
       TRACE_EVENT0("v8", "V8.GCContext");
-      CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
+      CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kContextDisposal);
       break;
     }
     case DO_NOTHING:

@@ -4450,10 +4448,11 @@ void Heap::CheckMemoryPressure() {
     }
   }
   if (memory_pressure_level_.Value() == MemoryPressureLevel::kCritical) {
-    CollectGarbageOnMemoryPressure("memory pressure");
+    CollectGarbageOnMemoryPressure();
   } else if (memory_pressure_level_.Value() == MemoryPressureLevel::kModerate) {
     if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
-      StartIdleIncrementalMarking();
+      StartIncrementalMarking(kReduceMemoryFootprintMask,
+                              GarbageCollectionReason::kMemoryPressure);
     }
   }
   MemoryReducer::Event event;

@@ -4462,7 +4461,7 @@ void Heap::CheckMemoryPressure() {
   memory_reducer_->NotifyPossibleGarbage(event);
 }

-void Heap::CollectGarbageOnMemoryPressure(const char* source) {
+void Heap::CollectGarbageOnMemoryPressure() {
   const int kGarbageThresholdInBytes = 8 * MB;
   const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
   // This constant is the maximum response time in RAIL performance model.

@@ -4470,7 +4469,8 @@ void Heap::CollectGarbageOnMemoryPressure(const char* source) {

   double start = MonotonicallyIncreasingTimeInMs();
   CollectAllGarbage(kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
-                    source, kGCCallbackFlagCollectAllAvailableGarbage);
+                    GarbageCollectionReason::kMemoryPressure,
+                    kGCCallbackFlagCollectAllAvailableGarbage);
   double end = MonotonicallyIncreasingTimeInMs();

   // Estimate how much memory we can free.

@@ -4485,11 +4485,13 @@ void Heap::CollectGarbageOnMemoryPressure(const char* source) {
   // Otherwise, start incremental marking.
   if (end - start < kMaxMemoryPressurePauseMs / 2) {
     CollectAllGarbage(
-        kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask, source,
+        kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
+        GarbageCollectionReason::kMemoryPressure,
         kGCCallbackFlagCollectAllAvailableGarbage);
   } else {
     if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
-      StartIdleIncrementalMarking();
+      StartIncrementalMarking(kReduceMemoryFootprintMask,
+                              GarbageCollectionReason::kMemoryPressure);
     }
   }
 }
@@ -4575,6 +4577,58 @@ void Heap::ReportHeapStatistics(const char* title) {

 #endif  // DEBUG

+const char* Heap::GarbageCollectionReasonToString(
+    GarbageCollectionReason gc_reason) {
+  switch (gc_reason) {
+    case GarbageCollectionReason::kAllocationFailure:
+      return "allocation failure";
+    case GarbageCollectionReason::kAllocationLimit:
+      return "allocation limit";
+    case GarbageCollectionReason::kContextDisposal:
+      return "context disposal";
+    case GarbageCollectionReason::kCountersExtension:
+      return "counters extension";
+    case GarbageCollectionReason::kDebugger:
+      return "debugger";
+    case GarbageCollectionReason::kDeserializer:
+      return "deserialize";
+    case GarbageCollectionReason::kExternalMemoryPressure:
+      return "external memory pressure";
+    case GarbageCollectionReason::kFinalizeMarkingViaStackGuard:
+      return "finalize incremental marking via stack guard";
+    case GarbageCollectionReason::kFinalizeMarkingViaTask:
+      return "finalize incremental marking via task";
+    case GarbageCollectionReason::kFullHashtable:
+      return "full hash-table";
+    case GarbageCollectionReason::kHeapProfiler:
+      return "heap profiler";
+    case GarbageCollectionReason::kIdleTask:
+      return "idle task";
+    case GarbageCollectionReason::kLastResort:
+      return "last resort";
+    case GarbageCollectionReason::kLowMemoryNotification:
+      return "low memory notification";
+    case GarbageCollectionReason::kMakeHeapIterable:
+      return "make heap iterable";
+    case GarbageCollectionReason::kMemoryPressure:
+      return "memory pressure";
+    case GarbageCollectionReason::kMemoryReducer:
+      return "memory reducer";
+    case GarbageCollectionReason::kRuntime:
+      return "runtime";
+    case GarbageCollectionReason::kSamplingProfiler:
+      return "sampling profiler";
+    case GarbageCollectionReason::kSnapshotCreator:
+      return "snapshot creator";
+    case GarbageCollectionReason::kTesting:
+      return "testing";
+  }
+  UNREACHABLE();
+  return "";
+}
+
 bool Heap::Contains(HeapObject* value) {
   if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
@@ -348,6 +348,31 @@ enum class ClearRecordedSlots { kYes, kNo };

 enum class ClearBlackArea { kYes, kNo };

+enum class GarbageCollectionReason {
+  kUnknown,
+  kAllocationFailure,
+  kAllocationLimit,
+  kContextDisposal,
+  kCountersExtension,
+  kDebugger,
+  kDeserializer,
+  kExternalMemoryPressure,
+  kFinalizeMarkingViaStackGuard,
+  kFinalizeMarkingViaTask,
+  kFullHashtable,
+  kHeapProfiler,
+  kIdleTask,
+  kLastResort,
+  kLowMemoryNotification,
+  kMakeHeapIterable,
+  kMemoryPressure,
+  kMemoryReducer,
+  kRuntime,
+  kSamplingProfiler,
+  kSnapshotCreator,
+  kTesting
+};
+
 // A queue of objects promoted during scavenge. Each object is accompanied by
 // its size to avoid dereferencing a map pointer for scanning. The last page in
 // to-space is used for the promotion queue. On conflict during scavenge, the
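Taken together, this enum and the switch added to heap.cc above form a closed mapping from reason to log string. A small usage sketch (the call site is hypothetical, but both names come straight from this CL):

    // GCTracer::Print() and IncrementalMarking::Start() use this helper so
    // traces stay human-readable while code passes enum values around.
    const char* name = Heap::GarbageCollectionReasonToString(
        GarbageCollectionReason::kIdleTask);
    // name is "idle task", per the switch in heap.cc.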
@@ -1079,22 +1104,22 @@ class Heap {
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   inline bool CollectGarbage(
-      AllocationSpace space, const char* gc_reason = NULL,
+      AllocationSpace space, GarbageCollectionReason gc_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

   // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
   // non-zero, then the slower precise sweeper is used, which leaves the heap
   // in a state where we can iterate over the heap visiting all objects.
   void CollectAllGarbage(
-      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
+      int flags, GarbageCollectionReason gc_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

   // Last hope GC, should try to squeeze as much as possible.
-  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
+  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);

   // Reports and external memory pressure event, either performs a major GC or
   // completes incremental marking in order to free external resources.
-  void ReportExternalMemoryPressure(const char* gc_reason = NULL);
+  void ReportExternalMemoryPressure();

   // Invoked when GC was requested via the stack guard.
   void HandleGCRequest();

@@ -1145,23 +1170,22 @@ class Heap {

   // Start incremental marking and ensure that idle time handler can perform
   // incremental steps.
-  void StartIdleIncrementalMarking();
+  void StartIdleIncrementalMarking(GarbageCollectionReason gc_reason);

   // Starts incremental marking assuming incremental marking is currently
   // stopped.
-  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
-                               const GCCallbackFlags gc_callback_flags =
-                                   GCCallbackFlags::kNoGCCallbackFlags,
-                               const char* reason = nullptr);
+  void StartIncrementalMarking(
+      int gc_flags, GarbageCollectionReason gc_reason,
+      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

-  void StartIncrementalMarkingIfNeeded(int gc_flags = kNoGCFlags,
-                                       const GCCallbackFlags gc_callback_flags =
-                                           GCCallbackFlags::kNoGCCallbackFlags,
-                                       const char* reason = nullptr);
+  void StartIncrementalMarkingIfAllocationLimitIsReached(
+      int gc_flags,
+      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

-  void FinalizeIncrementalMarkingIfComplete(const char* comment);
+  void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);

-  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);
+  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms,
+                                         GarbageCollectionReason gc_reason);

   void RegisterReservationsForBlackAllocation(Reservation* reservations);

@@ -1458,6 +1482,9 @@ class Heap {
   void ReportCodeStatistics(const char* title);
 #endif

+  static const char* GarbageCollectionReasonToString(
+      GarbageCollectionReason gc_reason);
+
  private:
   class PretenuringScope;

@@ -1613,7 +1640,7 @@ class Heap {
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   bool CollectGarbage(
-      GarbageCollector collector, const char* gc_reason,
+      GarbageCollector collector, GarbageCollectionReason gc_reason,
       const char* collector_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

@@ -1688,10 +1715,6 @@ class Heap {

   void ReduceNewSpaceSize();

-  bool TryFinalizeIdleIncrementalMarking(
-      double idle_time_in_ms, size_t size_of_objects,
-      size_t mark_compact_speed_in_bytes_per_ms);
-
   GCIdleTimeHeapState ComputeHeapState();

   bool PerformIdleTimeAction(GCIdleTimeAction action,

@@ -1711,13 +1734,13 @@ class Heap {

   void CompactRetainedMaps(ArrayList* retained_maps);

-  void CollectGarbageOnMemoryPressure(const char* source);
+  void CollectGarbageOnMemoryPressure();

   // Attempt to over-approximate the weak closure by marking object groups and
   // implicit references from global handles, but don't atomically complete
   // marking. If we continue to mark incrementally, we might have marked
   // objects that die later.
-  void FinalizeIncrementalMarking(const char* gc_reason);
+  void FinalizeIncrementalMarking(GarbageCollectionReason gc_reason);

   // Returns the timer used for a given GC type.
   // - GCScavenger: young generation GC
@@ -84,7 +84,9 @@ IncrementalMarkingJob::IdleTask::Progress IncrementalMarkingJob::IdleTask::Step(
       deadline_in_ms, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
       IncrementalMarking::DO_NOT_FORCE_COMPLETION);
   if (remaining_idle_time_in_ms > 0.0) {
-    heap->TryFinalizeIdleIncrementalMarking(remaining_idle_time_in_ms);
+    heap->TryFinalizeIdleIncrementalMarking(
+        remaining_idle_time_in_ms,
+        GarbageCollectionReason::kFinalizeMarkingViaTask);
   }
   return incremental_marking->IsStopped() ? kDone : kMoreWork;
 }

@@ -123,7 +125,7 @@ void IncrementalMarkingJob::DelayedTask::Step(Heap* heap) {
       deadline, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
       IncrementalMarking::FORCE_COMPLETION);
   heap->FinalizeIncrementalMarkingIfComplete(
-      "Incremental marking task: finalize incremental marking");
+      GarbageCollectionReason::kFinalizeMarkingViaTask);
 }
@@ -497,8 +497,7 @@ static void PatchIncrementalMarkingRecordWriteStubs(
   }
 }

-
-void IncrementalMarking::Start(const char* reason) {
+void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   if (FLAG_trace_incremental_marking) {
     int old_generation_size_mb =
         static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);

@@ -507,8 +506,8 @@ void IncrementalMarking::Start(const char* reason) {
     heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
         "slack %dMB\n",
-        (reason == nullptr) ? "unknown reason" : reason, old_generation_size_mb,
-        old_generation_limit_mb,
+        Heap::GarbageCollectionReasonToString(gc_reason),
+        old_generation_size_mb, old_generation_limit_mb,
         Max(0, old_generation_limit_mb - old_generation_size_mb));
   }
   DCHECK(FLAG_incremental_marking);
@@ -74,7 +74,7 @@ class IncrementalMarking {

   bool WasActivated();

-  void Start(const char* reason = nullptr);
+  void Start(GarbageCollectionReason gc_reason);

   void FinalizeIncrementally();
@@ -73,7 +73,8 @@ void MemoryReducer::NotifyTimer(const Event& event) {
       PrintIsolate(heap()->isolate(), "Memory reducer: started GC #%d\n",
                    state_.started_gcs);
     }
-    heap()->StartIdleIncrementalMarking();
+    heap()->StartIdleIncrementalMarking(
+        GarbageCollectionReason::kMemoryReducer);
   } else if (state_.action == kWait) {
     if (!heap()->incremental_marking()->IsStopped() &&
         heap()->ShouldOptimizeForMemoryUsage()) {

@@ -87,7 +88,7 @@ void MemoryReducer::NotifyTimer(const Event& event) {
           deadline, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
           IncrementalMarking::FORCE_COMPLETION);
       heap()->FinalizeIncrementalMarkingIfComplete(
-          "Memory reducer: finalize incremental marking");
+          GarbageCollectionReason::kFinalizeMarkingViaTask);
     }
     // Re-schedule the timer.
     ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
@@ -34,7 +34,7 @@ void ScavengeJob::IdleTask::RunInternal(double deadline_in_seconds) {
                                  new_space_capacity)) {
     if (EnoughIdleTimeForScavenge(
             idle_time_in_ms, scavenge_speed_in_bytes_per_ms, new_space_size)) {
-      heap->CollectGarbage(NEW_SPACE, "idle task: scavenge");
+      heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kIdleTask);
     } else {
       // Immediately request another idle task that can get larger idle time.
       job_->RescheduleIdleTask(heap);
@@ -2558,8 +2558,8 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
   // if it is big enough.
   owner_->EmptyAllocationInfo();

-  owner_->heap()->StartIncrementalMarkingIfNeeded(
-      Heap::kNoGCFlags, kNoGCCallbackFlags, "old space step");
+  owner_->heap()->StartIncrementalMarkingIfAllocationLimitIsReached(
+      Heap::kNoGCFlags, kNoGCCallbackFlags);

   int new_node_size = 0;
   FreeSpace* new_node = FindNodeFor(size_in_bytes, &new_node_size);

@@ -2995,8 +2995,8 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
     reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
   }

-  heap()->StartIncrementalMarkingIfNeeded(Heap::kNoGCFlags, kNoGCCallbackFlags,
-                                          "old space step");
+  heap()->StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags,
+                                                            kNoGCCallbackFlags);
   AllocationStep(object->address(), object_size);

   if (heap()->incremental_marking()->black_allocation()) {
@@ -940,7 +940,8 @@ Object* Isolate::StackOverflow() {

 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && FLAG_stress_compaction) {
-    heap()->CollectAllGarbage(Heap::kNoGCFlags, "trigger compaction");
+    heap()->CollectAllGarbage(Heap::kNoGCFlags,
+                              GarbageCollectionReason::kTesting);
   }
 #endif  // VERIFY_HEAP
@@ -18125,7 +18125,8 @@ Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
   if (capacity > ObjectHashTable::kMaxCapacity) {
     for (size_t i = 0; i < 2; ++i) {
       isolate->heap()->CollectAllGarbage(
-          Heap::kFinalizeIncrementalMarkingMask, "full object hash table");
+          Heap::kFinalizeIncrementalMarkingMask,
+          GarbageCollectionReason::kFullHashtable);
     }
     table->Rehash(isolate->factory()->undefined_value());
   }
@@ -476,7 +476,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
                  entries_map_.occupancy());
   }
   heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                           "HeapObjectsMap::UpdateHeapObjectsMap");
+                           GarbageCollectionReason::kHeapProfiler);
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
        obj != NULL;

@@ -2506,12 +2506,10 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
   // full GC is reachable from the root when computing dominators.
   // This is not true for weakly reachable objects.
   // As a temporary solution we call GC twice.
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                           GarbageCollectionReason::kHeapProfiler);
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                           GarbageCollectionReason::kHeapProfiler);

 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;
@@ -259,8 +259,8 @@ v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(

 v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
   if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
-    isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
-                                        "SamplingHeapProfiler");
+    isolate_->heap()->CollectAllGarbage(
+        Heap::kNoGCFlags, GarbageCollectionReason::kSamplingProfiler);
   }
   // To resolve positions to line/column numbers, we will need to look up
   // scripts. Build a map to allow fast mapping from script id to script.
@@ -1522,7 +1522,8 @@ RUNTIME_FUNCTION(Runtime_GetDebugContext) {
 RUNTIME_FUNCTION(Runtime_CollectGarbage) {
   SealHandleScope shs(isolate);
   DCHECK(args.length() == 1);
-  isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
+  isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+                                     GarbageCollectionReason::kRuntime);
   return isolate->heap()->undefined_value();
 }
@@ -107,6 +107,18 @@ void CcTest::Run() {

 i::Heap* CcTest::heap() { return i_isolate()->heap(); }

+void CcTest::CollectGarbage(i::AllocationSpace space) {
+  heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
+}
+
+void CcTest::CollectAllGarbage(int flags) {
+  heap()->CollectAllGarbage(flags, i::GarbageCollectionReason::kTesting);
+}
+
+void CcTest::CollectAllAvailableGarbage() {
+  heap()->CollectAllAvailableGarbage(i::GarbageCollectionReason::kTesting);
+}
+
 v8::base::RandomNumberGenerator* CcTest::random_number_generator() {
   return InitIsolateOnce()->random_number_generator();
 }
@@ -125,6 +125,10 @@ class CcTest {

   static i::Heap* heap();

+  static void CollectGarbage(i::AllocationSpace space);
+  static void CollectAllGarbage(int flags);
+  static void CollectAllAvailableGarbage();
+
   static v8::base::RandomNumberGenerator* random_number_generator();

   static v8::Local<v8::Object> global();
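The test changes that follow all funnel through these three new CcTest wrappers, so cctest code no longer spells out a reason at each call site; every test-triggered GC is attributed to kTesting. For example (call sites taken from the hunks below):

    // Scavenge the young generation, reported with reason kTesting.
    CcTest::CollectGarbage(i::NEW_SPACE);
    // Full GC that also finalizes any in-progress incremental marking.
    CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);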
@@ -15,8 +15,10 @@ namespace internal {
 namespace heap {

 void SealCurrentObjects(Heap* heap) {
-  heap->CollectAllGarbage();
-  heap->CollectAllGarbage();
+  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                          GarbageCollectionReason::kTesting);
+  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                          GarbageCollectionReason::kTesting);
   heap->mark_compact_collector()->EnsureSweepingCompleted();
   heap->old_space()->EmptyAllocationInfo();
   for (Page* page : *heap->old_space()) {

@@ -151,7 +153,8 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
   }
   CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
   if (marking->IsStopped()) {
-    heap->StartIncrementalMarking();
+    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
+                                  i::GarbageCollectionReason::kTesting);
   }
   CHECK(marking->IsMarking() || marking->IsComplete());
   if (!force_completion) return;

@@ -180,7 +183,7 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space) {
 }

 void GcAndSweep(Heap* heap, AllocationSpace space) {
-  heap->CollectGarbage(space);
+  heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
   if (heap->mark_compact_collector()->sweeping_in_progress()) {
     heap->mark_compact_collector()->EnsureSweepingCompleted();
   }
@@ -91,7 +91,7 @@ Handle<Object> v8::internal::HeapTester::TestAllocateAfterFailures() {
   // Similar to what the CALL_AND_RETRY macro does in the last-resort case, we
   // are wrapping the allocator function in an AlwaysAllocateScope. Test that
   // all allocations succeed immediately without any retry.
-  CcTest::heap()->CollectAllAvailableGarbage("panic");
+  CcTest::CollectAllAvailableGarbage();
   AlwaysAllocateScope scope(CcTest::i_isolate());
   return handle(AllocateAfterFailures().ToObjectChecked(), CcTest::i_isolate());
 }
@@ -130,7 +130,7 @@ TEST(ArrayBuffer_Compaction) {
   page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
   CHECK(IsTracked(*buf1));

-  heap->CollectAllGarbage();
+  CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

   Page* page_after_gc = Page::FromAddress(buf1->address());
   CHECK(IsTracked(*buf1));

@@ -177,7 +177,7 @@ TEST(ArrayBuffer_UnregisterDuringSweep) {
     CHECK(IsTracked(*buf2));
   }

-  heap->CollectGarbage(OLD_SPACE);
+  CcTest::CollectGarbage(OLD_SPACE);
   // |Externalize| will cause the buffer to be |Unregister|ed. Without
   // barriers and proper synchronization this will trigger a data race on
   // TSAN.
@@ -68,7 +68,7 @@ HEAP_TEST(CompactionFullAbortedPage) {
       CheckAllObjectsOnPage(compaction_page_handles, to_be_aborted_page);

       heap->set_force_oom(true);
-      heap->CollectAllGarbage();
+      CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
       heap->mark_compact_collector()->EnsureSweepingCompleted();

       // Check that all handles still point to the same page, i.e., compaction

@@ -128,7 +128,7 @@ HEAP_TEST(CompactionPartiallyAbortedPage) {
           Page::FromAddress(page_to_fill_handles.front()->address());

       heap->set_force_oom(true);
-      heap->CollectAllGarbage();
+      CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
       heap->mark_compact_collector()->EnsureSweepingCompleted();

       bool migration_aborted = false;

@@ -210,7 +210,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) {
           Page::FromAddress(page_to_fill_handles.front()->address());

       heap->set_force_oom(true);
-      heap->CollectAllGarbage();
+      CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
       heap->mark_compact_collector()->EnsureSweepingCompleted();

       // The following check makes sure that we compacted "some" objects, while

@@ -303,7 +303,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
           Page::FromAddress(page_to_fill_handles.front()->address());

       heap->set_force_oom(true);
-      heap->CollectAllGarbage();
+      CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
       heap->mark_compact_collector()->EnsureSweepingCompleted();

       // The following check makes sure that we compacted "some" objects, while

@@ -353,7 +353,7 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
       // If store buffer entries are not properly filtered/reset for aborted
       // pages we have now a broken address at an object slot in old space and
       // the following scavenge will crash.
-      heap->CollectGarbage(NEW_SPACE);
+      CcTest::CollectGarbage(NEW_SPACE);
     }
   }
 }
(One file's diff is suppressed because it is too large.)
@ -122,7 +122,7 @@ TEST(IncrementalMarkingUsingIdleTasks) {
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
marking->Start();
|
||||
marking->Start(i::GarbageCollectionReason::kTesting);
|
||||
CHECK(platform.PendingIdleTask());
|
||||
const double kLongIdleTimeInSeconds = 1;
|
||||
const double kShortIdleTimeInSeconds = 0.010;
|
||||
@ -149,7 +149,7 @@ TEST(IncrementalMarkingUsingIdleTasksAfterGC) {
|
||||
MockPlatform platform(old_platform);
|
||||
i::V8::SetPlatformForTesting(&platform);
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
// Perform any pending idle tasks.
|
||||
while (platform.PendingIdleTask()) {
|
||||
platform.PerformIdleTask(kLongIdleTimeInSeconds);
|
||||
@ -157,7 +157,7 @@ TEST(IncrementalMarkingUsingIdleTasksAfterGC) {
|
||||
CHECK(!platform.PendingIdleTask());
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
marking->Start();
|
||||
marking->Start(i::GarbageCollectionReason::kTesting);
|
||||
CHECK(platform.PendingIdleTask());
|
||||
const int kShortStepCount = 10;
|
||||
for (int i = 0; i < kShortStepCount && platform.PendingIdleTask(); i++) {
|
||||
@ -180,7 +180,7 @@ TEST(IncrementalMarkingUsingDelayedTasks) {
|
||||
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
|
||||
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
|
||||
marking->Stop();
|
||||
marking->Start();
|
||||
marking->Start(i::GarbageCollectionReason::kTesting);
|
||||
CHECK(platform.PendingIdleTask());
|
||||
// The delayed task should be a no-op if the idle task makes progress.
|
||||
const int kIgnoredDelayedTaskStepCount = 1000;
|
||||
|
@ -89,8 +89,8 @@ TEST(Promotion) {
|
||||
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
heap->CollectAllGarbage();
|
||||
heap->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CHECK(heap->InSpace(*array, OLD_SPACE));
|
||||
}
|
||||
}
|
||||
@ -114,8 +114,8 @@ HEAP_TEST(NoPromotion) {
|
||||
heap->set_force_oom(true);
|
||||
// Array should be in the new space.
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
heap->CollectAllGarbage();
|
||||
heap->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CHECK(heap->InSpace(*array, NEW_SPACE));
|
||||
}
|
||||
}
|
||||
@ -132,7 +132,7 @@ HEAP_TEST(MarkCompactCollector) {
|
||||
Handle<JSGlobalObject> global(isolate->context()->global_object());
|
||||
|
||||
// call mark-compact when heap is empty
|
||||
heap->CollectGarbage(OLD_SPACE, "trigger 1");
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
|
||||
// keep allocating garbage in new space until it fails
|
||||
const int arraysize = 100;
|
||||
@ -140,14 +140,14 @@ HEAP_TEST(MarkCompactCollector) {
|
||||
do {
|
||||
allocation = heap->AllocateFixedArray(arraysize);
|
||||
} while (!allocation.IsRetry());
|
||||
heap->CollectGarbage(NEW_SPACE, "trigger 2");
|
||||
CcTest::CollectGarbage(NEW_SPACE);
|
||||
heap->AllocateFixedArray(arraysize).ToObjectChecked();
|
||||
|
||||
// keep allocating maps until it fails
|
||||
do {
|
||||
allocation = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
|
||||
} while (!allocation.IsRetry());
|
||||
heap->CollectGarbage(MAP_SPACE, "trigger 3");
|
||||
CcTest::CollectGarbage(MAP_SPACE);
|
||||
heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize).ToObjectChecked();
|
||||
|
||||
{ HandleScope scope(isolate);
|
||||
@ -159,7 +159,7 @@ HEAP_TEST(MarkCompactCollector) {
|
||||
factory->NewJSObject(function);
|
||||
}
|
||||
|
||||
heap->CollectGarbage(OLD_SPACE, "trigger 4");
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
|
||||
{ HandleScope scope(isolate);
|
||||
Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
|
||||
@ -177,7 +177,7 @@ HEAP_TEST(MarkCompactCollector) {
|
||||
JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
|
||||
}
|
||||
|
||||
heap->CollectGarbage(OLD_SPACE, "trigger 5");
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
|
||||
{ HandleScope scope(isolate);
|
||||
Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
|
||||
@ -220,7 +220,7 @@ TEST(MapCompact) {
|
||||
// be able to trigger map compaction.
|
||||
// To give an additional chance to fail, try to force compaction which
|
||||
// should be impossible right now.
|
||||
CcTest::heap()->CollectAllGarbage(Heap::kForceCompactionMask);
|
||||
CcTest::CollectAllGarbage(Heap::kForceCompactionMask);
|
||||
// And now map pointers should be encodable again.
|
||||
CHECK(CcTest::heap()->map_space()->MapPointersEncodable());
|
||||
}
|
||||
@ -301,7 +301,7 @@ HEAP_TEST(ObjectGroups) {
|
||||
g2c1.location());
|
||||
}
|
||||
// Do a full GC
|
||||
heap->CollectGarbage(OLD_SPACE);
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
|
||||
// All object should be alive.
|
||||
CHECK_EQ(0, NumberOfWeakCalls);
|
||||
@ -328,7 +328,7 @@ HEAP_TEST(ObjectGroups) {
|
||||
g2c1.location());
|
||||
}
|
||||
|
||||
heap->CollectGarbage(OLD_SPACE);
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
|
||||
// All objects should be gone. 5 global handles in total.
|
||||
CHECK_EQ(5, NumberOfWeakCalls);
|
||||
@ -341,7 +341,7 @@ HEAP_TEST(ObjectGroups) {
|
||||
g2c1.location(), reinterpret_cast<void*>(&g2c1_and_id),
|
||||
&WeakPointerCallback, v8::WeakCallbackType::kParameter);
|
||||
|
||||
heap->CollectGarbage(OLD_SPACE);
|
||||
CcTest::CollectGarbage(OLD_SPACE);
|
||||
CHECK_EQ(7, NumberOfWeakCalls);
|
||||
}
|
||||
|
||||
|
@ -50,7 +50,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
|
||||
|
||||
std::vector<Handle<FixedArray>> handles;
|
||||
heap::SimulateFullSpace(heap->new_space(), &handles);
|
||||
heap->CollectGarbage(NEW_SPACE);
|
||||
heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
|
||||
CHECK_GT(handles.size(), 0u);
|
||||
// First object in handle should be on the first page.
|
||||
Handle<FixedArray> first_object = handles.front();
|
||||
|
@ -319,7 +319,7 @@ static void CheckAccessorArgsCorrect(
|
||||
CHECK(info.Data()
|
||||
->Equals(info.GetIsolate()->GetCurrentContext(), v8_str("data"))
|
||||
.FromJust());
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CHECK(info.GetIsolate() == CcTest::isolate());
|
||||
CHECK(info.This() == info.Holder());
|
||||
CHECK(info.Data()
|
||||
|
@ -360,7 +360,7 @@ void InterceptorHasOwnPropertyGetter(
|
||||
void InterceptorHasOwnPropertyGetterGC(
|
||||
Local<Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
|
||||
ApiTestFuzzer::Fuzz();
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
@ -453,11 +453,11 @@ THREADED_TEST(ScriptUsingStringResource) {
|
||||
CHECK_EQ(static_cast<const String::ExternalStringResourceBase*>(resource),
|
||||
source->GetExternalStringResourceBase(&encoding));
|
||||
CHECK_EQ(String::TWO_BYTE_ENCODING, encoding);
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CHECK_EQ(0, dispose_count);
|
||||
}
|
||||
CcTest::i_isolate()->compilation_cache()->Clear();
|
||||
CcTest::heap()->CollectAllAvailableGarbage();
|
||||
CcTest::CollectAllAvailableGarbage();
|
||||
CHECK_EQ(1, dispose_count);
|
||||
}
|
||||
|
||||
@ -484,11 +484,11 @@ THREADED_TEST(ScriptUsingOneByteStringResource) {
|
||||
Local<Value> value = script->Run(env.local()).ToLocalChecked();
|
||||
CHECK(value->IsNumber());
|
||||
CHECK_EQ(7, value->Int32Value(env.local()).FromJust());
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
|
||||
CHECK_EQ(0, dispose_count);
|
||||
}
|
||||
CcTest::i_isolate()->compilation_cache()->Clear();
|
||||
CcTest::heap()->CollectAllAvailableGarbage();
|
||||
CcTest::CollectAllAvailableGarbage();
|
||||
CHECK_EQ(1, dispose_count);
|
||||
}
|
||||
|
||||
@ -504,8 +504,8 @@ THREADED_TEST(ScriptMakingExternalString) {
|
||||
v8::NewStringType::kNormal)
|
||||
.ToLocalChecked();
|
||||
// Trigger GCs so that the newly allocated string moves to old gen.
|
||||
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
|
||||
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
|
||||
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
|
||||
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
|
||||
CHECK_EQ(source->IsExternal(), false);
|
||||
CHECK_EQ(source->IsExternalOneByte(), false);
|
||||
String::Encoding encoding = String::UNKNOWN_ENCODING;
|
||||
@ -518,11 +518,11 @@ THREADED_TEST(ScriptMakingExternalString) {
|
||||
Local<Value> value = script->Run(env.local()).ToLocalChecked();
|
||||
CHECK(value->IsNumber());
|
||||
CHECK_EQ(7, value->Int32Value(env.local()).FromJust());
|
||||
CcTest::heap()->CollectAllGarbage();
|
||||
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}

@ -535,8 +535,8 @@ THREADED_TEST(ScriptMakingExternalOneByteString) {
v8::HandleScope scope(env->GetIsolate());
Local<String> source = v8_str(c_source);
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
bool success = source->MakeExternal(
new TestOneByteResource(i::StrDup(c_source), &dispose_count));
CHECK(success);
@ -544,11 +544,11 @@ THREADED_TEST(ScriptMakingExternalOneByteString) {
Local<Value> value = script->Run(env.local()).ToLocalChecked();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value(env.local()).FromJust());
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}

@ -558,8 +558,8 @@ TEST(MakingExternalStringConditions) {
v8::HandleScope scope(env->GetIsolate());

// Free some space in the new space so that we can check freshness.
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);

uint16_t* two_byte_string = AsciiToTwoByteString("s1");
Local<String> local_string =
@ -571,8 +571,8 @@ TEST(MakingExternalStringConditions) {
// We should refuse to externalize new space strings.
CHECK(!local_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
// Old space strings should be accepted.
CHECK(local_string->CanMakeExternal());
}
@ -583,15 +583,15 @@ TEST(MakingExternalOneByteStringConditions) {
v8::HandleScope scope(env->GetIsolate());

// Free some space in the new space so that we can check freshness.
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);

Local<String> local_string = v8_str("s1");
// We should refuse to externalize new space strings.
CHECK(!local_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
// Old space strings should be accepted.
CHECK(local_string->CanMakeExternal());
}
@ -612,8 +612,8 @@ TEST(MakingExternalUnalignedOneByteString) {

// Trigger GCs so that the newly allocated string moves to old gen.
i::heap::SimulateFullSpace(CcTest::heap()->old_space());
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now

// Turn into external string with unaligned resource data.
const char* c_cons = "_abcdefghijklmnopqrstuvwxyz";
@ -626,8 +626,8 @@ TEST(MakingExternalUnalignedOneByteString) {
CHECK(success);

// Trigger GCs and force evacuation.
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask);
}


@ -642,14 +642,14 @@ THREADED_TEST(UsingExternalString) {
.ToLocalChecked();
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol =
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
}
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -665,14 +665,14 @@ THREADED_TEST(UsingExternalOneByteString) {
.ToLocalChecked();
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
// Trigger GCs so that the newly allocated string moves to old gen.
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> isymbol =
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
}
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -739,12 +739,12 @@ THREADED_TEST(ScavengeExternalString) {
new TestResource(two_byte_string, &dispose_count))
.ToLocalChecked();
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
}
CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CcTest::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CHECK_EQ(1, dispose_count);
}

@ -763,12 +763,12 @@ THREADED_TEST(ScavengeExternalOneByteString) {
new TestOneByteResource(i::StrDup(one_byte_string), &dispose_count))
.ToLocalChecked();
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
}
CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CcTest::CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CHECK_EQ(1, dispose_count);
}

@ -812,11 +812,11 @@ TEST(ExternalStringWithDisposeHandling) {
Local<Value> value = script->Run(env.local()).ToLocalChecked();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value(env.local()).FromJust());
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, TestOneByteResourceWithDisposeControl::dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, TestOneByteResourceWithDisposeControl::dispose_calls);
CHECK_EQ(0, TestOneByteResourceWithDisposeControl::dispose_count);

@ -835,11 +835,11 @@ TEST(ExternalStringWithDisposeHandling) {
Local<Value> value = script->Run(env.local()).ToLocalChecked();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value(env.local()).FromJust());
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK_EQ(0, TestOneByteResourceWithDisposeControl::dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK_EQ(1, TestOneByteResourceWithDisposeControl::dispose_calls);
CHECK_EQ(1, TestOneByteResourceWithDisposeControl::dispose_count);
}
@ -897,8 +897,8 @@ THREADED_TEST(StringConcat) {
CHECK_EQ(68, value->Int32Value(env.local()).FromJust());
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -2651,7 +2651,7 @@ static void CheckAlignedPointerInInternalField(Local<v8::Object> obj,
void* value) {
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(value) & 0x1));
obj->SetAlignedPointerInInternalField(0, value);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(value, obj->GetAlignedPointerFromInternalField(0));
}

@ -2707,14 +2707,14 @@ THREADED_TEST(SetAlignedPointerInInternalFields) {
void* values[] = {heap_allocated_1, heap_allocated_2};

obj->SetAlignedPointerInInternalFields(2, indices, values);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(heap_allocated_1, obj->GetAlignedPointerFromInternalField(0));
CHECK_EQ(heap_allocated_2, obj->GetAlignedPointerFromInternalField(1));

indices[0] = 1;
indices[1] = 0;
obj->SetAlignedPointerInInternalFields(2, indices, values);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(heap_allocated_2, obj->GetAlignedPointerFromInternalField(0));
CHECK_EQ(heap_allocated_1, obj->GetAlignedPointerFromInternalField(1));

@ -2726,7 +2726,7 @@ static void CheckAlignedPointerInEmbedderData(LocalContext* env, int index,
void* value) {
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(value) & 0x1));
(*env)->SetAlignedPointerInEmbedderData(index, value);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(value, (*env)->GetAlignedPointerFromEmbedderData(index));
}

@ -2756,7 +2756,7 @@ THREADED_TEST(EmbedderDataAlignedPointers) {
for (int i = 0; i < 100; i++) {
env->SetAlignedPointerInEmbedderData(i, AlignedTestPointer(i));
}
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
for (int i = 0; i < 100; i++) {
CHECK_EQ(AlignedTestPointer(i), env->GetAlignedPointerFromEmbedderData(i));
}
@ -2788,7 +2788,7 @@ THREADED_TEST(IdentityHash) {

// Ensure that the test starts with an fresh heap to test whether the hash
// code is based on the address.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
Local<v8::Object> obj = v8::Object::New(isolate);
int hash = obj->GetIdentityHash();
int hash1 = obj->GetIdentityHash();
@ -2798,7 +2798,7 @@ THREADED_TEST(IdentityHash) {
// objects should not be assigned the same hash code. If the test below fails
// the random number generator should be evaluated.
CHECK_NE(hash, hash2);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int hash3 = v8::Object::New(isolate)->GetIdentityHash();
// Make sure that the identity hash is not based on the initial address of
// the object alone. If the test below fails the random number generator
@ -2874,7 +2874,7 @@ TEST(SymbolIdentityHash) {
int hash = symbol->GetIdentityHash();
int hash1 = symbol->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int hash3 = symbol->GetIdentityHash();
CHECK_EQ(hash, hash3);
}
@ -2885,7 +2885,7 @@ TEST(SymbolIdentityHash) {
int hash = js_symbol->GetIdentityHash();
int hash1 = js_symbol->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int hash3 = js_symbol->GetIdentityHash();
CHECK_EQ(hash, hash3);
}
@ -2901,7 +2901,7 @@ TEST(StringIdentityHash) {
int hash = str->GetIdentityHash();
int hash1 = str->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
int hash3 = str->GetIdentityHash();
CHECK_EQ(hash, hash3);

@ -2921,7 +2921,7 @@ THREADED_TEST(SymbolProperties) {
v8::Local<v8::Symbol> sym2 = v8::Symbol::New(isolate, v8_str("my-symbol"));
v8::Local<v8::Symbol> sym3 = v8::Symbol::New(isolate, v8_str("sym3"));

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Check basic symbol functionality.
CHECK(sym1->IsSymbol());
@ -2990,7 +2990,7 @@ THREADED_TEST(SymbolProperties) {
CHECK_EQ(num_props + 1,
obj->GetPropertyNames(env.local()).ToLocalChecked()->Length());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

CHECK(obj->SetAccessor(env.local(), sym3, SymbolAccessorGetter,
SymbolAccessorSetter)
@ -3100,7 +3100,7 @@ THREADED_TEST(PrivatePropertiesOnProxies) {
v8::Local<v8::Private> priv2 =
v8::Private::New(isolate, v8_str("my-private"));

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

CHECK(priv2->Name()
->Equals(env.local(),
@ -3142,7 +3142,7 @@ THREADED_TEST(PrivatePropertiesOnProxies) {
CHECK_EQ(num_props + 1,
proxy->GetPropertyNames(env.local()).ToLocalChecked()->Length());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Add another property and delete it afterwards to force the object in
// slow case.
@ -3194,7 +3194,7 @@ THREADED_TEST(PrivateProperties) {
v8::Local<v8::Private> priv2 =
v8::Private::New(isolate, v8_str("my-private"));

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

CHECK(priv2->Name()
->Equals(env.local(),
@ -3236,7 +3236,7 @@ THREADED_TEST(PrivateProperties) {
CHECK_EQ(num_props + 1,
obj->GetPropertyNames(env.local()).ToLocalChecked()->Length());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Add another property and delete it afterwards to force the object in
// slow case.
@ -3385,7 +3385,7 @@ THREADED_TEST(ArrayBuffer_ApiInternalToExternal) {
CheckInternalFieldsAreZero(ab);
CHECK_EQ(1024, static_cast<int>(ab->ByteLength()));
CHECK(!ab->IsExternal());
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

ScopedArrayBufferContents ab_contents(ab->Externalize());
CHECK(ab->IsExternal());
@ -3661,7 +3661,7 @@ THREADED_TEST(SharedArrayBuffer_ApiInternalToExternal) {
CheckInternalFieldsAreZero(ab);
CHECK_EQ(1024, static_cast<int>(ab->ByteLength()));
CHECK(!ab->IsExternal());
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

ScopedSharedArrayBufferContents ab_contents(ab->Externalize());
CHECK(ab->IsExternal());
@ -3778,7 +3778,7 @@ THREADED_TEST(HiddenProperties) {
v8::Local<v8::String> empty = v8_str("");
v8::Local<v8::String> prop_name = v8_str("prop_name");

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Make sure delete of a non-existent hidden value works
obj->DeletePrivate(env.local(), key).FromJust();
@ -3796,7 +3796,7 @@ THREADED_TEST(HiddenProperties) {
->Int32Value(env.local())
.FromJust());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Make sure we do not find the hidden property.
CHECK(!obj->Has(env.local(), empty).FromJust());
@ -3820,7 +3820,7 @@ THREADED_TEST(HiddenProperties) {
->Int32Value(env.local())
.FromJust());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Add another property and delete it afterwards to force the object in
// slow case.
@ -3844,7 +3844,7 @@ THREADED_TEST(HiddenProperties) {
->Int32Value(env.local())
.FromJust());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

CHECK(obj->SetPrivate(env.local(), key, v8::Integer::New(isolate, 2002))
.FromJust());
@ -4135,7 +4135,7 @@ void SecondPassCallback(const v8::WeakCallbackInfo<TwoPassCallbackData>& data) {
if (!trigger_gc) return;
auto data_2 = new TwoPassCallbackData(data.GetIsolate(), instance_counter);
data_2->SetWeak();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -4156,7 +4156,7 @@ TEST(TwoPassPhantomCallbacks) {
data->SetWeak();
}
CHECK_EQ(static_cast<int>(kLength), instance_counter);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
EmptyMessageQueues(isolate);
CHECK_EQ(0, instance_counter);
}
@ -4175,7 +4175,7 @@ TEST(TwoPassPhantomCallbacksNestedGc) {
array[10]->MarkTriggerGc();
array[15]->MarkTriggerGc();
CHECK_EQ(static_cast<int>(kLength), instance_counter);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
EmptyMessageQueues(isolate);
CHECK_EQ(0, instance_counter);
}
@ -4286,8 +4286,7 @@ void TestGlobalValueMap() {
}
CHECK_EQ(initial_handle_count + 1, global_handles->global_handles_count());
if (map.IsWeak()) {
CcTest::i_isolate()->heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
} else {
map.Clear();
}
@ -4518,9 +4517,7 @@ THREADED_TEST(ApiObjectGroups) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}
// Do a single full GC, ensure incremental marking is stopped.
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -4545,7 +4542,7 @@ THREADED_TEST(ApiObjectGroups) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@ -4556,7 +4553,7 @@ THREADED_TEST(ApiObjectGroups) {
g2c1.handle.SetWeak(&g2c1, &WeakPointerCallback,
v8::WeakCallbackType::kParameter);

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(7, counter.NumberOfWeakCalls());
}

@ -4623,9 +4620,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}
// Do a single full GC, ensure incremental marking is stopped.
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -4650,7 +4645,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@ -4661,7 +4656,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
g2c1.handle.SetWeak(&g2c1, &WeakPointerCallback,
v8::WeakCallbackType::kParameter);

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(7, counter.NumberOfWeakCalls());
}

@ -4746,9 +4741,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
iso->SetReferenceFromGroup(id4, g1s1.handle);
}
// Do a single full GC
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -4777,7 +4770,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
iso->SetReferenceFromGroup(id4, g1s1.handle);
}

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All objects should be gone. 9 global handles in total.
CHECK_EQ(9, counter.NumberOfWeakCalls());
@ -5077,7 +5070,7 @@ TEST(NativeWeakMap) {
CHECK(value->Equals(env.local(), weak_map->Get(obj2)).FromJust());
CHECK(value->Equals(env.local(), weak_map->Get(sym1)).FromJust());
}
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
{
HandleScope scope(isolate);
CHECK(value->Equals(env.local(), weak_map->Get(local1)).FromJust());
@ -5099,7 +5092,7 @@ TEST(NativeWeakMap) {
s1.handle.SetWeak(&s1, &WeakPointerCallback,
v8::WeakCallbackType::kParameter);

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(3, counter.NumberOfWeakCalls());

CHECK(o1.handle.IsEmpty());
@ -7770,9 +7763,9 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) {
b->Set(context, v8_str("x"), a).FromJust();
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
// We are relying on this creating a big flag array and reserving the space
// up front.
@ -7792,9 +7785,9 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) {
object_b.handle.MarkIndependent();
CHECK(object_b.handle.IsIndependent());
if (global_gc) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
// A single GC should be enough to reclaim the memory, since we are using
// phantom handles.
@ -7891,9 +7884,9 @@ void InternalFieldCallback(bool global_gc) {
}
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}

CHECK_EQ(1729, t1->x());
@ -7938,9 +7931,9 @@ void v8::internal::HeapTester::ResetWeakHandle(bool global_gc) {
object_a.handle.Reset(iso, a);
object_b.handle.Reset(iso, b);
if (global_gc) {
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
}

@ -7956,9 +7949,9 @@ void v8::internal::HeapTester::ResetWeakHandle(bool global_gc) {
CHECK(object_b.handle.IsIndependent());
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
CHECK(object_a.flag);
CHECK(object_b.flag);
@ -7970,12 +7963,11 @@ THREADED_HEAP_TEST(ResetWeakHandle) {
v8::internal::HeapTester::ResetWeakHandle(true);
}

static void InvokeScavenge() { CcTest::heap()->CollectGarbage(i::NEW_SPACE); }
static void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }


static void InvokeMarkSweep() { CcTest::heap()->CollectAllGarbage(); }
static void InvokeMarkSweep() {
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}

static void ForceScavenge2(
const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
@ -8051,7 +8043,7 @@ static void ArgumentsTestCallback(
CHECK(v8::Integer::New(isolate, 3)->Equals(context, args[2]).FromJust());
CHECK(v8::Undefined(isolate)->Equals(context, args[3]).FromJust());
v8::HandleScope scope(args.GetIsolate());
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -9381,7 +9373,7 @@ static bool security_check_with_gc_called;
static bool SecurityTestCallbackWithGC(Local<v8::Context> accessing_context,
Local<v8::Object> accessed_object,
Local<v8::Value> data) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
security_check_with_gc_called = true;
return true;
}
@ -12169,7 +12161,7 @@ static void InterceptorCallICFastApi(
reinterpret_cast<int*>(v8::External::Cast(*info.Data())->Value());
++(*call_count);
if ((*call_count) % 20 == 0) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
}

@ -12226,8 +12218,8 @@ static void GenerateSomeGarbage() {
void DirectApiCallback(const v8::FunctionCallbackInfo<v8::Value>& args) {
static int count = 0;
if (count++ % 3 == 0) {
CcTest::heap()->CollectAllGarbage();
// This should move the stub
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// This should move the stub
GenerateSomeGarbage(); // This should ensure the old stub memory is flushed
}
}
@ -12296,7 +12288,7 @@ static int p_getter_count_3;

static Local<Value> DoDirectGetter() {
if (++p_getter_count_3 % 3 == 0) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
GenerateSomeGarbage();
}
return v8_str("Direct Getter Result");
@ -14016,8 +14008,8 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
// the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG
if (count != expected) CcTest::heap()->TracePathToGlobal();
@ -14118,7 +14110,8 @@ TEST(WeakCallbackApi) {
handle, WeakApiCallback, v8::WeakCallbackType::kParameter);
}
reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
i::Heap::kAbortIncrementalMarkingMask,
i::GarbageCollectionReason::kTesting);
// Verify disposed.
CHECK_EQ(initial_handles, globals->global_handles_count());
}
@ -14160,7 +14153,7 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
handle1.SetWeak(&handle1, NewPersistentHandleCallback1,
v8::WeakCallbackType::kParameter);
handle2.Reset();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -14170,7 +14163,7 @@ v8::Persistent<v8::Object> to_be_disposed;
void DisposeAndForceGcCallback2(
const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
to_be_disposed.Reset();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -14194,7 +14187,7 @@ THREADED_TEST(DoNotUseDeletedNodesInSecondLevelGc) {
handle1.SetWeak(&handle1, DisposeAndForceGcCallback1,
v8::WeakCallbackType::kParameter);
to_be_disposed.Reset(isolate, handle2);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}

void DisposingCallback(
@ -14232,7 +14225,7 @@ THREADED_TEST(NoGlobalHandlesOrphaningDueToWeakCallback) {
v8::WeakCallbackType::kParameter);
handle3.SetWeak(&handle3, HandleCreatingCallback1,
v8::WeakCallbackType::kParameter);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
EmptyMessageQueues(isolate);
}

@ -14793,7 +14786,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
i::Heap* heap = i_isolate->heap();

// Start with a clean slate.
heap->CollectAllAvailableGarbage("TestSetJitCodeEventHandler_Prepare");
heap->CollectAllAvailableGarbage(i::GarbageCollectionReason::kTesting);

{
v8::HandleScope scope(isolate);
@ -14837,7 +14830,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
}

// Force code movement.
heap->CollectAllAvailableGarbage("TestSetJitCodeEventHandler_Move");
heap->CollectAllAvailableGarbage(i::GarbageCollectionReason::kTesting);

isolate->SetJitCodeEventHandler(v8::kJitCodeEventDefault, NULL);

@ -16465,7 +16458,7 @@ static void ObjectWithExternalArrayTestHelper(Local<Context> context,
"}"
"sum;");
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(28, result->Int32Value(context).FromJust());

// Make sure out-of-range loads do not throw.
@ -16681,12 +16674,12 @@ static void FixedTypedArrayTestHelper(i::ExternalArrayType array_type,
CHECK_EQ(FixedTypedArrayClass::kInstanceType,
fixed_array->map()->instance_type());
CHECK_EQ(kElementCount, fixed_array->length());
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
for (int i = 0; i < kElementCount; i++) {
fixed_array->set(i, static_cast<ElementType>(i));
}
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(static_cast<int64_t>(static_cast<ElementType>(i)),
static_cast<int64_t>(fixed_array->get_scalar(i)));
@ -16876,10 +16869,10 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);

// Should not crash
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Should not move the pointer
CHECK_EQ(ab->GetContents().Data(), store_ptr);
@ -16897,15 +16890,15 @@ THREADED_TEST(SkipArrayBufferDuringScavenge) {
reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));

// Make `store_ptr` point to from space
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);

// Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);

// Should not crash,
// i.e. backing store pointer should not be treated as a heap object pointer
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now

// Use `ab` to silence compiler warning
CHECK_EQ(ab->GetContents().Data(), store_ptr);
@ -18206,7 +18199,8 @@ TEST(TestIdleNotification) {
bool finished = false;
for (int i = 0; i < 200 && !finished; i++) {
if (i < 10 && CcTest::heap()->incremental_marking()->IsStopped()) {
CcTest::heap()->StartIdleIncrementalMarking();
CcTest::heap()->StartIdleIncrementalMarking(
i::GarbageCollectionReason::kTesting);
}
finished = env->GetIsolate()->IdleNotificationDeadline(
(v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
@ -18225,7 +18219,7 @@
TEST(Regress2333) {
LocalContext env;
for (int i = 0; i < 3; i++) {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
}

@ -18363,7 +18357,7 @@ TEST(ExternalizeOldSpaceTwoByteCons) {
->ToString(env.local())
.ToLocalChecked();
CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));

TestResource* resource = new TestResource(
@ -18387,7 +18381,7 @@ TEST(ExternalizeOldSpaceOneByteCons) {
->ToString(env.local())
.ToLocalChecked();
CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));

TestOneByteResource* resource =
@ -18431,7 +18425,7 @@ TEST(VisitExternalStrings) {
v8::Local<v8::String> string3 =
v8::String::NewExternalTwoByte(env->GetIsolate(), resource[3])
.ToLocalChecked();
CcTest::heap()->CollectAllAvailableGarbage(); // Tenure string.
CcTest::CollectAllAvailableGarbage(); // Tenure string.
// Turn into a symbol.
i::Handle<i::String> string3_i = v8::Utils::OpenHandle(*string3);
CHECK(!CcTest::i_isolate()->factory()->InternalizeString(
@ -18518,7 +18512,7 @@ TEST(ExternalInternalizedStringCollectedAtGC) {

// Garbage collector deals swift blows to evil.
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();

// Ring has been destroyed. Free Peoples of Middle-earth Rejoice.
CHECK_EQ(1, destroyed);
@ -18719,7 +18713,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_simple);
other_context->Exit();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -18741,7 +18735,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_eval);
other_context->Exit();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -18768,7 +18762,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_exception);
other_context->Exit();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -19385,8 +19379,7 @@ void PrologueCallbackAlloc(v8::Isolate* isolate,
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());

CcTest::heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
}


@ -19405,8 +19398,7 @@ void EpilogueCallbackAlloc(v8::Isolate* isolate,
Local<Object> obj = Object::New(isolate);
CHECK(!obj.IsEmpty());

CcTest::heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
}


@ -19419,26 +19411,26 @@ TEST(GCCallbacksOld) {
context->GetIsolate()->AddGCEpilogueCallback(EpilogueCallback);
CHECK_EQ(0, prologue_call_count);
CHECK_EQ(0, epilogue_call_count);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(1, prologue_call_count);
CHECK_EQ(1, epilogue_call_count);
context->GetIsolate()->AddGCPrologueCallback(PrologueCallbackSecond);
context->GetIsolate()->AddGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(1, prologue_call_count_second);
CHECK_EQ(1, epilogue_call_count_second);
context->GetIsolate()->RemoveGCPrologueCallback(PrologueCallback);
context->GetIsolate()->RemoveGCEpilogueCallback(EpilogueCallback);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
CHECK_EQ(2, epilogue_call_count_second);
context->GetIsolate()->RemoveGCPrologueCallback(PrologueCallbackSecond);
context->GetIsolate()->RemoveGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
@ -19454,26 +19446,26 @@ TEST(GCCallbacks) {
isolate->AddGCEpilogueCallback(EpilogueCallback);
CHECK_EQ(0, prologue_call_count);
CHECK_EQ(0, epilogue_call_count);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(1, prologue_call_count);
CHECK_EQ(1, epilogue_call_count);
isolate->AddGCPrologueCallback(PrologueCallbackSecond);
isolate->AddGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(1, prologue_call_count_second);
CHECK_EQ(1, epilogue_call_count_second);
isolate->RemoveGCPrologueCallback(PrologueCallback);
isolate->RemoveGCEpilogueCallback(EpilogueCallback);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
CHECK_EQ(2, epilogue_call_count_second);
isolate->RemoveGCPrologueCallback(PrologueCallbackSecond);
isolate->RemoveGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
@ -19483,8 +19475,7 @@ TEST(GCCallbacks) {
CHECK_EQ(0, epilogue_call_count_alloc);
isolate->AddGCPrologueCallback(PrologueCallbackAlloc);
isolate->AddGCEpilogueCallback(EpilogueCallbackAlloc);
CcTest::heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, prologue_call_count_alloc);
CHECK_EQ(1, epilogue_call_count_alloc);
isolate->RemoveGCPrologueCallback(PrologueCallbackAlloc);
@ -19662,7 +19653,7 @@ TEST(ContainsOnlyOneByte) {
void FailedAccessCheckCallbackGC(Local<v8::Object> target,
v8::AccessType type,
Local<v8::Value> data) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::isolate()->ThrowException(
v8::Exception::Error(v8_str("cross context")));
}
@ -20285,7 +20276,7 @@ TEST(DontDeleteCellLoadIC) {
"})()",
"ReferenceError: cell is not defined");
CompileRun("cell = \"new_second\";");
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
ExpectString("readCell()", "new_second");
ExpectString("readCell()", "new_second");
}
@ -20355,8 +20346,8 @@ TEST(PersistentHandleInNewSpaceVisitor) {
object1.SetWrapperClassId(42);
CHECK_EQ(42, object1.WrapperClassId());

CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

v8::Persistent<v8::Object> object2(isolate, v8::Object::New(isolate));
CHECK_EQ(0, object2.WrapperClassId());
@ -21033,7 +21024,7 @@ THREADED_TEST(Regress1516) {
CHECK_LE(1, elements);

// We have to abort incremental marking here to abandon black pages.
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);

CHECK_GT(elements, CountLiveMapsInMapCache(CcTest::i_isolate()->context()));
}
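Read together, the hunks above are one mechanical migration: every direct CcTest::heap()->CollectGarbage(...), CollectAllGarbage(...), or CollectAllAvailableGarbage(...) call in the tests is rerouted through a static CcTest helper that supplies the new GarbageCollectionReason argument. The following is a minimal self-contained C++ sketch of that pattern, with stand-in types; the helper bodies and the kTesting value are assumptions inferred from the call sites in this diff, not the verbatim V8 sources.

#include <cstdio>

// Stand-in for the new reason enum; only kTesting is visible in these hunks.
enum class GarbageCollectionReason { kTesting };

enum AllocationSpace { NEW_SPACE, OLD_SPACE };

// Stand-in for i::Heap: after this CL every entry point takes a reason.
struct Heap {
  static const int kFinalizeIncrementalMarkingMask = 1 << 0;
  static const int kAbortIncrementalMarkingMask = 1 << 1;
  void CollectGarbage(AllocationSpace space, GarbageCollectionReason reason) {
    std::printf("GC in space %d, reason %d\n", space,
                static_cast<int>(reason));
  }
  void CollectAllGarbage(int flags, GarbageCollectionReason reason) {
    std::printf("full GC, flags %d, reason %d\n", flags,
                static_cast<int>(reason));
  }
};

Heap g_heap;

// Stand-in for the CcTest helpers the new call sites use: they pin the
// reason to kTesting so individual tests no longer spell it out.
struct CcTest {
  static Heap* heap() { return &g_heap; }
  static void CollectGarbage(AllocationSpace space) {
    heap()->CollectGarbage(space, GarbageCollectionReason::kTesting);
  }
  static void CollectAllGarbage(int flags) {
    heap()->CollectAllGarbage(flags, GarbageCollectionReason::kTesting);
  }
};

int main() {
  // Old style was CcTest::heap()->CollectGarbage(NEW_SPACE) with no reason;
  // new style, as in the hunks above:
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
}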

@ -1164,7 +1164,7 @@ TEST(FunctionCallSample) {

// Collect garbage that might have be generated while installing
// extensions.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

CompileRun(call_function_test_source);
v8::Local<v8::Function> function = GetFunction(env.local(), "start");

@ -371,8 +371,8 @@ void CheckDebuggerUnloaded(bool check_functions) {
CHECK(!CcTest::i_isolate()->debug()->debug_info_list_);

// Collect garbage to ensure weak handles are cleared.
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(Heap::kMakeHeapIterableMask);

// Iterate the head and check that there are no debugger related objects left.
HeapIterator iterator(CcTest::heap());
@ -800,10 +800,10 @@ static void DebugEventBreakPointCollectGarbage(
break_point_hit_count++;
if (break_point_hit_count % 2 == 0) {
// Scavenge.
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
CcTest::CollectGarbage(v8::internal::NEW_SPACE);
} else {
// Mark sweep compact.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
}
}
@ -824,7 +824,7 @@ static void DebugEventBreak(

// Run the garbage collector to enforce heap verification if option
// --verify-heap is set.
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
CcTest::CollectGarbage(v8::internal::NEW_SPACE);

// Set the break flag again to come back here as soon as possible.
v8::Debug::DebugBreak(CcTest::isolate());
@ -1217,12 +1217,12 @@ static void CallAndGC(v8::Local<v8::Context> context,
CHECK_EQ(1 + i * 3, break_point_hit_count);

// Scavenge and call function.
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
CcTest::CollectGarbage(v8::internal::NEW_SPACE);
f->Call(context, recv, 0, NULL).ToLocalChecked();
CHECK_EQ(2 + i * 3, break_point_hit_count);

// Mark sweep (and perhaps compact) and call function.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
f->Call(context, recv, 0, NULL).ToLocalChecked();
CHECK_EQ(3 + i * 3, break_point_hit_count);
}
@ -2080,7 +2080,7 @@ TEST(ScriptBreakPointLineTopLevel) {
->Get(context, v8_str(env->GetIsolate(), "f"))
.ToLocalChecked());

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

SetScriptBreakPointByNameFromJS(env->GetIsolate(), "test.html", 3, -1);


@ -145,7 +145,7 @@ void DeclarationContext::Check(const char* source, int get, int set, int query,
InitializeIfNeeded();
// A retry after a GC may pollute the counts, so perform gc now
// to avoid that.
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
CcTest::CollectGarbage(v8::internal::NEW_SPACE);
HandleScope scope(CcTest::isolate());
TryCatch catcher(CcTest::isolate());
catcher.SetVerbose(true);
@ -176,7 +176,7 @@ void DeclarationContext::Check(const char* source, int get, int set, int query,
}
}
// Clean slate for the next test.
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
}


@ -109,7 +109,8 @@ class AllowNativesSyntaxNoInlining {
// Abort any ongoing incremental marking to make sure that all weak global
// handle callbacks are processed.
static void NonIncrementalGC(i::Isolate* isolate) {
isolate->heap()->CollectAllGarbage();
isolate->heap()->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
i::GarbageCollectionReason::kTesting);
}


@ -56,7 +56,7 @@ static void TestHashMap(Handle<HashMap> table) {
CHECK_EQ(table->Lookup(b), CcTest::heap()->the_hole_value());

// Keys still have to be valid after objects were moved.
CcTest::heap()->CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_EQ(table->Lookup(a), *b);
CHECK_EQ(table->Lookup(b), CcTest::heap()->the_hole_value());
@ -126,7 +126,7 @@ static void TestHashSet(Handle<HashSet> table) {
CHECK(!table->Has(isolate, b));

// Keys still have to be valid after objects were moved.
CcTest::heap()->CollectGarbage(NEW_SPACE);
CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK(table->Has(isolate, a));
CHECK(!table->Has(isolate, b));

@ -199,8 +199,6 @@ TEST(VectorCallICStates) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();

// Make sure function f has a call that uses a type feedback slot.
CompileRun(
"function foo() { return 17; }"
@ -219,7 +217,7 @@ TEST(VectorCallICStates) {
CHECK_EQ(GENERIC, nexus.StateFromFeedback());

// After a collection, state should remain GENERIC.
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(GENERIC, nexus.StateFromFeedback());
}

@ -229,8 +227,6 @@ TEST(VectorCallFeedbackForArray) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();

// Make sure function f has a call that uses a type feedback slot.
CompileRun(
"function foo() { return 17; }"
@ -246,7 +242,7 @@ TEST(VectorCallFeedbackForArray) {
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
CHECK(nexus.GetFeedback()->IsAllocationSite());

heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// It should stay monomorphic even after a GC.
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
}
@ -304,7 +300,6 @@ TEST(VectorLoadICStates) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();

// Make sure function f has a call that uses a type feedback slot.
CompileRun(
@ -348,7 +343,7 @@ TEST(VectorLoadICStates) {
CHECK(!nexus.FindFirstMap());

// After a collection, state should not be reset to PREMONOMORPHIC.
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CHECK_EQ(MEGAMORPHIC, nexus.StateFromFeedback());
}

@ -352,7 +352,7 @@ TEST(EternalHandles) {
CHECK(!eternals[i].IsEmpty());
}

isolate->heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();

for (int i = 0; i < kArrayLength; i++) {
for (int j = 0; j < 2; j++) {
@ -445,7 +445,7 @@ TEST(FinalizerWeakness) {
g.SetWeak(&g, finalizer, v8::WeakCallbackType::kFinalizer);
}

CcTest::i_isolate()->heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();

CHECK(!g.IsEmpty());
v8::HandleScope scope(isolate);
@ -468,7 +468,7 @@ TEST(PhatomHandlesWithoutCallbacks) {
}

CHECK_EQ(0, isolate->NumberOfPhantomHandleResetsSinceLastCall());
CcTest::i_isolate()->heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();
CHECK_EQ(2, isolate->NumberOfPhantomHandleResetsSinceLastCall());
CHECK_EQ(0, isolate->NumberOfPhantomHandleResetsSinceLastCall());
}

@ -501,7 +501,7 @@ void CheckSimdSnapshot(const char* program, const char* var_name) {
// 28 @ 13523 entry with no retainer: /hidden/ system / AllocationSite
// 44 @ 767 $map: /hidden/ system / Map
// 44 @ 59 $map: /hidden/ system / Map
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot));
@ -738,7 +738,7 @@ TEST(HeapSnapshotAddressReuse) {
CompileRun(
"for (var i = 0; i < 10000; ++i)\n"
" a[i] = new A();\n");
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -780,7 +780,7 @@ TEST(HeapEntryIdsAndArrayShift) {
"for (var i = 0; i < 1; ++i)\n"
" a.shift();\n");

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -821,7 +821,7 @@ TEST(HeapEntryIdsAndGC) {
const v8::HeapSnapshot* snapshot1 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot1));

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -1150,7 +1150,7 @@ TEST(HeapSnapshotObjectsStats) {
// We have to call GC 6 times. In other case the garbage will be
// the reason of flakiness.
for (int i = 0; i < 6; ++i) {
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}

v8::SnapshotObjectId initial_id;
@ -1305,7 +1305,7 @@ TEST(HeapObjectIds) {
}

heap_profiler->StopTrackingHeapObjects();
CcTest::heap()->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();

for (int i = 0; i < kLength; i++) {
v8::SnapshotObjectId id = heap_profiler->GetObjectId(objects[i]);
@ -3058,7 +3058,7 @@ TEST(SamplingHeapProfiler) {
" eval(\"new Array(100)\");\n"
"}\n");

CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

std::unique_ptr<v8::AllocationProfile> profile(
heap_profiler->GetAllocationProfile());
@ -3112,7 +3112,7 @@ TEST(SamplingHeapProfilerLeftTrimming) {
" a.shift();\n"
"}\n");

CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
CcTest::CollectGarbage(v8::internal::NEW_SPACE);
// Should not crash.

heap_profiler->StopSamplingHeapProfiler();

@ -335,7 +335,7 @@ TEST(ExplicitGC) {
}

// Do an explicit, real GC.
t.heap()->CollectGarbage(i::NEW_SPACE);
t.heap()->CollectGarbage(i::NEW_SPACE, i::GarbageCollectionReason::kTesting);

// Check that searching for the numbers finds the same values.
for (size_t i = 0; i < arraysize(num_keys); i++) {
@ -387,7 +387,7 @@ TEST(CanonicalHandleScope) {
Handle<String> string2(*string1);
CHECK_EQ(number1.location(), number2.location());
CHECK_EQ(string1.location(), string2.location());
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
Handle<HeapNumber> number3(*number2);
Handle<String> string3(*string2);
CHECK_EQ(number1.location(), number3.location());

@ -486,7 +486,7 @@ TEST(EquivalenceOfLoggingAndTraversal) {
"})(this);");
logger->StopProfiler();
reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
i::Heap::kMakeHeapIterableMask);
i::Heap::kMakeHeapIterableMask, i::GarbageCollectionReason::kTesting);
logger->StringEvent("test-logging-done", "");

// Iterate heap to find compiled functions, will write to log.

@ -46,7 +46,7 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() {
NewSpace* new_space = heap->new_space();

// Make sure we can allocate some objects without causing a GC later.
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// Allocate a string, the GC may suspect a memento behind the string.
Handle<SeqOneByteString> string =
@ -72,8 +72,7 @@ TEST(Regress340063) {

// Call GC to see if we can handle a poisonous memento right after the
// current new space top pointer.
CcTest::i_isolate()->heap()->CollectAllGarbage(
Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}


@ -90,8 +89,7 @@ TEST(Regress470390) {

// Call GC to see if we can handle a poisonous memento right after the
// current new space top pointer.
CcTest::i_isolate()->heap()->CollectAllGarbage(
Heap::kAbortIncrementalMarkingMask);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
}


@ -103,5 +101,5 @@ TEST(BadMementoAfterTopForceScavenge) {
SetUpNewSpaceWithPoisonedMementoAtTop();

// Force GC to test the poisoned memento handling
CcTest::i_isolate()->heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);
}
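The hunks below (serializer, symbol, and unboxed-doubles tests) show the other half of the change: code that holds a raw Heap* now spells out both the flag mask and the reason, and the old free-form string argument to CollectAllAvailableGarbage becomes an enum value. A hedged, self-contained C++ sketch of that before/after calling convention follows, using stand-in types rather than the real V8 headers.

#include <iostream>

enum class GarbageCollectionReason { kTesting };

struct Heap {
  static const int kFinalizeIncrementalMarkingMask = 1 << 0;

  // Before (sketch): a free-form string only usable for logging.
  void CollectAllAvailableGarbage(const char* gc_reason) {
    std::cout << "GC: " << gc_reason << "\n";
  }

  // After (sketch): a closed enum, which is far easier to aggregate
  // and compare than arbitrary strings.
  void CollectAllAvailableGarbage(GarbageCollectionReason reason) {
    std::cout << "GC reason #" << static_cast<int>(reason) << "\n";
  }

  void CollectAllGarbage(int flags, GarbageCollectionReason reason) {
    std::cout << "full GC, flags=" << flags << ", reason #"
              << static_cast<int>(reason) << "\n";
  }
};

int main() {
  Heap heap;
  heap.CollectAllAvailableGarbage("serialize");  // old style
  heap.CollectAllAvailableGarbage(GarbageCollectionReason::kTesting);
  heap.CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                         GarbageCollectionReason::kTesting);
}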
|
||||
|
@ -90,7 +90,8 @@ static Vector<const byte> Serialize(v8::Isolate* isolate) {
}

Isolate* internal_isolate = reinterpret_cast<Isolate*>(isolate);
internal_isolate->heap()->CollectAllAvailableGarbage("serialize");
internal_isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kTesting);
StartupSerializer ser(internal_isolate,
v8::SnapshotCreator::FunctionCodeHandling::kClear);
ser.SerializeStrongReferences();
@ -263,8 +264,10 @@ static void PartiallySerializeObject(Vector<const byte>* startup_blob_out,
isolate->bootstrapper()->SourceLookup<Natives>(i);
}
}
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
i::GarbageCollectionReason::kTesting);
heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
i::GarbageCollectionReason::kTesting);

Object* raw_foo;
{
@ -366,7 +369,8 @@ static void PartiallySerializeContext(Vector<const byte>* startup_blob_out,
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of env.
heap->CollectAllGarbage();
heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
i::GarbageCollectionReason::kTesting);

{
v8::HandleScope handle_scope(v8_isolate);
@ -484,7 +488,8 @@ static void PartiallySerializeCustomContext(
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of env.
isolate->heap()->CollectAllAvailableGarbage("snapshotting");
isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kTesting);

{
v8::HandleScope handle_scope(v8_isolate);
@ -1894,7 +1899,6 @@ TEST(CodeSerializerEmbeddedObject) {
LocalContext context;
Isolate* isolate = CcTest::i_isolate();
isolate->compilation_cache()->Disable(); // Disable same-isolate code cache.
Heap* heap = isolate->heap();
v8::HandleScope scope(CcTest::isolate());

size_t actual_size;
@ -1934,7 +1938,7 @@ TEST(CodeSerializerEmbeddedObject) {
CHECK(rit2.rinfo()->target_object()->IsHeapNumber());
CHECK_EQ(0.3, HeapNumber::cast(rit2.rinfo()->target_object())->value());

heap->CollectAllAvailableGarbage();
CcTest::CollectAllAvailableGarbage();

RelocIterator rit3(copy->code(),
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT));
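
In the serializer tests the old calls carried free-form labels such as "serialize" and "snapshotting"; after the change every test call site names the same kTesting value. For orientation, a sketch of the scoped enum this commit introduces. Only kTesting is visible in the hunks above, so the other enumerators listed here are assumptions:

// Assumed shape of the new enum; only kTesting appears in these hunks,
// and the other enumerators and their order are guesses.
enum class GarbageCollectionReason {
  kUnknown,
  kDebugger,
  kLowMemoryNotification,
  kSnapshotCreator,
  kTesting,
  // ... one enumerator per caller-supplied reason, replacing the strings.
};
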
@ -71,8 +71,8 @@ TEST(Create) {
#endif
}

CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::heap()->CollectAllGarbage();
CcTest::CollectGarbage(i::NEW_SPACE);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// All symbols should be distinct.
for (int i = 0; i < kNumSymbols; ++i) {

@ -934,7 +934,7 @@ TEST(Regress436816) {
CHECK(object->map()->HasFastPointerLayout());

// Trigger GCs and heap verification.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -991,7 +991,7 @@ TEST(DescriptorArrayTrimming) {

// Call GC that should trim both |map|'s descriptor array and layout
// descriptor.
CcTest::heap()->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);

// The unused tail of the layout descriptor is now "clean" again.
CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
@ -1057,7 +1057,7 @@ TEST(DoScavenge) {
CHECK(isolate->heap()->new_space()->Contains(*obj));

// Do scavenge so that |obj| is moved to survivor space.
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::CollectGarbage(i::NEW_SPACE);

// Create temp object in the new space.
Handle<JSArray> temp = factory->NewJSArray(0, FAST_ELEMENTS);
@ -1074,7 +1074,7 @@ TEST(DoScavenge) {

// Now |obj| moves to old gen and it has a double field that looks like
// a pointer to a from semi-space.
CcTest::heap()->CollectGarbage(i::NEW_SPACE, "boom");
CcTest::CollectGarbage(i::NEW_SPACE);

CHECK(isolate->heap()->old_space()->Contains(*obj));

@ -1155,14 +1155,14 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

// Trigger GCs so that |obj| moves to old gen.
heap->CollectGarbage(i::NEW_SPACE); // in survivor space now
heap->CollectGarbage(i::NEW_SPACE); // in old gen now
CcTest::CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::CollectGarbage(i::NEW_SPACE); // in old gen now

CHECK(isolate->heap()->old_space()->Contains(*obj));
CHECK(isolate->heap()->old_space()->Contains(*obj_value));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

heap->CollectGarbage(i::OLD_SPACE, "boom");
CcTest::CollectGarbage(i::OLD_SPACE);

// |obj_value| must be evacuated.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
@ -1412,7 +1412,7 @@ static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
CcTest::CollectGarbage(NEW_SPACE);

if (check_tagged_value) {
FieldIndex tagged_field_index =
@ -1491,7 +1491,7 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);

// Trigger GC to evacuate all candidates.
CcTest::heap()->CollectGarbage(OLD_SPACE, "boom");
CcTest::CollectGarbage(OLD_SPACE);

// Ensure that the values are still there and correct.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
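
Note what disappears in the hunks above: ad-hoc reason strings such as "boom". Free-form strings cannot be enumerated or aggregated, while a closed enum can. Purely as an illustration (this helper is hypothetical and not part of the commit), an enum reason can be mapped exhaustively to a stable label or a counter bucket:

// Hypothetical helper, for illustration only: a closed enum supports
// exhaustive switching, which arbitrary strings like "boom" did not.
const char* ToString(i::GarbageCollectionReason reason) {
  switch (reason) {
    case i::GarbageCollectionReason::kTesting:
      return "testing";
    default:
      return "unknown";
  }
}
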
@ -76,7 +76,6 @@ TEST(Weakness) {
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
GlobalHandles* global_handles = isolate->global_handles();
@ -105,7 +104,7 @@ TEST(Weakness) {
CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());

// Force a full GC.
heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(2, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
@ -121,7 +120,7 @@ TEST(Weakness) {
}
CHECK(global_handles->IsWeak(key.location()));

heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(2,
@ -133,7 +132,6 @@ TEST(Shrinking) {
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);

@ -159,7 +157,7 @@ TEST(Shrinking) {
CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
CHECK_EQ(
32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
@ -202,7 +200,7 @@ TEST(Regress2060a) {

// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -244,9 +242,9 @@ TEST(Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -263,5 +261,5 @@ TEST(Regress399527) {
// The weak map is marked black here but leaving the handle scope will make
// the object unreachable. Aborting incremental marking will clear all the
// marking bits which makes the weak map garbage.
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}

@ -79,7 +79,6 @@ TEST(WeakSet_Weakness) {
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
GlobalHandles* global_handles = isolate->global_handles();
@ -104,7 +103,7 @@ TEST(WeakSet_Weakness) {
CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());

// Force a full GC.
heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(0, NumberOfWeakCalls);
CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
@ -120,7 +119,7 @@ TEST(WeakSet_Weakness) {
}
CHECK(global_handles->IsWeak(key.location()));

heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, NumberOfWeakCalls);
CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
@ -132,7 +131,6 @@ TEST(WeakSet_Shrinking) {
LocalContext context;
Isolate* isolate = GetIsolateFrom(&context);
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);

@ -158,7 +156,7 @@ TEST(WeakSet_Shrinking) {
CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
heap->CollectAllGarbage(false);
CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
CHECK_EQ(
32, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
@ -201,7 +199,7 @@ TEST(WeakSet_Regress2060a) {

// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}


@ -243,7 +241,7 @@ TEST(WeakSet_Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
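
The weak-map and weak-set tests repeat the pattern, and they also retire calls like heap->CollectAllGarbage(false), which passed a bare boolean where a flags mask belongs. The Heap entry points, as inferred from the call sites in this diff; parameter names, return types, and default arguments are assumptions, not quoted declarations:

// Inferred from call sites only; names, return types, and defaults are
// assumptions.
void CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
                       GCCallbackFlags callback_flags = kNoGCCallbackFlags);
void CollectGarbage(AllocationSpace space, GarbageCollectionReason gc_reason,
                    GCCallbackFlags callback_flags = kNoGCCallbackFlags);
void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);
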
@ -160,7 +160,8 @@ TEST_F(GCTracerTest, RegularScope) {
EXPECT_DOUBLE_EQ(0.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
// Sample not added because it's not within a started tracer.
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Start(MARK_COMPACTOR, "gc unittest", "collector unittest");
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
tracer->AddScopeSample(GCTracer::Scope::MC_MARK, 100);
tracer->Stop(MARK_COMPACTOR);
EXPECT_DOUBLE_EQ(100.0, tracer->current_.scopes[GCTracer::Scope::MC_MARK]);
@ -174,7 +175,8 @@ TEST_F(GCTracerTest, IncrementalScope) {
0.0, tracer->current_.scopes[GCTracer::Scope::MC_INCREMENTAL_FINALIZE]);
// Sample is added because its ScopeId is listed as incremental sample.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
tracer->Start(MARK_COMPACTOR, "gc unittest", "collector unittest");
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
@ -189,7 +191,8 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {

// Round 1.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 50);
tracer->Start(MARK_COMPACTOR, "gc unittest", "collector unittest");
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 100);
@ -213,7 +216,8 @@ TEST_F(GCTracerTest, IncrementalMarkingDetails) {
// Round 2. Cumulative numbers should add up, others should be reset.
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 13);
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 15);
tracer->Start(MARK_COMPACTOR, "gc unittest", "collector unittest");
tracer->Start(MARK_COMPACTOR, GarbageCollectionReason::kTesting,
"collector unittest");
// Switch to incremental MC to enable writing back incremental scopes.
tracer->current_.type = GCTracer::Event::INCREMENTAL_MARK_COMPACTOR;
tracer->AddScopeSample(GCTracer::Scope::MC_INCREMENTAL_FINALIZE, 122);
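
The GCTracer hunks show Start losing its free-form gc-reason string in favor of the enum, while keeping the collector_reason string as a second, human-readable channel. The member declaration, as inferred from the updated call sites above; the parameter names are assumptions:

// Inferred from the call sites above; parameter names are assumptions.
void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
           const char* collector_reason);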