Force full GC whenever CollectAllGarbage is meant to trigger a full GC.

Add a finalize-incremental-marking mode to CollectAllGarbage: when incremental marking is in progress but a full GC is wanted at a given CollectAllGarbage call site, this mode finalizes the in-progress incremental marking instead of aborting it.

The default mode for CollectAllGarbage is to finalize incremental marking and then perform a full GC.

BUG=

Review URL: https://codereview.chromium.org/1082973003

Cr-Commit-Position: refs/heads/master@{#27831}
This commit is contained in:
hpayer 2015-04-15 00:10:47 -07:00 committed by Commit bot
parent 83bc009d46
commit 9c105f0940
25 changed files with 235 additions and 224 deletions

View File

@ -830,6 +830,7 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
}
if (collector == MARK_COMPACTOR &&
!mark_compact_collector()->finalize_incremental_marking() &&
!mark_compact_collector()->abort_incremental_marking() &&
!incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() &&

View File

@ -757,6 +757,7 @@ class Heap {
static const int kNoGCFlags = 0;
static const int kReduceMemoryFootprintMask = 1;
static const int kAbortIncrementalMarkingMask = 2;
static const int kFinalizeIncrementalMarkingMask = 4;
// Making the heap iterable requires us to abort incremental marking.
static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
@ -774,7 +775,7 @@ class Heap {
// non-zero, then the slower precise sweeper is used, which leaves the heap
// in a state where we can iterate over the heap visiting all objects.
void CollectAllGarbage(
int flags, const char* gc_reason = NULL,
int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Last hope GC, should try to squeeze as much as possible.

View File

@ -23,6 +23,9 @@ void MarkCompactCollector::SetFlags(int flags) {
reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
abort_incremental_marking_ =
((flags & Heap::kAbortIncrementalMarkingMask) != 0);
finalize_incremental_marking_ =
((flags & Heap::kFinalizeIncrementalMarkingMask) != 0);
DCHECK(!finalize_incremental_marking_ || !abort_incremental_marking_);
}

View File

@ -42,6 +42,7 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
#endif
reduce_memory_footprint_(false),
abort_incremental_marking_(false),
finalize_incremental_marking_(false),
marking_parity_(ODD_MARKING_PARITY),
compacting_(false),
was_marked_incrementally_(false),

View File

@ -659,6 +659,10 @@ class MarkCompactCollector {
bool abort_incremental_marking() const { return abort_incremental_marking_; }
bool finalize_incremental_marking() const {
return finalize_incremental_marking_;
}
bool is_compacting() const { return compacting_; }
MarkingParity marking_parity() { return marking_parity_; }
@ -749,6 +753,8 @@ class MarkCompactCollector {
bool abort_incremental_marking_;
bool finalize_incremental_marking_;
MarkingParity marking_parity_;
// True if we are collecting slots to perform evacuation from evacuation

View File

@ -277,7 +277,7 @@ static void CheckAccessorArgsCorrect(
CHECK(info.This() == info.Holder());
CHECK(
info.Data()->Equals(v8::String::NewFromUtf8(CcTest::isolate(), "data")));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(info.GetIsolate() == CcTest::isolate());
CHECK(info.This() == info.Holder());
CHECK(

View File

@ -312,7 +312,7 @@ void InterceptorHasOwnPropertyGetter(
void InterceptorHasOwnPropertyGetterGC(
Local<Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
ApiTestFuzzer::Fuzz();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
} // namespace

View File

@ -412,7 +412,7 @@ THREADED_TEST(ScriptUsingStringResource) {
CHECK_EQ(static_cast<const String::ExternalStringResourceBase*>(resource),
source->GetExternalStringResourceBase(&encoding));
CHECK_EQ(String::TWO_BYTE_ENCODING, encoding);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
@ -441,7 +441,7 @@ THREADED_TEST(ScriptUsingOneByteStringResource) {
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
@ -473,11 +473,11 @@ THREADED_TEST(ScriptMakingExternalString) {
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(1, dispose_count);
}
@ -499,11 +499,11 @@ THREADED_TEST(ScriptMakingExternalOneByteString) {
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, dispose_count);
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(1, dispose_count);
}
@ -622,7 +622,7 @@ TEST(MakingExternalUnalignedOneByteString) {
CHECK(success);
// Trigger GCs and force evacuation.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask);
}
@ -642,8 +642,8 @@ THREADED_TEST(UsingExternalString) {
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
}
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
}
@ -662,8 +662,8 @@ THREADED_TEST(UsingExternalOneByteString) {
factory->InternalizeString(istring);
CHECK(isymbol->IsInternalizedString());
}
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
}
@ -870,8 +870,8 @@ THREADED_TEST(StringConcat) {
CHECK_EQ(68, value->Int32Value());
}
CcTest::i_isolate()->compilation_cache()->Clear();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
}
@ -2091,7 +2091,7 @@ static void CheckAlignedPointerInInternalField(Handle<v8::Object> obj,
void* value) {
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(value) & 0x1));
obj->SetAlignedPointerInInternalField(0, value);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(value, obj->GetAlignedPointerFromInternalField(0));
}
@ -2129,7 +2129,7 @@ static void CheckAlignedPointerInEmbedderData(LocalContext* env, int index,
void* value) {
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(value) & 0x1));
(*env)->SetAlignedPointerInEmbedderData(index, value);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(value, (*env)->GetAlignedPointerFromEmbedderData(index));
}
@ -2159,7 +2159,7 @@ THREADED_TEST(EmbedderDataAlignedPointers) {
for (int i = 0; i < 100; i++) {
env->SetAlignedPointerInEmbedderData(i, AlignedTestPointer(i));
}
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < 100; i++) {
CHECK_EQ(AlignedTestPointer(i), env->GetAlignedPointerFromEmbedderData(i));
}
@ -2204,7 +2204,7 @@ THREADED_TEST(IdentityHash) {
// Ensure that the test starts with an fresh heap to test whether the hash
// code is based on the address.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Local<v8::Object> obj = v8::Object::New(isolate);
int hash = obj->GetIdentityHash();
int hash1 = obj->GetIdentityHash();
@ -2214,7 +2214,7 @@ THREADED_TEST(IdentityHash) {
// objects should not be assigned the same hash code. If the test below fails
// the random number generator should be evaluated.
CHECK_NE(hash, hash2);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int hash3 = v8::Object::New(isolate)->GetIdentityHash();
// Make sure that the identity hash is not based on the initial address of
// the object alone. If the test below fails the random number generator
@ -2271,7 +2271,7 @@ TEST(SymbolIdentityHash) {
int hash = symbol->GetIdentityHash();
int hash1 = symbol->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int hash3 = symbol->GetIdentityHash();
CHECK_EQ(hash, hash3);
}
@ -2282,7 +2282,7 @@ TEST(SymbolIdentityHash) {
int hash = js_symbol->GetIdentityHash();
int hash1 = js_symbol->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int hash3 = js_symbol->GetIdentityHash();
CHECK_EQ(hash, hash3);
}
@ -2298,7 +2298,7 @@ TEST(StringIdentityHash) {
int hash = str->GetIdentityHash();
int hash1 = str->GetIdentityHash();
CHECK_EQ(hash, hash1);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int hash3 = str->GetIdentityHash();
CHECK_EQ(hash, hash3);
@ -2318,7 +2318,7 @@ THREADED_TEST(SymbolProperties) {
v8::Local<v8::Symbol> sym2 = v8::Symbol::New(isolate, v8_str("my-symbol"));
v8::Local<v8::Symbol> sym3 = v8::Symbol::New(isolate, v8_str("sym3"));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Check basic symbol functionality.
CHECK(sym1->IsSymbol());
@ -2370,7 +2370,7 @@ THREADED_TEST(SymbolProperties) {
CHECK_EQ(1u, obj->GetOwnPropertyNames()->Length());
CHECK_EQ(num_props + 1, obj->GetPropertyNames()->Length());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(obj->SetAccessor(sym3, SymbolAccessorGetter, SymbolAccessorSetter));
CHECK(obj->Get(sym3)->IsUndefined());
@ -2438,7 +2438,7 @@ THREADED_TEST(PrivateProperties) {
v8::Local<v8::Private> priv2 =
v8::Private::New(isolate, v8_str("my-private"));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(priv2->Name()->Equals(v8::String::NewFromUtf8(isolate, "my-private")));
@ -2460,7 +2460,7 @@ THREADED_TEST(PrivateProperties) {
CHECK_EQ(1u, obj->GetOwnPropertyNames()->Length());
CHECK_EQ(num_props + 1, obj->GetPropertyNames()->Length());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Add another property and delete it afterwards to force the object in
// slow case.
@ -2587,7 +2587,7 @@ THREADED_TEST(ArrayBuffer_ApiInternalToExternal) {
CheckInternalFieldsAreZero(ab);
CHECK_EQ(1024, static_cast<int>(ab->ByteLength()));
CHECK(!ab->IsExternal());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
ScopedArrayBufferContents ab_contents(ab->Externalize());
CHECK(ab->IsExternal());
@ -2850,7 +2850,7 @@ THREADED_TEST(HiddenProperties) {
v8::Local<v8::String> empty = v8_str("");
v8::Local<v8::String> prop_name = v8_str("prop_name");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Make sure delete of a non-existent hidden value works
CHECK(obj->DeleteHiddenValue(key));
@ -2860,7 +2860,7 @@ THREADED_TEST(HiddenProperties) {
CHECK(obj->SetHiddenValue(key, v8::Integer::New(isolate, 2002)));
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Make sure we do not find the hidden property.
CHECK(!obj->Has(empty));
@ -2871,7 +2871,7 @@ THREADED_TEST(HiddenProperties) {
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
CHECK_EQ(2003, obj->Get(empty)->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Add another property and delete it afterwards to force the object in
// slow case.
@ -2882,7 +2882,7 @@ THREADED_TEST(HiddenProperties) {
CHECK(obj->Delete(prop_name));
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(obj->SetHiddenValue(key, Handle<Value>()));
CHECK(obj->GetHiddenValue(key).IsEmpty());
@ -3169,7 +3169,7 @@ void SecondPassCallback(const v8::WeakCallbackInfo<TwoPassCallbackData>& data) {
if (!trigger_gc) return;
auto data_2 = new TwoPassCallbackData(data.GetIsolate(), instance_counter);
data_2->SetWeak();
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
}
@ -3190,7 +3190,7 @@ TEST(TwoPassPhantomCallbacks) {
data->SetWeak();
}
CHECK_EQ(static_cast<int>(kLength), instance_counter);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, instance_counter);
}
@ -3208,7 +3208,7 @@ TEST(TwoPassPhantomCallbacksNestedGc) {
array[10]->MarkTriggerGc();
array[15]->MarkTriggerGc();
CHECK_EQ(static_cast<int>(kLength), instance_counter);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(0, instance_counter);
}
@ -3286,7 +3286,7 @@ static void TestPersistentValueMap() {
if (map.IsWeak()) {
reinterpret_cast<v8::internal::Isolate*>(isolate)
->heap()
->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
->CollectAllGarbage();
} else {
map.Clear();
}
@ -3622,7 +3622,7 @@ THREADED_TEST(ApiObjectGroups) {
// Do a single full GC, ensure incremental marking is stopped.
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -3646,7 +3646,7 @@ THREADED_TEST(ApiObjectGroups) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@ -3655,7 +3655,7 @@ THREADED_TEST(ApiObjectGroups) {
g1c1.handle.SetWeak(&g1c1, &WeakPointerCallback);
g2c1.handle.SetWeak(&g2c1, &WeakPointerCallback);
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CHECK_EQ(7, counter.NumberOfWeakCalls());
}
@ -3716,7 +3716,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
// Do a single full GC, ensure incremental marking is stopped.
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -3740,7 +3740,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
iso->SetReferenceFromGroup(id2, g2c1.handle);
}
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@ -3749,7 +3749,7 @@ THREADED_TEST(ApiObjectGroupsForSubtypes) {
g1c1.handle.SetWeak(&g1c1, &WeakPointerCallback);
g2c1.handle.SetWeak(&g2c1, &WeakPointerCallback);
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CHECK_EQ(7, counter.NumberOfWeakCalls());
}
@ -3828,7 +3828,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
// Do a single full GC
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -3856,7 +3856,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
iso->SetReferenceFromGroup(id4, g1s1.handle);
}
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// All objects should be gone. 9 global handles in total.
CHECK_EQ(9, counter.NumberOfWeakCalls());
@ -3885,7 +3885,7 @@ THREADED_TEST(WeakRootsSurviveTwoRoundsOfGC) {
// Do a single full GC
i::Isolate* i_iso = reinterpret_cast<v8::internal::Isolate*>(iso);
i::Heap* heap = i_iso->heap();
heap->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// We should have received the weak callback.
CHECK_EQ(1, counter.NumberOfWeakCalls());
@ -3970,7 +3970,7 @@ TEST(ApiObjectGroupsCycleForScavenger) {
v8::internal::Heap* heap =
reinterpret_cast<v8::internal::Isolate*>(iso)->heap();
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
// All objects should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@ -4002,7 +4002,7 @@ TEST(ApiObjectGroupsCycleForScavenger) {
->Set(v8_str("x"), Local<Value>::New(iso, g1s1.handle));
}
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
// All objects should be gone. 7 global handles in total.
CHECK_EQ(7, counter.NumberOfWeakCalls());
@ -4262,7 +4262,7 @@ TEST(NativeWeakMap) {
CHECK(value->Equals(weak_map->Get(obj2)));
CHECK(value->Equals(weak_map->Get(sym1)));
}
CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
{
HandleScope scope(isolate);
CHECK(value->Equals(weak_map->Get(local1)));
@ -4275,7 +4275,7 @@ TEST(NativeWeakMap) {
o2.handle.SetWeak(&o2, &WeakPointerCallback);
s1.handle.SetWeak(&s1, &WeakPointerCallback);
CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(3, counter.NumberOfWeakCalls());
CHECK(o1.handle.IsEmpty());
@ -6528,7 +6528,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) {
b->Set(v8_str("x"), a);
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
@ -6550,7 +6550,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) {
object_b.handle.MarkIndependent();
CHECK(object_b.handle.IsIndependent());
if (global_gc) {
CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
@ -6646,8 +6646,7 @@ void InternalFieldCallback(bool global_gc) {
}
}
if (global_gc) {
CcTest::heap()->CollectAllGarbage(
TestHeap::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
} else {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}
@ -6727,9 +6726,7 @@ THREADED_TEST(ResetWeakHandle) {
static void InvokeScavenge() { CcTest::heap()->CollectGarbage(i::NEW_SPACE); }
static void InvokeMarkSweep() {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
}
static void InvokeMarkSweep() { CcTest::heap()->CollectAllGarbage(); }
static void ForceScavenge(
@ -6807,7 +6804,7 @@ THREADED_TEST(IndependentHandleRevival) {
object.handle.MarkIndependent();
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CHECK(object.flag);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
{
v8::HandleScope handle_scope(isolate);
v8::Local<v8::Object> o =
@ -6833,7 +6830,7 @@ static void ArgumentsTestCallback(
CHECK(v8::Integer::New(isolate, 3)->Equals(args[2]));
CHECK(v8::Undefined(isolate)->Equals(args[3]));
v8::HandleScope scope(args.GetIsolate());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
@ -8085,7 +8082,7 @@ static bool security_check_with_gc_called;
static bool SecurityTestCallbackWithGC(Local<v8::Object> global,
Local<v8::Value> name,
v8::AccessType type, Local<Value> data) {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
security_check_with_gc_called = true;
return true;
}
@ -10372,7 +10369,7 @@ static void InterceptorCallICFastApi(
reinterpret_cast<int*>(v8::External::Cast(*info.Data())->Value());
++(*call_count);
if ((*call_count) % 20 == 0) {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
}
@ -10416,7 +10413,7 @@ static void GenerateSomeGarbage() {
void DirectApiCallback(const v8::FunctionCallbackInfo<v8::Value>& args) {
static int count = 0;
if (count++ % 3 == 0) {
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
// This should move the stub
GenerateSomeGarbage(); // This should ensure the old stub memory is flushed
}
@ -10480,7 +10477,7 @@ static int p_getter_count_3;
static Handle<Value> DoDirectGetter() {
if (++p_getter_count_3 % 3 == 0) {
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
GenerateSomeGarbage();
}
return v8_str("Direct Getter Result");
@ -11719,7 +11716,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
// the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG
@ -11820,8 +11817,8 @@ TEST(WeakCallbackApi) {
handle->SetWeak<v8::Object, v8::Persistent<v8::Object> >(handle,
WeakApiCallback);
}
reinterpret_cast<i::Isolate*>(isolate)->heap()->
CollectAllGarbage(i::Heap::kNoGCFlags);
reinterpret_cast<i::Isolate*>(isolate)->heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask);
// Verify disposed.
CHECK_EQ(initial_handles, globals->global_handles_count());
}
@ -11855,7 +11852,7 @@ THREADED_TEST(NewPersistentHandleFromWeakCallback) {
// weak callback of the first handle would be able to 'reallocate' it.
handle1.SetWeak(&handle1, NewPersistentHandleCallback);
handle2.Reset();
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
}
@ -11864,7 +11861,7 @@ v8::Persistent<v8::Object> to_be_disposed;
void DisposeAndForceGcCallback(
const v8::WeakCallbackData<v8::Object, v8::Persistent<v8::Object> >& data) {
to_be_disposed.Reset();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
data.GetParameter()->Reset();
}
@ -11881,7 +11878,7 @@ THREADED_TEST(DoNotUseDeletedNodesInSecondLevelGc) {
}
handle1.SetWeak(&handle1, DisposeAndForceGcCallback);
to_be_disposed.Reset(isolate, handle2);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
}
void DisposingCallback(
@ -11911,7 +11908,7 @@ THREADED_TEST(NoGlobalHandlesOrphaningDueToWeakCallback) {
}
handle2.SetWeak(&handle2, DisposingCallback);
handle3.SetWeak(&handle3, HandleCreatingCallback);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
}
@ -13531,12 +13528,12 @@ THREADED_TEST(PixelArray) {
v8::kExternalUint8ClampedArray,
pixel_data));
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
pixels->set(i, i % 256);
}
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(i % 256, pixels->get_scalar(i));
CHECK_EQ(i % 256, pixel_data[i]);
@ -14111,7 +14108,7 @@ static void ObjectWithExternalArrayTestHelper(
"}"
"sum;");
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(28, result->Int32Value());
// Make sure out-of-range loads do not throw.
@ -14318,12 +14315,12 @@ static void FixedTypedArrayTestHelper(
CHECK_EQ(FixedTypedArrayClass::kInstanceType,
fixed_array->map()->instance_type());
CHECK_EQ(kElementCount, fixed_array->length());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
fixed_array->set(i, static_cast<ElementType>(i));
}
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(static_cast<int64_t>(static_cast<ElementType>(i)),
static_cast<int64_t>(fixed_array->get_scalar(i)));
@ -14422,12 +14419,12 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
i::Handle<ExternalArrayClass>::cast(
factory->NewExternalArray(kElementCount, array_type, array_data));
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
array->set(i, static_cast<ElementType>(i));
}
// Force GC to trigger verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
for (int i = 0; i < kElementCount; i++) {
CHECK_EQ(static_cast<int64_t>(i),
static_cast<int64_t>(array->get_scalar(i)));
@ -16482,7 +16479,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_simple);
other_context->Exit();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -16504,7 +16501,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_eval);
other_context->Exit();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -16531,7 +16528,7 @@ TEST(Regress528) {
other_context->Enter();
CompileRun(source_exception);
other_context->Exit();
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
if (GetGlobalObjectsCount() == 1) break;
}
CHECK_GE(2, gc_count);
@ -17051,26 +17048,26 @@ TEST(GCCallbacksOld) {
v8::V8::AddGCEpilogueCallback(EpilogueCallback);
CHECK_EQ(0, prologue_call_count);
CHECK_EQ(0, epilogue_call_count);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(1, prologue_call_count);
CHECK_EQ(1, epilogue_call_count);
v8::V8::AddGCPrologueCallback(PrologueCallbackSecond);
v8::V8::AddGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(1, prologue_call_count_second);
CHECK_EQ(1, epilogue_call_count_second);
v8::V8::RemoveGCPrologueCallback(PrologueCallback);
v8::V8::RemoveGCEpilogueCallback(EpilogueCallback);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
CHECK_EQ(2, epilogue_call_count_second);
v8::V8::RemoveGCPrologueCallback(PrologueCallbackSecond);
v8::V8::RemoveGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
@ -17086,26 +17083,26 @@ TEST(GCCallbacks) {
isolate->AddGCEpilogueCallback(EpilogueCallback);
CHECK_EQ(0, prologue_call_count);
CHECK_EQ(0, epilogue_call_count);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(1, prologue_call_count);
CHECK_EQ(1, epilogue_call_count);
isolate->AddGCPrologueCallback(PrologueCallbackSecond);
isolate->AddGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(1, prologue_call_count_second);
CHECK_EQ(1, epilogue_call_count_second);
isolate->RemoveGCPrologueCallback(PrologueCallback);
isolate->RemoveGCEpilogueCallback(EpilogueCallback);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
CHECK_EQ(2, epilogue_call_count_second);
isolate->RemoveGCPrologueCallback(PrologueCallbackSecond);
isolate->RemoveGCEpilogueCallback(EpilogueCallbackSecond);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, prologue_call_count);
CHECK_EQ(2, epilogue_call_count);
CHECK_EQ(2, prologue_call_count_second);
@ -17401,7 +17398,7 @@ TEST(ContainsOnlyOneByte) {
void FailedAccessCheckCallbackGC(Local<v8::Object> target,
v8::AccessType type,
Local<v8::Value> data) {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
@ -17920,7 +17917,7 @@ TEST(DontDeleteCellLoadIC) {
"})()",
"ReferenceError: cell is not defined");
CompileRun("cell = \"new_second\";");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
ExpectString("readCell()", "new_second");
ExpectString("readCell()", "new_second");
}
@ -17989,8 +17986,8 @@ TEST(PersistentHandleInNewSpaceVisitor) {
object1.SetWrapperClassId(42);
CHECK_EQ(42, object1.WrapperClassId());
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
v8::Persistent<v8::Object> object2(isolate, v8::Object::New(isolate));
CHECK_EQ(0, object2.WrapperClassId());
@ -18556,7 +18553,7 @@ THREADED_TEST(Regress1516) {
int elements = CountLiveMapsInMapCache(CcTest::i_isolate()->context());
CHECK_LE(1, elements);
CcTest::heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK_GT(elements, CountLiveMapsInMapCache(CcTest::i_isolate()->context()));
}

View File

@ -329,7 +329,7 @@ TEST(ConstantPoolCompacting) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_NE(old_ptr, *object);
CHECK_EQ(*object, array->get_heap_ptr_entry(0));

View File

@ -1178,7 +1178,7 @@ TEST(FunctionCallSample) {
// Collect garbage that might have be generated while installing
// extensions.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CompileRun(call_function_test_source);
v8::Local<v8::Function> function = GetFunction(*env, "start");

View File

@ -397,7 +397,7 @@ void CheckDebuggerUnloaded(bool check_functions) {
CHECK(!CcTest::i_isolate()->debug()->debug_info_list_);
// Collect garbage to ensure weak handles are cleared.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
// Iterate the head and check that there are no debugger related objects left.
@ -870,7 +870,7 @@ static void DebugEventBreakPointCollectGarbage(
CcTest::heap()->CollectGarbage(v8::internal::NEW_SPACE);
} else {
// Mark sweep compact.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
}
}
@ -1376,7 +1376,7 @@ static void CallAndGC(v8::Local<v8::Object> recv,
CHECK_EQ(2 + i * 3, break_point_hit_count);
// Mark sweep (and perhaps compact) and call function.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
f->Call(recv, 0, NULL);
CHECK_EQ(3 + i * 3, break_point_hit_count);
}
@ -2220,7 +2220,7 @@ TEST(ScriptBreakPointLineTopLevel) {
f = v8::Local<v8::Function>::Cast(
env->Global()->Get(v8::String::NewFromUtf8(env->GetIsolate(), "f")));
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
SetScriptBreakPointByNameFromJS(env->GetIsolate(), "test.html", 3, -1);

View File

@ -99,7 +99,7 @@ class AllowNativesSyntaxNoInlining {
// Abort any ongoing incremental marking to make sure that all weak global
// handle callbacks are processed.
static void NonIncrementalGC(i::Isolate* isolate) {
isolate->heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
}

View File

@ -195,7 +195,7 @@ TEST(VectorICProfilerStatistics) {
CHECK_EQ(1, feedback_vector->ic_generic_count());
// A collection will not affect the site.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(0, feedback_vector->ic_with_type_info_count());
CHECK_EQ(1, feedback_vector->ic_generic_count());
@ -209,7 +209,7 @@ TEST(VectorICProfilerStatistics) {
CHECK(nexus.GetFeedback()->IsAllocationSite());
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(1, feedback_vector->ic_with_type_info_count());
CHECK_EQ(0, feedback_vector->ic_generic_count());
CHECK(nexus.GetFeedback()->IsAllocationSite());
@ -243,7 +243,7 @@ TEST(VectorCallICStates) {
CHECK_EQ(GENERIC, nexus.StateFromFeedback());
// After a collection, state should remain GENERIC.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(GENERIC, nexus.StateFromFeedback());
// A call to Array is special, it contains an AllocationSite as feedback.
@ -253,7 +253,7 @@ TEST(VectorCallICStates) {
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
CHECK(nexus.GetFeedback()->IsAllocationSite());
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(MONOMORPHIC, nexus.StateFromFeedback());
}
@ -307,7 +307,7 @@ TEST(VectorLoadICStates) {
CHECK(!nexus.FindFirstMap());
// After a collection, state should not be reset to PREMONOMORPHIC.
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(MEGAMORPHIC, nexus.StateFromFeedback());
}

View File

@ -668,7 +668,7 @@ TEST(HeapSnapshotAddressReuse) {
CompileRun(
"for (var i = 0; i < 10000; ++i)\n"
" a[i] = new A();\n");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -710,7 +710,7 @@ TEST(HeapEntryIdsAndArrayShift) {
"for (var i = 0; i < 1; ++i)\n"
" a.shift();\n");
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -751,7 +751,7 @@ TEST(HeapEntryIdsAndGC) {
const v8::HeapSnapshot* snapshot1 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot1));
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
const v8::HeapSnapshot* snapshot2 = heap_profiler->TakeHeapSnapshot();
CHECK(ValidateSnapshot(snapshot2));
@ -1060,7 +1060,7 @@ TEST(HeapSnapshotObjectsStats) {
// We have to call GC 6 times. In other case the garbage will be
// the reason of flakiness.
for (int i = 0; i < 6; ++i) {
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
v8::SnapshotObjectId initial_id;

View File

@ -452,7 +452,7 @@ TEST(WeakGlobalHandlesMark) {
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
// Incremental marking potentially marked handles before they turned weak.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CHECK((*h1)->IsString());
@ -947,7 +947,7 @@ TEST(Regression39128) {
TestHeap* heap = CcTest::test_heap();
// Increase the chance of 'bump-the-pointer' allocation in old space.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
v8::HandleScope scope(CcTest::isolate());
@ -1048,14 +1048,14 @@ UNINITIALIZED_TEST(TestCodeFlushing) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
i_isolate->heap()->CollectAllGarbage();
i_isolate->heap()->CollectAllGarbage();
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use full marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
i_isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
i_isolate->heap()->CollectAllGarbage();
}
// foo should no longer be in the compilation cache
@ -1101,12 +1101,12 @@ TEST(TestCodeFlushingPreAged) {
CHECK(function->shared()->is_compiled());
// The code has been run so will survive at least one GC.
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK(function->shared()->is_compiled());
// The code was only run once, so it should be pre-aged and collected on the
// next GC.
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
// Execute the function again twice, and ensure it is reset to the young age.
@ -1116,14 +1116,14 @@ TEST(TestCodeFlushingPreAged) {
}
// The code will survive at least two GC now that it is young again.
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use full marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
}
// foo should no longer be in the compilation cache
@ -1166,15 +1166,15 @@ TEST(TestCodeFlushingIncremental) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CHECK(function->shared()->is_compiled());
// Simulate several GCs that use incremental marking.
const int kAgingThreshold = 6;
for (int i = 0; i < kAgingThreshold; i++) {
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
CHECK(!function->is_compiled() || function->IsOptimized());
@ -1189,7 +1189,7 @@ TEST(TestCodeFlushingIncremental) {
for (int i = 0; i < kAgingThreshold; i++) {
SimulateIncrementalMarking(CcTest::heap());
if (!function->next_function_link()->IsUndefined()) break;
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
// Force optimization while incremental marking is active and while
@ -1199,7 +1199,7 @@ TEST(TestCodeFlushingIncremental) {
}
// Simulate one final GC to make sure the candidate queue is sane.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(function->shared()->is_compiled() || !function->IsOptimized());
CHECK(function->is_compiled() || !function->IsOptimized());
}
@ -1228,7 +1228,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
Handle<String> bar_name = factory->InternalizeUtf8String("bar");
// Perfrom one initial GC to enable code flushing.
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
// This compile will add the code to the compilation cache.
{ v8::HandleScope scope(CcTest::isolate());
@ -1268,7 +1268,7 @@ TEST(TestCodeFlushingIncrementalScavenge) {
CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
// Simulate one final GC to make sure the candidate queue is sane.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
CHECK(!function->is_compiled() || function->IsOptimized());
}
@ -1305,8 +1305,8 @@ TEST(TestCodeFlushingIncrementalAbort) {
CHECK(function->shared()->is_compiled());
// The code will survive at least two GCs.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
CHECK(function->shared()->is_compiled());
// Bump the code age so that flushing is triggered.
@ -1333,7 +1333,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
}
// Simulate one final GC to make sure the candidate queue is sane.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK(function->shared()->is_compiled() || !function->IsOptimized());
CHECK(function->is_compiled() || !function->IsOptimized());
}
@ -1388,7 +1388,7 @@ TEST(CompilationCacheCachingBehavior) {
true, native_context, language_mode);
CHECK(!info.is_null());
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
// On second compilation, the hash is replaced by a real cache entry mapping
// the source to the shared function info containing the code.
@ -1400,7 +1400,7 @@ TEST(CompilationCacheCachingBehavior) {
info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
}
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
// Ensure code aging cleared the entry from the cache.
info = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0, false,
true, native_context, language_mode);
@ -1500,7 +1500,7 @@ TEST(TestInternalWeakLists) {
// Collect garbage that might have been created by one of the
// installed extensions.
isolate->compilation_cache()->Clear();
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(i + 1, CountNativeContexts());
@ -1532,7 +1532,7 @@ TEST(TestInternalWeakLists) {
// Mark compact handles the weak references.
isolate->compilation_cache()->Clear();
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
// Get rid of f3 and f5 in the same way.
@ -1541,14 +1541,14 @@ TEST(TestInternalWeakLists) {
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f5=null");
for (int j = 0; j < 10; j++) {
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
ctx[i]->Exit();
@ -1556,7 +1556,7 @@ TEST(TestInternalWeakLists) {
// Force compilation cache cleanup.
CcTest::heap()->NotifyContextDisposed(true);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Dispose the native contexts one by one.
for (int i = 0; i < kNumTestContexts; i++) {
@ -1572,7 +1572,7 @@ TEST(TestInternalWeakLists) {
}
// Mark compact handles the weak references.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
}
@ -1588,7 +1588,7 @@ static int CountNativeContextsWithGC(Isolate* isolate, int n) {
Handle<Object> object(heap->native_contexts_list(), isolate);
while (!object->IsUndefined()) {
count++;
if (count == n) heap->CollectAllGarbage(Heap::kNoGCFlags);
if (count == n) heap->CollectAllGarbage();
object =
Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
isolate);
@ -1610,7 +1610,7 @@ static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
while (object->IsJSFunction() &&
!Handle<JSFunction>::cast(object)->IsBuiltin()) {
count++;
if (count == n) isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
if (count == n) isolate->heap()->CollectAllGarbage();
object = Handle<Object>(
Object::cast(JSFunction::cast(*object)->next_function_link()),
isolate);
@ -1693,11 +1693,11 @@ TEST(TestSizeOfRegExpCode) {
// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
@ -1705,11 +1705,11 @@ TEST(TestSizeOfRegExpCode) {
int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
CompileRun("'foo'.match(reg_exp_source);");
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
CompileRun("'foo'.match(half_size_reg_exp);");
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
int size_with_optimized_regexp =
static_cast<int>(CcTest::heap()->SizeOfObjects());
@ -1729,11 +1729,11 @@ TEST(TestSizeOfObjects) {
// Get initial heap size after several full GCs, which will stabilize
// the heap size and return with sweeping finished completely.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage();
MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted();
@ -1755,7 +1755,7 @@ TEST(TestSizeOfObjects) {
// The heap size should go back to initial size after a full GC, even
// though sweeping didn't finish yet.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Normally sweeping would not be complete here, but no guarantees.
@ -2177,7 +2177,7 @@ TEST(PrototypeTransitionClearing) {
// Verify that only dead prototype transitions are cleared.
CHECK_EQ(initialTransitions + 10,
NumberOfProtoTransitions(baseObject->map()));
CcTest::heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CcTest::heap()->CollectAllGarbage();
const int transitions = 10 - 3;
CHECK_EQ(initialTransitions + transitions,
NumberOfProtoTransitions(baseObject->map()));
@ -2259,8 +2259,8 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
// guard interrupt. But here we didn't ask for that, and there is no
// JS code running to trigger the interrupt, so we explicitly finalize
// here.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags,
"Test finalizing incremental mark-sweep");
CcTest::heap()->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
"Test finalizing incremental mark-sweep");
}
CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
@ -2933,7 +2933,7 @@ TEST(Regress1465) {
CHECK_EQ(transitions_count, transitions_before);
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
@ -3116,7 +3116,7 @@ TEST(Regress2143a) {
CcTest::heap()->AgeInlineCaches();
// Explicitly request GC to perform final marking step and sweeping.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Handle<JSObject> root =
v8::Utils::OpenHandle(
@ -3160,7 +3160,7 @@ TEST(Regress2143b) {
CcTest::heap()->AgeInlineCaches();
// Explicitly request GC to perform final marking step and sweeping.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Handle<JSObject> root =
v8::Utils::OpenHandle(
@ -3198,15 +3198,17 @@ TEST(ReleaseOverReservedPages) {
// Triggering one GC will cause a lot of garbage to be discovered but
// even spread across all allocated pages.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
"triggered for preparation");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
// Triggering subsequent GCs should cause at least half of the pages
// to be released to the OS after at most two cycles.
heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
"triggered by test 1");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
"triggered by test 2");
CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
// Triggering a last-resort GC should cause all pages to be released to the
@ -3248,7 +3250,7 @@ TEST(Regress2237) {
}
CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
}
@ -3346,7 +3348,7 @@ TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
CHECK(feedback_vector->Get(FeedbackVectorICSlot(slot2))->IsWeakCell());
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(!WeakCell::cast(feedback_vector->Get(FeedbackVectorICSlot(slot1)))
->cleared());
@ -3407,7 +3409,7 @@ TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}
@ -3442,7 +3444,7 @@ TEST(IncrementalMarkingClearsMonomorphicConstructor) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
vector->Get(FeedbackVectorSlot(0)));
@ -3472,7 +3474,7 @@ TEST(IncrementalMarkingPreservesMonomorphicIC) {
}
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
if (FLAG_vector_ics) {
@ -3514,7 +3516,7 @@ TEST(IncrementalMarkingClearsMonomorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
if (FLAG_vector_ics) {
@ -3562,7 +3564,7 @@ TEST(IncrementalMarkingPreservesPolymorphicIC) {
// Fire context dispose notification.
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
if (FLAG_vector_ics) {
@ -3611,7 +3613,7 @@ TEST(IncrementalMarkingClearsPolymorphicIC) {
// Fire context dispose notification.
CcTest::isolate()->ContextDisposedNotification();
SimulateIncrementalMarking(CcTest::heap());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
if (FLAG_vector_ics) {
@ -3740,7 +3742,7 @@ TEST(Regress159140) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Prepare several closures that are all eligible for code flushing
// because all reachable ones are not optimized. Make sure that the
@ -3786,7 +3788,7 @@ TEST(Regress159140) {
// finish the GC to complete code flushing.
SimulateIncrementalMarking(heap);
CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
// Unoptimized code is missing and the deoptimizer will go ballistic.
CompileRun("g('bozo');");
@ -3802,7 +3804,7 @@ TEST(Regress165495) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Prepare an optimized closure that the optimized code map will get
// populated. Then age the unoptimized code to trigger code flushing
@ -3832,7 +3834,7 @@ TEST(Regress165495) {
// Simulate incremental marking so that unoptimized code is flushed
// even though it still is cached in the optimized code map.
SimulateIncrementalMarking(heap);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
// Make a new closure that will get code installed from the code map.
// Unoptimized code is missing and the deoptimizer will go ballistic.
@ -3851,7 +3853,7 @@ TEST(Regress169209) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Prepare a shared function info eligible for code flushing for which
// the unoptimized code will be replaced during optimization.
@ -3910,7 +3912,7 @@ TEST(Regress169209) {
"g(false);");
// Finish garbage collection cycle.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK(shared1->code()->gc_metadata() == NULL);
}
@ -3999,7 +4001,7 @@ TEST(Regress168801) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Ensure the code ends up on an evacuation candidate.
SimulateFullSpace(heap->code_space());
@ -4039,8 +4041,8 @@ TEST(Regress168801) {
}
// This cycle will bust the heap and subsequent cycles will go ballistic.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}
@ -4056,7 +4058,7 @@ TEST(Regress173458) {
HandleScope scope(isolate);
// Perform one initial GC to enable code flushing.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Ensure the code ends up on an evacuation candidate.
SimulateFullSpace(heap->code_space());
@ -4093,8 +4095,8 @@ TEST(Regress173458) {
CHECK(isolate->debug()->Load());
// This cycle will bust the heap and subsequent cycles will go ballistic.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}
@ -4229,7 +4231,7 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
// Now make sure that a gc should get rid of the function, even though we
// still have the allocation site alive.
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
// The site still exists because of our global handle, but the code is no
@ -4277,7 +4279,7 @@ TEST(CellsInOptimizedCodeAreWeak) {
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
DCHECK(code->marked_for_deoptimization());
@ -4318,7 +4320,7 @@ TEST(ObjectsInOptimizedCodeAreWeak) {
// Now make sure that a gc should get rid of the function
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
DCHECK(code->marked_for_deoptimization());
@ -4357,7 +4359,7 @@ TEST(NoWeakHashTableLeakWithIncrementalMarking) {
i, i, i, i, i, i, i, i);
CompileRun(source.start());
}
heap->CollectAllGarbage(i::Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
int elements = 0;
if (heap->weak_object_to_code_table()->IsHashTable()) {
@ -4507,7 +4509,7 @@ TEST(WeakFunctionInConstructor) {
weak_ic_cleared = false;
garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
Heap* heap = CcTest::i_isolate()->heap();
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CHECK(weak_ic_cleared);
// We've determined the constructor in createObj has had it's weak cell
@ -4519,7 +4521,7 @@ TEST(WeakFunctionInConstructor) {
Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
CHECK(slot_value->IsWeakCell());
if (WeakCell::cast(slot_value)->cleared()) break;
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
@ -4546,7 +4548,7 @@ void CheckWeakness(const char* source) {
weak_ic_cleared = false;
garbage.SetWeak(static_cast<void*>(&garbage), &ClearWeakIC);
Heap* heap = CcTest::i_isolate()->heap();
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CHECK(weak_ic_cleared);
}
@ -4760,7 +4762,7 @@ TEST(MonomorphicStaysMonomorphicAfterGC) {
v8::HandleScope scope(CcTest::isolate());
CompileRun("(testIC())");
}
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
{
v8::HandleScope scope(CcTest::isolate());
@ -4796,7 +4798,7 @@ TEST(PolymorphicStaysPolymorphicAfterGC) {
v8::HandleScope scope(CcTest::isolate());
CompileRun("(testIC())");
}
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
{
v8::HandleScope scope(CcTest::isolate());
@ -4864,7 +4866,7 @@ TEST(WeakCellsWithIncrementalMarking) {
CHECK(weak_cell->value()->IsFixedArray());
weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
}
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
CHECK_EQ(*survivor, weak_cells[0]->value());
for (int i = 1; i < N; i++) {
CHECK(weak_cells[i]->cleared());
@ -4911,7 +4913,7 @@ TEST(AddInstructionChangesNewSpacePromotion) {
heap->DisableInlineAllocation();
heap->set_allocation_timeout(1);
g->Call(global, 1, args1);
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}
@ -5222,14 +5224,14 @@ TEST(Regress3877) {
"a.x = new cls();"
"cls.prototype = null;");
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
// The map of a.x keeps prototype alive
CHECK(!weak_prototype->cleared());
// Change the map of a.x and make the previous map garbage collectable.
CompileRun("a.x.__proto__ = {};");
for (int i = 0; i < 4; i++) {
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
CHECK(weak_prototype->cleared());
}

View File

@ -92,8 +92,8 @@ TEST(Promotion) {
CHECK(heap->InSpace(*array, NEW_SPACE));
// Call mark compact GC, so array becomes an old object.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
// Array now sits in the old space
CHECK(heap->InSpace(*array, OLD_SPACE));

View File

@ -36,7 +36,7 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() {
NewSpace* new_space = heap->new_space();
// Make sure we can allocate some objects without causing a GC later.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
// Allocate a string, the GC may suspect a memento behind the string.
Handle<SeqOneByteString> string =

View File

@ -429,7 +429,7 @@ TEST(ObservationWeakMap) {
CHECK_EQ(1, NumberOfElements(callbackInfoMap));
CHECK_EQ(1, NumberOfElements(objectInfoMap));
CHECK_EQ(1, NumberOfElements(notifierObjectInfoMap));
i_isolate->heap()->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
i_isolate->heap()->CollectAllGarbage();
CHECK_EQ(0, NumberOfElements(callbackInfoMap));
CHECK_EQ(0, NumberOfElements(objectInfoMap));
CHECK_EQ(0, NumberOfElements(notifierObjectInfoMap));
@ -682,7 +682,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
// the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CcTest::heap()->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
int count = GetGlobalObjectsCount();
#ifdef DEBUG

View File

@ -297,8 +297,8 @@ UNINITIALIZED_TEST(PartialSerialization) {
isolate->bootstrapper()->NativesSourceLookup(i);
}
}
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
Object* raw_foo;
{
@ -422,7 +422,7 @@ UNINITIALIZED_TEST(ContextSerialization) {
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of env.
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
Vector<char> startup_name = Vector<char>::New(file_name_length + 1);

View File

@ -64,7 +64,7 @@ TEST(Create) {
}
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// All symbols should be distinct.
for (int i = 0; i < kNumSymbols; ++i) {

View File

@ -97,7 +97,7 @@ class ThreadB : public v8::base::Thread {
v8::Context::Scope context_scope(context);
// Clear the caches by forcing major GC.
CcTest::heap()->CollectAllGarbage(v8::internal::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
turn = SECOND_TIME_FILL_CACHE;
break;
}

View File

@ -909,7 +909,7 @@ TEST(Regress436816) {
CHECK(object->map()->HasFastPointerLayout());
// Trigger GCs and heap verification.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
}
@ -966,7 +966,7 @@ TEST(DescriptorArrayTrimming) {
// Call GC that should trim both |map|'s descriptor array and layout
// descriptor.
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
// The unused tail of the layout descriptor is now "clean" again.
CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
@ -1390,7 +1390,7 @@ TEST(StoreBufferScanOnScavenge) {
chunk->set_scan_on_scavenge(true);
// Trigger GCs and force evacuation. Should not crash there.
CcTest::heap()->CollectAllGarbage(i::Heap::kNoGCFlags);
CcTest::heap()->CollectAllGarbage();
CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
}

View File

@ -199,7 +199,7 @@ TEST(Regress2060a) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
@ -241,9 +241,9 @@ TEST(Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}
@ -260,5 +260,5 @@ TEST(Regress399527) {
// The weak map is marked black here but leaving the handle scope will make
// the object unreachable. Aborting incremental marking will clear all the
// marking bits which makes the weak map garbage.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
heap->CollectAllGarbage();
}

View File

@ -199,7 +199,7 @@ TEST(WeakSet_Regress2060a) {
// Force compacting garbage collection.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
}
@ -241,7 +241,7 @@ TEST(WeakSet_Regress2060b) {
// Force compacting garbage collection. The subsequent collections are used
// to verify that key references were actually updated.
CHECK(FLAG_always_compact);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage(Heap::kNoGCFlags);
heap->CollectAllGarbage();
heap->CollectAllGarbage();
heap->CollectAllGarbage();
}

View File

@ -128,7 +128,7 @@ TEST(WeakArrayBuffersFromApi) {
CHECK(HasArrayBufferInWeakList(isolate->heap(), *iab1));
CHECK(HasArrayBufferInWeakList(isolate->heap(), *iab2));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
{
HandleScope scope2(isolate);
@ -138,7 +138,7 @@ TEST(WeakArrayBuffersFromApi) {
}
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
}
@ -180,7 +180,7 @@ TEST(WeakArrayBuffersFromScript) {
i::ScopedVector<char> source(1024);
i::SNPrintF(source, "ab%d = null;", i);
CompileRun(source.start());
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(2, CountArrayBuffersInWeakList(isolate->heap()) - start);
@ -199,7 +199,7 @@ TEST(WeakArrayBuffersFromScript) {
CompileRun("ab1 = null; ab2 = null; ab3 = null;");
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
}
}
@ -227,12 +227,12 @@ void TestViewFromApi() {
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita2));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountViews(isolate->heap(), *iab));
Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
}
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(0, CountViews(isolate->heap(), *iab));
}
@ -333,7 +333,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
i::SNPrintF(source, "ta%d = null;", i);
CompileRun(source.start());
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
@ -354,7 +354,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
}
CompileRun("ta1 = null; ta2 = null; ta3 = null;");
isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
isolate->heap()->CollectAllGarbage();
CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);