From f3babafbdd7a7804273d2665b2aa8004ffb07610 Mon Sep 17 00:00:00 2001
From: Ulan Degenbaev
Date: Mon, 2 Mar 2020 14:52:18 +0100
Subject: [PATCH] [heap] Restrict usages of AlwaysAllocateScope
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The scope disables garbage collection and should be only used
in heap, deserializer, isolate bootstrap, and testing.

Change-Id: Ide95926ef32fd9362cd9134e883e1bd626cc3b11
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2083292
Reviewed-by: Dominik Inführ
Reviewed-by: Jakob Gruber
Commit-Queue: Ulan Degenbaev
Cr-Commit-Position: refs/heads/master@{#66557}
---
 src/execution/isolate.cc                   |  6 +++---
 src/heap/heap-inl.h                        |  6 +++---
 src/heap/heap.cc                           |  8 ++++----
 src/heap/heap.h                            | 21 +++++++++++++++++++--
 src/heap/mark-compact.cc                   |  2 +-
 src/runtime/runtime-test.cc                |  1 -
 test/cctest/heap/test-alloc.cc             |  2 +-
 test/cctest/heap/test-heap.cc              | 22 +++++++++++-----------
 test/cctest/heap/test-invalidated-slots.cc |  9 ++++-----
 test/cctest/heap/test-spaces.cc            |  2 +-
 test/cctest/test-api.cc                    |  4 ++--
 test/cctest/test-serialize.cc              |  2 +-
 test/cctest/test-strings.cc                |  2 +-
 13 files changed, 51 insertions(+), 36 deletions(-)

diff --git a/src/execution/isolate.cc b/src/execution/isolate.cc
index ca6a0a5905..24d02163b2 100644
--- a/src/execution/isolate.cc
+++ b/src/execution/isolate.cc
@@ -3342,7 +3342,7 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
   has_fatal_error_ = false;
 
   // The initialization process does not handle memory exhaustion.
-  AlwaysAllocateScope always_allocate(this);
+  AlwaysAllocateScope always_allocate(heap());
 
 #define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
   isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
@@ -3471,8 +3471,8 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
 
   // If we are deserializing, read the state into the now-empty heap.
   {
-    AlwaysAllocateScope always_allocate(this);
-    CodeSpaceMemoryModificationScope modification_scope(&heap_);
+    AlwaysAllocateScope always_allocate(heap());
+    CodeSpaceMemoryModificationScope modification_scope(heap());
 
     if (create_heap_objects) {
       heap_.read_only_space()->ClearStringPaddingIfNeeded();
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index a006b98353..e618b91058 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -648,13 +648,13 @@ AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) {
   heap_->always_allocate_scope_count_++;
 }
 
-AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
-    : AlwaysAllocateScope(isolate->heap()) {}
-
 AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }
 
+AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
+    : scope_(heap) {}
+
 CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
     : heap_(heap) {
   if (heap_->write_protect_code_memory()) {
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 6db19d85ed..9193014139 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -2263,7 +2263,7 @@ void Heap::MinorMarkCompact() {
   LOG(isolate_, ResourceEvent("MinorMarkCompact", "begin"));
 
   TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
-  AlwaysAllocateScope always_allocate(isolate());
+  AlwaysAllocateScope always_allocate(this);
   IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
       incremental_marking());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
@@ -2380,7 +2380,7 @@ void Heap::Scavenge() {
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. There is no sense in trying to
   // trigger one during scavenge: scavenges allocation should always succeed.
-  AlwaysAllocateScope scope(isolate());
+  AlwaysAllocateScope scope(this);
 
   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
@@ -5063,7 +5063,7 @@ HeapObject Heap::AllocateRawWithRetryOrFailSlowPath(
   isolate()->counters()->gc_last_resort_from_handles()->Increment();
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
-    AlwaysAllocateScope scope(isolate());
+    AlwaysAllocateScope scope(this);
     alloc = AllocateRaw(size, allocation, origin, alignment);
   }
   if (alloc.To(&result)) {
@@ -5097,7 +5097,7 @@ HeapObject Heap::AllocateRawCodeInLargeObjectSpace(int size) {
   isolate()->counters()->gc_last_resort_from_handles()->Increment();
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
-    AlwaysAllocateScope scope(isolate());
+    AlwaysAllocateScope scope(this);
     alloc = code_lo_space()->AllocateRaw(size);
   }
   if (alloc.To(&result)) {
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 7fc55e4fe0..d59dda19fc 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -2276,16 +2276,33 @@ class HeapStats {
   intptr_t* end_marker;                     // 27
 };
 
+// Disables GC for all allocations. It should not be used
+// outside heap, deserializer, and isolate bootstrap.
+// Use AlwaysAllocateScopeForTesting in tests.
 class AlwaysAllocateScope {
  public:
-  explicit inline AlwaysAllocateScope(Heap* heap);
-  explicit inline AlwaysAllocateScope(Isolate* isolate);
   inline ~AlwaysAllocateScope();
 
  private:
+  friend class AlwaysAllocateScopeForTesting;
+  friend class Deserializer;
+  friend class DeserializerAllocator;
+  friend class Evacuator;
+  friend class Heap;
+  friend class Isolate;
+
+  explicit inline AlwaysAllocateScope(Heap* heap);
   Heap* heap_;
 };
 
+class AlwaysAllocateScopeForTesting {
+ public:
+  explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
+
+ private:
+  AlwaysAllocateScope scope_;
+};
+
 // The CodeSpaceMemoryModificationScope can only be used by the main thread.
 class CodeSpaceMemoryModificationScope {
  public:
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 51ec80792b..7a87adb5f6 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -2940,7 +2940,7 @@ void Evacuator::EvacuatePage(MemoryChunk* chunk) {
   intptr_t saved_live_bytes = 0;
   double evacuation_time = 0.0;
   {
-    AlwaysAllocateScope always_allocate(heap()->isolate());
+    AlwaysAllocateScope always_allocate(heap());
     TimedScope timed_scope(&evacuation_time);
     RawEvacuatePage(chunk, &saved_live_bytes);
   }
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index fb06a8b8f9..e3ebff56c6 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -730,7 +730,6 @@ RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
   HandleScope scope(isolate);
   Heap* heap = isolate->heap();
   NewSpace* space = heap->new_space();
-  AlwaysAllocateScope always_allocate(heap);
   do {
     FillUpOneNewSpacePage(isolate, heap);
   } while (space->AddFreshPage());
diff --git a/test/cctest/heap/test-alloc.cc b/test/cctest/heap/test-alloc.cc
index 684bda4411..eb1959dc45 100644
--- a/test/cctest/heap/test-alloc.cc
+++ b/test/cctest/heap/test-alloc.cc
@@ -46,8 +46,8 @@ Handle HeapTester::TestAllocateAfterFailures() {
   // we wrap the allocator function in an AlwaysAllocateScope. Test that
   // all allocations succeed immediately without any retry.
   CcTest::CollectAllAvailableGarbage();
-  AlwaysAllocateScope scope(CcTest::i_isolate());
   Heap* heap = CcTest::heap();
+  AlwaysAllocateScopeForTesting scope(heap);
   int size = FixedArray::SizeFor(100);
   // Young generation.
   HeapObject obj =
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 6c11603c44..25668aa23f 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -1576,7 +1576,7 @@ HEAP_TEST(TestSizeOfObjects) {
     // Allocate objects on several different old-space pages so that
     // concurrent sweeper threads will be busy sweeping the old space on
     // subsequent GC runs.
-    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
     for (int i = 1; i <= 100; i++) {
       isolate->factory()->NewFixedArray(8192, AllocationType::kOld);
@@ -2298,7 +2298,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   heap::SimulateFullSpace(CcTest::heap()->new_space());
-  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  AlwaysAllocateScopeForTesting always_allocate(CcTest::heap());
   v8::Local<v8::Value> res = CompileRun(
       "function c(x) {"
      "  this.x = x;"
@@ -2822,7 +2822,7 @@ TEST(Regress1465) {
 
   CompileRun("function F() {}");
   {
-    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+    AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
     for (int i = 0; i < transitions_count; i++) {
       EmbeddedVector buffer;
       SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
@@ -2860,7 +2860,7 @@ static i::Handle GetByName(const char* name) {
 
 #ifdef DEBUG
 static void AddTransitions(int transitions_count) {
-  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
   for (int i = 0; i < transitions_count; i++) {
     EmbeddedVector buffer;
     SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
@@ -3010,7 +3010,7 @@ TEST(ReleaseOverReservedPages) {
   const int initial_page_count = old_space->CountTotalPages();
   const int overall_page_count = number_of_test_pages + initial_page_count;
   for (int i = 0; i < number_of_test_pages; i++) {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     factory->NewFixedArray(1, AllocationType::kOld);
   }
@@ -3587,7 +3587,7 @@ TEST(Regress169928) {
 
   // This should crash with a protection violation if we are running a build
   // with the bug.
-  AlwaysAllocateScope aa_scope(isolate);
+  AlwaysAllocateScopeForTesting aa_scope(isolate->heap());
   v8::Script::Compile(env.local(), mote_code_string)
       .ToLocalChecked()
       ->Run(env.local())
@@ -5129,7 +5129,7 @@ void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
   CHECK(IsAligned(bytes, kTaggedSize));
   Factory* factory = isolate->factory();
   HandleScope scope(isolate);
-  AlwaysAllocateScope always_allocate(isolate);
+  AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
   int elements =
       static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
   Handle<FixedArray> array = factory->NewFixedArray(
@@ -5419,7 +5419,7 @@ HEAP_TEST(Regress589413) {
 
   {
     // Ensure that incremental marking is not started unexpectedly.
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
 
     // Make sure the byte arrays will be promoted on the next GC.
     CcTest::CollectGarbage(NEW_SPACE);
@@ -5649,7 +5649,7 @@ TEST(Regress615489) {
   CHECK(marking->IsMarking());
   marking->StartBlackAllocationForTesting();
   {
-    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     v8::HandleScope inner(CcTest::isolate());
     isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
   }
@@ -6389,13 +6389,13 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
   heap::SimulateIncrementalMarking(heap, false);
   Handle<Map> map;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
   }
   heap->incremental_marking()->StartBlackAllocationForTesting();
   Handle<HeapObject> object;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     object = handle(isolate->factory()->NewForTest(map, AllocationType::kOld),
                     isolate);
   }
diff --git a/test/cctest/heap/test-invalidated-slots.cc b/test/cctest/heap/test-invalidated-slots.cc
index 861c48d69d..67e5c0d48e 100644
--- a/test/cctest/heap/test-invalidated-slots.cc
+++ b/test/cctest/heap/test-invalidated-slots.cc
@@ -24,12 +24,11 @@ Page* HeapTester::AllocateByteArraysOnPage(
   const int kLength = 256 - ByteArray::kHeaderSize;
   const int kSize = ByteArray::SizeFor(kLength);
   CHECK_EQ(kSize, 256);
-  Isolate* isolate = heap->isolate();
   PagedSpace* old_space = heap->old_space();
   Page* page;
   // Fill a page with byte arrays.
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     ByteArray byte_array;
     CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
@@ -181,7 +180,7 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
 
 Handle<FixedArray> AllocateArrayOnFreshPage(Isolate* isolate,
                                             PagedSpace* old_space, int length) {
-  AlwaysAllocateScope always_allocate(isolate);
+  AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
   heap::SimulateFullSpace(old_space);
   return isolate->factory()->NewFixedArray(length, AllocationType::kOld);
 }
@@ -242,7 +241,7 @@ HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
       AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
   Handle<FixedArray> trimmed;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     trimmed = factory->NewFixedArray(
         kMaxRegularHeapObjectSize / kTaggedSize + 100, AllocationType::kOld);
     DCHECK(MemoryChunk::FromHeapObject(*trimmed)->InLargeObjectSpace());
@@ -319,7 +318,7 @@ HEAP_TEST(InvalidatedSlotsFastToSlow) {
   AllocateArrayOnFreshPage(isolate, old_space, 1);
   Handle<JSObject> obj;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     Handle<JSFunction> function = factory->NewFunctionForTest(name);
     function->shared().set_expected_nof_properties(3);
     obj = factory->NewJSObject(function, AllocationType::kOld);
diff --git a/test/cctest/heap/test-spaces.cc b/test/cctest/heap/test-spaces.cc
index 789a08ac6e..eb91a5e671 100644
--- a/test/cctest/heap/test-spaces.cc
+++ b/test/cctest/heap/test-spaces.cc
@@ -568,7 +568,7 @@ HEAP_TEST(Regress777177) {
 
   {
     // Ensure a new linear allocation area on a fresh page.
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
     HeapObject obj = result.ToObjectChecked();
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 72e255578f..d480423d58 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -13751,7 +13751,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
   const int kIterations = 10;
   for (int i = 0; i < kIterations; ++i) {
     LocalContext env(isolate);
-    i::AlwaysAllocateScope always_allocate(i_isolate);
+    i::AlwaysAllocateScopeForTesting always_allocate(heap);
     CompileRun(script);
 
     // Keep a strong reference to the code object in the handle scope.
@@ -16448,7 +16448,7 @@ TEST(RecursionWithSourceURLInMessageScriptResourceNameOrSourceURL) {
 static void CreateGarbageInOldSpace() {
   i::Factory* factory = CcTest::i_isolate()->factory();
   v8::HandleScope scope(CcTest::isolate());
-  i::AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  i::AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
   for (int i = 0; i < 1000; i++) {
     factory->NewFixedArray(1000, i::AllocationType::kOld);
   }
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index d7caf0f04e..37335b8e77 100644
--- a/test/cctest/test-serialize.cc
+++ b/test/cctest/test-serialize.cc
@@ -1871,7 +1871,7 @@ TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
   Handle<String> moving_object;
   Page* ec_page;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(heap->old_space());
     moving_object = isolate->factory()->InternalizeString(
         isolate->factory()->NewStringFromAsciiChecked("happy_hippo"));
diff --git a/test/cctest/test-strings.cc b/test/cctest/test-strings.cc
index a2a11acc61..4fe6de0f62 100644
--- a/test/cctest/test-strings.cc
+++ b/test/cctest/test-strings.cc
@@ -686,7 +686,7 @@ void TestStringCharacterStream(BuildString build, int test_cases) {
   for (int i = 0; i < test_cases; i++) {
     printf("%d\n", i);
     HandleScope inner_scope(isolate);
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
     // Build flat version of cons string.
     Handle<String> flat_string = build(i, &data);
     ConsStringStats flat_string_stats;
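
Illustrative usage after this change (not part of the patch): a minimal sketch assuming the cctest harness already used in the hunks above; the test name is hypothetical and the include paths are approximate.

// Illustrative only; not part of the applied patch.
#include "src/heap/heap-inl.h"   // AlwaysAllocateScopeForTesting (assumed path)
#include "test/cctest/cctest.h"  // CcTest and the TEST macro (assumed path)

namespace v8 {
namespace internal {
namespace heap {

TEST(HypotheticalAlwaysAllocateUsage) {  // hypothetical test name
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();

  // Tests can no longer construct AlwaysAllocateScope directly: its
  // constructor is private and reachable only from the friends listed in
  // heap.h (Heap, Deserializer, Evacuator, Isolate, ...). The public
  // wrapper below keeps GC disabled for the lifetime of the scope.
  AlwaysAllocateScopeForTesting always_allocate(heap);

  // Allocation cannot trigger a garbage collection while the scope is active.
  CcTest::i_isolate()->factory()->NewFixedArray(100, AllocationType::kOld);
}

}  // namespace heap
}  // namespace internal
}  // namespace v8

Heap-internal callers such as Heap::Scavenge keep constructing AlwaysAllocateScope directly (now taking Heap* rather than Isolate*), which the friend list in heap.h permits.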