[heap] Restrict usages of AlwaysAllocateScope

The scope disables garbage collection and should only be used in
the heap, the deserializer, isolate bootstrap, and tests.
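
A minimal sketch of the intended usage after this change (the test body
and names below are illustrative, not part of this CL):

  // Inside heap code the scope is constructed directly; Heap is a friend:
  //   AlwaysAllocateScope always_allocate(this);  // 'this' is a Heap*
  // Tests have to go through the public wrapper instead:
  TEST(SomeHeapTest) {
    i::Heap* heap = CcTest::heap();
    i::AlwaysAllocateScopeForTesting always_allocate(heap);
    // Allocations in this region cannot trigger a GC.
  }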

Change-Id: Ide95926ef32fd9362cd9134e883e1bd626cc3b11
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2083292
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66557}
Author: Ulan Degenbaev, 2020-03-02 14:52:18 +01:00; committed by Commit Bot
commit f3babafbdd (parent fdf6f185b6)
13 changed files with 51 additions and 36 deletions


@@ -3342,7 +3342,7 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
   has_fatal_error_ = false;
 
   // The initialization process does not handle memory exhaustion.
-  AlwaysAllocateScope always_allocate(this);
+  AlwaysAllocateScope always_allocate(heap());
 
 #define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
   isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
@@ -3471,8 +3471,8 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
   // If we are deserializing, read the state into the now-empty heap.
   {
-    AlwaysAllocateScope always_allocate(this);
-    CodeSpaceMemoryModificationScope modification_scope(&heap_);
+    AlwaysAllocateScope always_allocate(heap());
+    CodeSpaceMemoryModificationScope modification_scope(heap());
 
     if (create_heap_objects) {
       heap_.read_only_space()->ClearStringPaddingIfNeeded();


@@ -648,13 +648,13 @@ AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) {
   heap_->always_allocate_scope_count_++;
 }
 
-AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
-    : AlwaysAllocateScope(isolate->heap()) {}
-
 AlwaysAllocateScope::~AlwaysAllocateScope() {
   heap_->always_allocate_scope_count_--;
 }
 
+AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
+    : scope_(heap) {}
+
 CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
     : heap_(heap) {
   if (heap_->write_protect_code_memory()) {
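
The scope itself is just a RAII counter, as the hunk above shows. As a
rough sketch of how allocation consults that counter (the accessor below
mirrors Heap's, while the comment paraphrases the slow path rather than
quoting it):

  bool Heap::always_allocate() { return always_allocate_scope_count_ != 0; }
  // While the count is non-zero, a failing allocation expands the space
  // instead of returning a retry result, so no GC is ever triggered.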


@@ -2263,7 +2263,7 @@ void Heap::MinorMarkCompact() {
   LOG(isolate_, ResourceEvent("MinorMarkCompact", "begin"));
 
   TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
-  AlwaysAllocateScope always_allocate(isolate());
+  AlwaysAllocateScope always_allocate(this);
   IncrementalMarking::PauseBlackAllocationScope pause_black_allocation(
       incremental_marking());
   ConcurrentMarking::PauseScope pause_scope(concurrent_marking());
@@ -2380,7 +2380,7 @@ void Heap::Scavenge() {
   // There are soft limits in the allocation code, designed to trigger a mark
   // sweep collection by failing allocations. There is no sense in trying to
   // trigger one during scavenge: scavenges allocation should always succeed.
-  AlwaysAllocateScope scope(isolate());
+  AlwaysAllocateScope scope(this);
 
   // Bump-pointer allocations done during scavenge are not real allocations.
   // Pause the inline allocation steps.
@@ -5063,7 +5063,7 @@ HeapObject Heap::AllocateRawWithRetryOrFailSlowPath(
   isolate()->counters()->gc_last_resort_from_handles()->Increment();
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
-    AlwaysAllocateScope scope(isolate());
+    AlwaysAllocateScope scope(this);
     alloc = AllocateRaw(size, allocation, origin, alignment);
   }
   if (alloc.To(&result)) {
@@ -5097,7 +5097,7 @@ HeapObject Heap::AllocateRawCodeInLargeObjectSpace(int size) {
   isolate()->counters()->gc_last_resort_from_handles()->Increment();
   CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
   {
-    AlwaysAllocateScope scope(isolate());
+    AlwaysAllocateScope scope(this);
     alloc = code_lo_space()->AllocateRaw(size);
   }
   if (alloc.To(&result)) {


@@ -2276,16 +2276,33 @@ class HeapStats {
   intptr_t* end_marker;  // 27
 };
 
+// Disables GC for all allocations. It should not be used
+// outside heap, deserializer, and isolate bootstrap.
+// Use AlwaysAllocateScopeForTesting in tests.
 class AlwaysAllocateScope {
  public:
-  explicit inline AlwaysAllocateScope(Heap* heap);
-  explicit inline AlwaysAllocateScope(Isolate* isolate);
   inline ~AlwaysAllocateScope();
 
  private:
+  friend class AlwaysAllocateScopeForTesting;
+  friend class Deserializer;
+  friend class DeserializerAllocator;
+  friend class Evacuator;
+  friend class Heap;
+  friend class Isolate;
+
+  explicit inline AlwaysAllocateScope(Heap* heap);
+
   Heap* heap_;
 };
 
+class AlwaysAllocateScopeForTesting {
+ public:
+  explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
+
+ private:
+  AlwaysAllocateScope scope_;
+};
+
 // The CodeSpaceMemoryModificationScope can only be used by the main thread.
 class CodeSpaceMemoryModificationScope {
  public:

@@ -2940,7 +2940,7 @@ void Evacuator::EvacuatePage(MemoryChunk* chunk) {
   intptr_t saved_live_bytes = 0;
   double evacuation_time = 0.0;
   {
-    AlwaysAllocateScope always_allocate(heap()->isolate());
+    AlwaysAllocateScope always_allocate(heap());
     TimedScope timed_scope(&evacuation_time);
     RawEvacuatePage(chunk, &saved_live_bytes);
   }


@@ -730,7 +730,6 @@ RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
   HandleScope scope(isolate);
   Heap* heap = isolate->heap();
   NewSpace* space = heap->new_space();
-  AlwaysAllocateScope always_allocate(heap);
   do {
     FillUpOneNewSpacePage(isolate, heap);
   } while (space->AddFreshPage());


@@ -46,8 +46,8 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
   // we wrap the allocator function in an AlwaysAllocateScope. Test that
   // all allocations succeed immediately without any retry.
   CcTest::CollectAllAvailableGarbage();
-  AlwaysAllocateScope scope(CcTest::i_isolate());
   Heap* heap = CcTest::heap();
+  AlwaysAllocateScopeForTesting scope(heap);
   int size = FixedArray::SizeFor(100);
   // Young generation.
   HeapObject obj =


@@ -1576,7 +1576,7 @@ HEAP_TEST(TestSizeOfObjects) {
   // Allocate objects on several different old-space pages so that
   // concurrent sweeper threads will be busy sweeping the old space on
   // subsequent GC runs.
-  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  AlwaysAllocateScopeForTesting always_allocate(heap);
   int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
   for (int i = 1; i <= 100; i++) {
     isolate->factory()->NewFixedArray(8192, AllocationType::kOld);
@@ -2298,7 +2298,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
   heap::SimulateFullSpace(CcTest::heap()->new_space());
-  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  AlwaysAllocateScopeForTesting always_allocate(CcTest::heap());
   v8::Local<v8::Value> res = CompileRun(
       "function c(x) {"
       "  this.x = x;"
@@ -2822,7 +2822,7 @@ TEST(Regress1465) {
   CompileRun("function F() {}");
   {
-    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+    AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
     for (int i = 0; i < transitions_count; i++) {
       EmbeddedVector<char, 64> buffer;
       SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
@@ -2860,7 +2860,7 @@ static i::Handle<JSObject> GetByName(const char* name) {
 #ifdef DEBUG
 static void AddTransitions(int transitions_count) {
-  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
   for (int i = 0; i < transitions_count; i++) {
     EmbeddedVector<char, 64> buffer;
     SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
@@ -3010,7 +3010,7 @@ TEST(ReleaseOverReservedPages) {
   const int initial_page_count = old_space->CountTotalPages();
   const int overall_page_count = number_of_test_pages + initial_page_count;
   for (int i = 0; i < number_of_test_pages; i++) {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     factory->NewFixedArray(1, AllocationType::kOld);
   }
@@ -3587,7 +3587,7 @@ TEST(Regress169928) {
   // This should crash with a protection violation if we are running a build
   // with the bug.
-  AlwaysAllocateScope aa_scope(isolate);
+  AlwaysAllocateScopeForTesting aa_scope(isolate->heap());
   v8::Script::Compile(env.local(), mote_code_string)
       .ToLocalChecked()
       ->Run(env.local())
@@ -5129,7 +5129,7 @@ void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
   CHECK(IsAligned(bytes, kTaggedSize));
   Factory* factory = isolate->factory();
   HandleScope scope(isolate);
-  AlwaysAllocateScope always_allocate(isolate);
+  AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
   int elements =
       static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
   Handle<FixedArray> array = factory->NewFixedArray(
@@ -5419,7 +5419,7 @@ HEAP_TEST(Regress589413) {
   {
     // Ensure that incremental marking is not started unexpectedly.
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
 
     // Make sure the byte arrays will be promoted on the next GC.
     CcTest::CollectGarbage(NEW_SPACE);
@@ -5649,7 +5649,7 @@ TEST(Regress615489) {
   CHECK(marking->IsMarking());
   marking->StartBlackAllocationForTesting();
   {
-    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     v8::HandleScope inner(CcTest::isolate());
     isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
   }
@@ -6389,13 +6389,13 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
   heap::SimulateIncrementalMarking(heap, false);
   Handle<Map> map;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
   }
   heap->incremental_marking()->StartBlackAllocationForTesting();
   Handle<HeapObject> object;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     object = handle(isolate->factory()->NewForTest(map, AllocationType::kOld),
                     isolate);
   }


@@ -24,12 +24,11 @@ Page* HeapTester::AllocateByteArraysOnPage(
   const int kLength = 256 - ByteArray::kHeaderSize;
   const int kSize = ByteArray::SizeFor(kLength);
   CHECK_EQ(kSize, 256);
-  Isolate* isolate = heap->isolate();
   PagedSpace* old_space = heap->old_space();
   Page* page;
   // Fill a page with byte arrays.
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     ByteArray byte_array;
     CHECK(AllocateByteArrayForTest(heap, kLength, AllocationType::kOld)
@@ -181,7 +180,7 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
 Handle<FixedArray> AllocateArrayOnFreshPage(Isolate* isolate,
                                             PagedSpace* old_space, int length) {
-  AlwaysAllocateScope always_allocate(isolate);
+  AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
   heap::SimulateFullSpace(old_space);
   return isolate->factory()->NewFixedArray(length, AllocationType::kOld);
 }
@@ -242,7 +241,7 @@ HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
       AllocateArrayOnEvacuationCandidate(isolate, old_space, 1);
   Handle<FixedArray> trimmed;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     trimmed = factory->NewFixedArray(
         kMaxRegularHeapObjectSize / kTaggedSize + 100, AllocationType::kOld);
     DCHECK(MemoryChunk::FromHeapObject(*trimmed)->InLargeObjectSpace());
@@ -319,7 +318,7 @@ HEAP_TEST(InvalidatedSlotsFastToSlow) {
       AllocateArrayOnFreshPage(isolate, old_space, 1);
   Handle<JSObject> obj;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     Handle<JSFunction> function = factory->NewFunctionForTest(name);
     function->shared().set_expected_nof_properties(3);
     obj = factory->NewJSObject(function, AllocationType::kOld);


@@ -568,7 +568,7 @@ HEAP_TEST(Regress777177) {
   {
     // Ensure a new linear allocation area on a fresh page.
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(old_space);
     AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
     HeapObject obj = result.ToObjectChecked();


@@ -13751,7 +13751,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
   const int kIterations = 10;
   for (int i = 0; i < kIterations; ++i) {
     LocalContext env(isolate);
-    i::AlwaysAllocateScope always_allocate(i_isolate);
+    i::AlwaysAllocateScopeForTesting always_allocate(heap);
     CompileRun(script);
 
     // Keep a strong reference to the code object in the handle scope.
@@ -16448,7 +16448,7 @@ TEST(RecursionWithSourceURLInMessageScriptResourceNameOrSourceURL) {
 static void CreateGarbageInOldSpace() {
   i::Factory* factory = CcTest::i_isolate()->factory();
   v8::HandleScope scope(CcTest::isolate());
-  i::AlwaysAllocateScope always_allocate(CcTest::i_isolate());
+  i::AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
   for (int i = 0; i < 1000; i++) {
     factory->NewFixedArray(1000, i::AllocationType::kOld);
   }


@@ -1871,7 +1871,7 @@ TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) {
   Handle<String> moving_object;
   Page* ec_page;
   {
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(heap);
     heap::SimulateFullSpace(heap->old_space());
     moving_object = isolate->factory()->InternalizeString(
         isolate->factory()->NewStringFromAsciiChecked("happy_hippo"));


@@ -686,7 +686,7 @@ void TestStringCharacterStream(BuildString build, int test_cases) {
   for (int i = 0; i < test_cases; i++) {
     printf("%d\n", i);
     HandleScope inner_scope(isolate);
-    AlwaysAllocateScope always_allocate(isolate);
+    AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
     // Build flat version of cons string.
     Handle<String> flat_string = build(i, &data);
     ConsStringStats flat_string_stats;