From 47684fe852eb4f2de546b656b8268d0a161b57c9 Mon Sep 17 00:00:00 2001
From: jgruber <jgruber@chromium.org>
Date: Tue, 17 Jan 2017 06:19:58 -0800
Subject: [PATCH] [heap] Don't allocate immovable code in LO space during
 serialization

Background: the first page of each space is implicitly immovable.
Recently, our builtin code objects have reached a size at which we
fill up the first page of code space during initialization. Once that
occurs, newly requested allocations of immovable code are allocated in
a large object space page of 512K.

This CL mitigates these effects by simply marking pages as immovable
during snapshot creation instead of going into LO space.

On snapshot builds, this should just work: deserialized pages are
trimmed and marked immovable when deserialization finishes. However,
non-snapshot builds and allocations of immovable CEntryStub code at
runtime are still affected.

BUG=v8:5831

Review-Url: https://codereview.chromium.org/2635973002
Cr-Commit-Position: refs/heads/master@{#42411}
---
 src/code-stubs.cc              |  3 +-
 src/heap/heap.cc               | 31 +++++++++++++-------
 src/heap/heap.h                |  2 ++
 src/isolate.h                  |  1 +
 test/cctest/heap/heap-tester.h |  1 +
 test/cctest/heap/test-heap.cc  | 53 ++++++++++++++++++++++++++++++++++
 6 files changed, 79 insertions(+), 12 deletions(-)

diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 92b46bae0f..032fdb30b3 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -193,8 +193,7 @@ Handle<Code> CodeStub::GetCode() {
   }
 
   Activate(code);
-  DCHECK(!NeedsImmovableCode() ||
-         heap->lo_space()->Contains(code) ||
+  DCHECK(!NeedsImmovableCode() || Heap::IsImmovable(code) ||
          heap->code_space()->FirstPage()->Contains(code->address()));
   return Handle<Code>(code, isolate());
 }
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index c309030b6c..c4a1baa6d3 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3160,6 +3160,11 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {
   return Page::FromAddress(address)->SweepingDone();
 }
 
+bool Heap::IsImmovable(HeapObject* object) {
+  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+  return chunk->NeverEvacuate() || chunk->owner()->identity() == LO_SPACE;
+}
+
 void Heap::AdjustLiveBytes(HeapObject* object, int by) {
   // As long as the inspected object is black and we are currently not iterating
   // the heap using HeapIterator, we can update the live byte count. We cannot
@@ -3401,18 +3406,24 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   if (!allocation.To(&result)) return allocation;
   if (immovable) {
     Address address = result->address();
+    MemoryChunk* chunk = MemoryChunk::FromAddress(address);
     // Code objects which should stay at a fixed address are allocated either
     // in the first page of code space (objects on the first page of each space
-    // are never moved) or in large object space.
-    if (!code_space_->FirstPage()->Contains(address) &&
-        MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) {
-      // Discard the first code allocation, which was on a page where it could
-      // be moved.
-      CreateFillerObjectAt(result->address(), object_size,
-                           ClearRecordedSlots::kNo);
-      allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
-      if (!allocation.To(&result)) return allocation;
-      OnAllocationEvent(result, object_size);
+    // are never moved), in large object space, or (during snapshot creation)
+    // the containing page is marked as immovable.
+    if (!Heap::IsImmovable(result) &&
+        !code_space_->FirstPage()->Contains(address)) {
+      if (isolate()->serializer_enabled()) {
+        chunk->MarkNeverEvacuate();
+      } else {
+        // Discard the first code allocation, which was on a page where it could
+        // be moved.
+        CreateFillerObjectAt(result->address(), object_size,
+                             ClearRecordedSlots::kNo);
+        allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
+        if (!allocation.To(&result)) return allocation;
+        OnAllocationEvent(result, object_size);
+      }
     }
   }
 
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 1aa9feeafb..3805085e70 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -753,6 +753,8 @@ class Heap {
 
   bool CanMoveObjectStart(HeapObject* object);
 
+  static bool IsImmovable(HeapObject* object);
+
   // Maintain consistency of live bytes during incremental marking.
   void AdjustLiveBytes(HeapObject* object, int by);
 
diff --git a/src/isolate.h b/src/isolate.h
index 64fadbb24f..441d995d69 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -1474,6 +1474,7 @@ class Isolate {
 
   friend class ExecutionAccess;
   friend class HandleScopeImplementer;
+  friend class HeapTester;
   friend class OptimizingCompileDispatcher;
   friend class SweeperThread;
   friend class ThreadManager;
diff --git a/test/cctest/heap/heap-tester.h b/test/cctest/heap/heap-tester.h
index 99d39ca7ab..acfbe92bb6 100644
--- a/test/cctest/heap/heap-tester.h
+++ b/test/cctest/heap/heap-tester.h
@@ -34,6 +34,7 @@
   V(Regress589413) \
   V(Regress658718) \
   V(Regress670675) \
+  V(Regress5831) \
   V(WriteBarriersInCopyJSObject)
 
 #define HEAP_TEST(Name) \
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index 9851512f73..94e0cfc6bd 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -28,6 +28,7 @@
 #include <stdlib.h>
 #include <utility>
 
+#include "src/code-stubs.h"
 #include "src/compilation-cache.h"
 #include "src/context-measure.h"
 #include "src/deoptimizer.h"
@@ -6837,5 +6838,57 @@ HEAP_TEST(Regress670675) {
   DCHECK(marking->IsStopped());
 }
 
+HEAP_TEST(Regress5831) {
+  CcTest::InitializeVM();
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = CcTest::i_isolate();
+  HandleScope handle_scope(isolate);
+
+  // Used to ensure that the first code space page remains filled.
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(32);
+
+  {
+    // Ensure that the first code space page is full.
+    CEntryStub stub(isolate, 1);
+    Handle<Code> code = stub.GetCode();
+
+    int i = 0;
+    array = FixedArray::SetAndGrow(array, i++, code);
+
+    while (heap->code_space()->FirstPage()->Contains(code->address())) {
+      code = isolate->factory()->CopyCode(code);
+      array = FixedArray::SetAndGrow(array, i++, code);
+    }
+  }
+
+  class ImmovableCEntryStub : public i::CEntryStub {
+   public:
+    explicit ImmovableCEntryStub(i::Isolate* isolate)
+        : i::CEntryStub(isolate, 3, i::kSaveFPRegs, i::kArgvOnStack, true) {}
+    bool NeedsImmovableCode() override { return true; }
+  };
+
+  ImmovableCEntryStub stub(isolate);
+
+  {
+    // Make sure the code object has not yet been generated.
+    Code* code;
+    CHECK(!stub.FindCodeInCache(&code));
+  }
+
+  // Fake a serializer run.
+  isolate->serializer_enabled_ = true;
+
+  // Generate the code.
+  Handle<Code> code = stub.GetCode();
+  CHECK(code->Size() <= i::kMaxRegularHeapObjectSize);
+  CHECK(!heap->code_space()->FirstPage()->Contains(code->address()));
+
+  // Ensure it's not in large object space.
+  MemoryChunk* chunk = MemoryChunk::FromAddress(code->address());
+  CHECK(chunk->owner()->identity() != LO_SPACE);
+  CHECK(chunk->NeverEvacuate());
+}
+
 }  // namespace internal
 }  // namespace v8
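Note: for illustration only, below is a minimal standalone C++ sketch of the
allocation policy this CL implements. Every name in it (ToyChunk,
SpaceIdentity, IsImmovableToy, AllocateImmovableCodeToy, ...) is a
hypothetical stand-in for V8's MemoryChunk/Space/Heap internals, not the real
API; it models only the decision flow: an allocation that must not move stays
put if its page is already immovable, gets its page pinned when the serializer
is enabled, and otherwise falls back to a large object space page.

// toy_immovable_alloc.cc - a hypothetical model, not V8 code.
// Build: g++ -std=c++11 toy_immovable_alloc.cc -o toy && ./toy
#include <cstdio>

enum SpaceIdentity { CODE_SPACE, LO_SPACE };

// Stand-in for a heap page: knows its owning space, whether it is the
// (implicitly immovable) first page of that space, and whether it has been
// pinned so the evacuator never moves objects off of it.
struct ToyChunk {
  SpaceIdentity owner;
  bool is_first_page;
  bool never_evacuate;
};

// Mirrors the spirit of the new Heap::IsImmovable(): a chunk is immovable
// if it was explicitly pinned or if it lives in large object space.
bool IsImmovableToy(const ToyChunk* chunk) {
  return chunk->never_evacuate || chunk->owner == LO_SPACE;
}

// Models AllocateCode(..., immovable=true) after this CL. 'landed_on' is the
// page the initial allocation happened to land on; 'lo_page' stands in for a
// freshly allocated large object space page.
ToyChunk* AllocateImmovableCodeToy(ToyChunk* landed_on,
                                   bool serializer_enabled,
                                   ToyChunk* lo_page) {
  if (IsImmovableToy(landed_on) || landed_on->is_first_page) {
    return landed_on;  // already guaranteed not to move
  }
  if (serializer_enabled) {
    // New behavior: pin the containing page instead of retrying in LO space.
    landed_on->never_evacuate = true;
    return landed_on;
  }
  // Runtime behavior (unchanged): discard and reallocate in LO space.
  return lo_page;
}

int main() {
  ToyChunk code_page = {CODE_SPACE, false, false};
  ToyChunk lo_page = {LO_SPACE, true, false};

  // Snapshot creation: the code stays on its code space page, now pinned.
  ToyChunk* r1 = AllocateImmovableCodeToy(&code_page, true, &lo_page);
  std::printf("serializer: LO space=%d pinned=%d\n", r1->owner == LO_SPACE,
              r1->never_evacuate);

  // Runtime: still falls back to a 512K large object space page.
  ToyChunk code_page2 = {CODE_SPACE, false, false};
  ToyChunk* r2 = AllocateImmovableCodeToy(&code_page2, false, &lo_page);
  std::printf("runtime: LO space=%d\n", r2->owner == LO_SPACE);
  return 0;
}

The runtime branch in the sketch corresponds to the limitation the commit
message calls out: non-snapshot builds and immovable CEntryStub allocations at
runtime still end up in LO space.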