Reland "[heap] Ensure that OOM callback is called before OOM."

This relands commit ed3636e21bc772fec35deefc90850dc5bf850775.

Original change's description:
> [heap] Ensure that OOM callback is called before OOM.
>
> This patch also fixes MaxReserved() to account for page headers and
> adds two tests for heap size and memory allocator size near OOM.
>
> Bug: chromium:824214
> Change-Id: I5bbe00a9d6a5798cdf4481861a10dca842244a63
> Reviewed-on: https://chromium-review.googlesource.com/973614
> Reviewed-by: Hannes Payer <hpayer@chromium.org>
> Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#52184}

TBR=machenbach@chromium.org


Change-Id: Idc3086a8b9dd30038f48cae64c9a8eb0b45ee372
Reviewed-on: https://chromium-review.googlesource.com/977913
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52200}
Author: Ulan Degenbaev, 2018-03-23 21:19:37 +01:00 (committed by Commit Bot)
Commit: 815c65965f (parent: bce46fe6bb)
4 changed files with 112 additions and 11 deletions
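
For context on the MaxReserved() fix mentioned in the description: the old value summed the space budgets in terms of allocatable object memory, while the actual reservation also has to cover each page's header. Below is a minimal standalone sketch of that scaling; the page constants are illustrative assumptions, not V8's real values.

// Sketch of the page-header scaling behind the new Heap::MaxReserved().
// kPageSize and kPageHeaderSize are assumed values for illustration only.
#include <cstddef>
#include <cstdio>

namespace {
constexpr size_t KB = 1024;
constexpr size_t MB = 1024 * KB;
constexpr size_t kPageSize = 512 * KB;      // assumed page size
constexpr size_t kPageHeaderSize = 2 * KB;  // assumed per-page header
constexpr size_t kAllocatableMemory = kPageSize - kPageHeaderSize;

size_t MaxReserved(size_t max_semi_space_size, size_t max_old_generation_size) {
  // Scale the object budget by pageSize / allocatablePerPage so the
  // reservation also covers the headers of the pages backing it.
  const double kFactor = kPageSize * 1.0 / kAllocatableMemory;
  return static_cast<size_t>(
      (2 * max_semi_space_size + max_old_generation_size) * kFactor);
}
}  // namespace

int main() {
  // With these assumed constants, 2 * 16 MB of semi-spaces plus a 700 MB old
  // generation reserve 732 MB * (512 / 510), i.e. a few extra MB for headers.
  std::printf("%zu bytes\n", MaxReserved(16 * MB, 700 * MB));
  return 0;
}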

@@ -251,6 +251,12 @@ Heap::Heap()
RememberUnmappedPage(nullptr, false);
}

size_t Heap::MaxReserved() {
const double kFactor = Page::kPageSize * 1.0 / Page::kAllocatableMemory;
return static_cast<size_t>(
(2 * max_semi_space_size_ + max_old_generation_size_) * kFactor);
}

size_t Heap::Capacity() {
if (!HasBeenSetUp()) return 0;
@@ -1272,12 +1278,16 @@ void Heap::EnsureFillerObjectAtTop() {
bool Heap::CollectGarbage(AllocationSpace space,
GarbageCollectionReason gc_reason,
const v8::GCCallbackFlags gc_callback_flags) {
// The VM is in the GC state until exiting this function.
VMState<GC> state(isolate());
const char* collector_reason = nullptr;
GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
if (!CanExpandOldGeneration(new_space()->Capacity())) {
InvokeOutOfMemoryCallback();
}
// The VM is in the GC state until exiting this function.
VMState<GC> state(isolate());
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
// Reset the allocation timeout, but make sure to allow at least a few
// allocations after a collection. The reason for this is that we have a lot

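To make the reordering above easier to follow: the OOM callback now runs before the collector enters the GC state whenever the old generation could not absorb a full promotion of the new space. A toy model of that ordering is sketched below; the names and the limit-raising reaction are illustrative only (the real hook is registered through Heap::SetOutOfMemoryCallback, as the tests further down show).

// Toy model of "invoke the OOM callback before actually running out".
#include <cstddef>
#include <cstdio>

struct ToyHeap {
  size_t old_generation_capacity;
  size_t old_generation_limit;
  size_t new_space_capacity;
  void (*oom_callback)(ToyHeap*);

  bool CanExpandOldGeneration(size_t size) const {
    return old_generation_capacity + size <= old_generation_limit;
  }

  void CollectGarbage() {
    // Mirrors the reordered heap.cc logic: if promoting the whole new space
    // could overflow the old generation, tell the embedder first.
    if (!CanExpandOldGeneration(new_space_capacity) && oom_callback != nullptr) {
      oom_callback(this);
    }
    // ... the actual collection would run here ...
  }
};

int main() {
  ToyHeap heap{/*capacity MB*/ 298, /*limit MB*/ 300, /*new space MB*/ 16,
               [](ToyHeap* h) {
                 std::puts("near OOM: callback may raise the limit");
                 h->old_generation_limit += 64;  // e.g. a debugging hook
               }};
  heap.CollectGarbage();  // prints the message before collecting
  return 0;
}
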
@@ -1396,9 +1396,7 @@ class Heap {
// ===========================================================================
// Returns the maximum amount of memory reserved for the heap.
size_t MaxReserved() {
return 2 * max_semi_space_size_ + max_old_generation_size_;
}
size_t MaxReserved();
size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
size_t MaxOldGenerationSize() { return max_old_generation_size_; }
@@ -2067,10 +2065,6 @@ class Heap {
bool CanExpandOldGeneration(size_t size);
bool IsCloseToOutOfMemory(size_t slack) {
return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
}
bool ShouldExpandOldGenerationOnSlowAllocation();
enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };

@@ -945,7 +945,7 @@ size_t IncrementalMarking::StepSizeToMakeProgress() {
const size_t kTargetStepCountAtOOM = 32;
size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;
if (heap()->IsCloseToOutOfMemory(oom_slack)) {
if (!heap()->CanExpandOldGeneration(oom_slack)) {
return heap()->PromotedSpaceSizeOfObjects() / kTargetStepCountAtOOM;
}

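The hunk above replaces heap()->IsCloseToOutOfMemory(oom_slack) with !heap()->CanExpandOldGeneration(oom_slack). The removed helper's body (visible in the heap.h hunk) was OldGenerationCapacity() + slack >= MaxOldGenerationSize(), so the two conditions agree everywhere except exactly at the limit. A small self-contained check of that equivalence, assuming CanExpandOldGeneration is a plain capacity comparison (the real implementation may consult additional state):

// Checks that the old predicate equals the negated new one away from the
// exact boundary. CanExpandOldGeneration's body is an assumption here.
#include <cassert>
#include <cstddef>

struct ToyHeap {
  size_t old_generation_capacity;
  size_t max_old_generation_size;

  // Body of the helper removed from heap.h in this change.
  bool IsCloseToOutOfMemory(size_t slack) const {
    return old_generation_capacity + slack >= max_old_generation_size;
  }
  // Assumed shape of the surviving helper: can the old generation still grow
  // by `size` without crossing the hard limit?
  bool CanExpandOldGeneration(size_t size) const {
    return old_generation_capacity + size <= max_old_generation_size;
  }
};

int main() {
  for (size_t capacity = 0; capacity <= 100; ++capacity) {
    for (size_t slack = 0; slack <= 50; ++slack) {
      ToyHeap heap{capacity, 100};
      // The new condition is slightly more permissive: at exactly
      // capacity + slack == limit it still allows expansion.
      if (capacity + slack != heap.max_old_generation_size) {
        assert(heap.IsCloseToOutOfMemory(slack) ==
               !heap.CanExpandOldGeneration(slack));
      }
    }
  }
  return 0;
}
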
@@ -5914,6 +5914,10 @@ void OOMCallback(const char* location, bool is_heap_oom) {
}

UNINITIALIZED_TEST(OutOfMemory) {
if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) return;
#endif
FLAG_max_old_space_size = kHeapLimit / MB;
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
@@ -5972,6 +5976,99 @@ HEAP_TEST(Regress779503) {
CcTest::heap()->delay_sweeper_tasks_for_testing_ = false;
}

struct OutOfMemoryState {
Heap* heap;
bool oom_triggered;
size_t old_generation_capacity_at_oom;
size_t memory_allocator_size_at_oom;
};

void OutOfMemoryCallback(void* raw_state) {
OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
Heap* heap = state->heap;
state->oom_triggered = true;
state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
heap->IncreaseHeapLimitForDebugging();
}

size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
// Size to capacity factor.
double factor = Page::kPageSize * 1.0 / Page::kAllocatableMemory;
// Some tables (e.g. deoptimization table) are allocated directly with the
// memory allocator. Allow some slack to account for them.
size_t slack = 1 * MB;
return static_cast<size_t>(capacity * factor) + slack;
}

UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) return;
#endif
const size_t kOldGenerationLimit = 300 * MB;
FLAG_max_old_space_size = kOldGenerationLimit / MB;
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
Isolate* isolate =
reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
OutOfMemoryState state;
state.heap = heap;
state.oom_triggered = false;
heap->SetOutOfMemoryCallback(OutOfMemoryCallback, &state);
{
HandleScope handle_scope(isolate);
while (!state.oom_triggered) {
factory->NewFixedArray(100);
}
}
CHECK_LE(state.old_generation_capacity_at_oom,
kOldGenerationLimit + heap->new_space()->Capacity());
CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
heap->new_space()->Capacity());
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
2 * heap->new_space()->Capacity()));
reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}

UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) return;
#endif
const size_t kOldGenerationLimit = 300 * MB;
FLAG_max_old_space_size = kOldGenerationLimit / MB;
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
Isolate* isolate =
reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
Heap* heap = isolate->heap();
Factory* factory = isolate->factory();
OutOfMemoryState state;
state.heap = heap;
state.oom_triggered = false;
heap->SetOutOfMemoryCallback(OutOfMemoryCallback, &state);
const int kFixedArrayLength = 1000000;
{
HandleScope handle_scope(isolate);
while (!state.oom_triggered) {
factory->NewFixedArray(kFixedArrayLength);
}
}
CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
FixedArray::SizeFor(kFixedArrayLength));
CHECK_LE(
state.memory_allocator_size_at_oom,
MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
2 * heap->new_space()->Capacity()));
reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
} // namespace heap
} // namespace internal
} // namespace v8
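
As a closing note on the bounds asserted by OutOfMemorySmallObjects: the two CHECK_LEs bracket the old-generation capacity recorded at OOM within one new-space capacity of the configured 300 MB limit. A tiny numeric illustration, using an assumed 16 MB new-space capacity and an assumed 290 MB capacity at the time the callback fired:

// Numeric sanity check of the small-objects test bounds; the 16 MB and
// 290 MB figures are assumptions for illustration.
#include <cassert>
#include <cstddef>

int main() {
  constexpr size_t MB = 1024 * 1024;
  constexpr size_t kOldGenerationLimit = 300 * MB;  // as configured in the test
  constexpr size_t kNewSpaceCapacity = 16 * MB;     // assumed
  const size_t old_generation_capacity_at_oom = 290 * MB;  // assumed

  // Upper bound: capacity at OOM may exceed the limit by at most one
  // new-space worth of promoted objects.
  assert(old_generation_capacity_at_oom <=
         kOldGenerationLimit + kNewSpaceCapacity);
  // Lower bound: the callback must not fire while more than one new-space
  // worth of headroom remains below the limit.
  assert(kOldGenerationLimit <=
         old_generation_capacity_at_oom + kNewSpaceCapacity);
  return 0;
}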