Revert GC scheduling for external backing stores

Revert "Reland "[heap] Attempt to incorporate backing store counters into heap sizing and GC trigger stragery."" This reverts commit eb164dbd00.

Revert "[d8] Fixed external gc test (limit multiplied by number of isolates)." This reverts commit 38cbc26a75.

Revert "[heap] Fixed typo in method name." This reverts commit 263174af75.

Bug: chromium:845409, chromium:879045
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I555bcff2ad04ae23368c7b3999a237083010f9c6
Reviewed-on: https://chromium-review.googlesource.com/1196550
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55529}

parent 32745df15a
commit 8206187381
@@ -9380,7 +9380,7 @@ class Internals {
      kExternalMemoryLimitOffset + kApiInt64Size;
  static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
                                         kApiInt64Size + kApiInt64Size +
                                         kApiInt64Size + kApiPointerSize;
                                         kApiPointerSize + kApiPointerSize;
  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
@@ -8359,8 +8359,7 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
  heap_statistics->malloced_memory_ =
      isolate->allocator()->GetCurrentMemoryUsage() +
      isolate->wasm_engine()->allocator()->GetCurrentMemoryUsage();
  heap_statistics->external_memory_ = isolate->heap()->external_memory() +
                                      isolate->heap()->backing_store_bytes();
  heap_statistics->external_memory_ = isolate->heap()->external_memory();
  heap_statistics->peak_malloced_memory_ =
      isolate->allocator()->GetMaxMemoryUsage() +
      isolate->wasm_engine()->allocator()->GetMaxMemoryUsage();
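This hunk restores HeapStatistics::external_memory_ to cover only embedder-reported external memory, dropping the backing-store component. For orientation, a minimal embedder-side sketch (not part of this CL; assumes a valid v8::Isolate*) of reading the counters this code populates:

```cpp
#include <cstdio>

#include "v8.h"  // include path depends on the embedder's build setup

// Sketch only: query the statistics filled in by GetHeapStatistics above.
void PrintMemoryCounters(v8::Isolate* isolate) {
  v8::HeapStatistics stats;
  isolate->GetHeapStatistics(&stats);
  // After this revert, external_memory() again reflects only memory reported
  // via AdjustAmountOfExternalAllocatedMemory, not tracked backing stores.
  printf("malloced: %zu, peak malloced: %zu, external: %zu\n",
         stats.malloced_memory(), stats.peak_malloced_memory(),
         stats.external_memory());
}
```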
src/d8.cc: 43 lines changed
@@ -135,7 +135,6 @@ class ShellArrayBufferAllocator : public ArrayBufferAllocatorBase {

// ArrayBuffer allocator that never allocates over 10MB.
class MockArrayBufferAllocator : public ArrayBufferAllocatorBase {
 protected:
  void* Allocate(size_t length) override {
    return ArrayBufferAllocatorBase::Allocate(Adjust(length));
  }

@@ -155,37 +154,6 @@ class MockArrayBufferAllocator : public ArrayBufferAllocatorBase {
  }
};

class MockArrayBufferAllocatiorWithLimit : public MockArrayBufferAllocator {
 public:
  explicit MockArrayBufferAllocatiorWithLimit(size_t allocation_limit)
      : space_left_(allocation_limit) {}

 protected:
  void* Allocate(size_t length) override {
    if (length > space_left_) {
      return nullptr;
    }
    space_left_ -= length;
    return MockArrayBufferAllocator::Allocate(length);
  }

  void* AllocateUninitialized(size_t length) override {
    if (length > space_left_) {
      return nullptr;
    }
    space_left_ -= length;
    return MockArrayBufferAllocator::AllocateUninitialized(length);
  }

  void Free(void* data, size_t length) override {
    space_left_ += length;
    return MockArrayBufferAllocator::Free(data, length);
  }

 private:
  std::atomic<size_t> space_left_;
};

// Predictable v8::Platform implementation. Worker threads are disabled, idle
// tasks are disallowed, and the time reported by {MonotonicallyIncreasingTime}
// is deterministic.

@@ -2821,11 +2789,6 @@ bool Shell::SetOptions(int argc, char* argv[]) {
               strcmp(argv[i], "--no-stress-background-compile") == 0) {
      options.stress_background_compile = false;
      argv[i] = nullptr;
    } else if (strncmp(argv[i], "--mock-arraybuffer-allocator-limit=", 35) ==
               0) {
      options.mock_arraybuffer_allocator = true;
      options.mock_arraybuffer_allocator_limit = atoi(argv[i] + 35);
      argv[i] = nullptr;
    } else if (strcmp(argv[i], "--noalways-opt") == 0 ||
               strcmp(argv[i], "--no-always-opt") == 0) {
      // No support for stressing if we can't use --always-opt.

@@ -3399,11 +3362,7 @@ int Shell::Main(int argc, char* argv[]) {
  Isolate::CreateParams create_params;
  ShellArrayBufferAllocator shell_array_buffer_allocator;
  MockArrayBufferAllocator mock_arraybuffer_allocator;
  MockArrayBufferAllocatiorWithLimit mock_arraybuffer_allocator_limit(
      options.mock_arraybuffer_allocator_limit * options.num_isolates);
  if (options.mock_arraybuffer_allocator_limit > 0) {
    Shell::array_buffer_allocator = &mock_arraybuffer_allocator_limit;
  } else if (options.mock_arraybuffer_allocator) {
  if (options.mock_arraybuffer_allocator) {
    Shell::array_buffer_allocator = &mock_arraybuffer_allocator;
  } else {
    Shell::array_buffer_allocator = &shell_array_buffer_allocator;
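The hunks above remove the limit-enforcing mock allocator together with the d8 flag `--mock-arraybuffer-allocator-limit=<bytes>` (which was scaled by the number of isolates). As a hedged, standalone sketch of the same capped-allocator pattern against the public v8::ArrayBuffer::Allocator interface; class and helper names here are illustrative, not from the CL:

```cpp
#include <atomic>
#include <cstdlib>

#include "v8.h"  // include path depends on the embedder's build setup

// Sketch of a capped ArrayBuffer allocator, mirroring the class this revert
// deletes from d8. Returns nullptr once the budget is exhausted, which makes
// JS ArrayBuffer allocation fail and typically triggers a GC attempt first.
class CappedArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
 public:
  explicit CappedArrayBufferAllocator(size_t limit) : space_left_(limit) {}

  void* Allocate(size_t length) override {
    if (!Reserve(length)) return nullptr;
    return calloc(length, 1);  // Allocate() must return zero-initialized memory.
  }

  void* AllocateUninitialized(size_t length) override {
    if (!Reserve(length)) return nullptr;
    return malloc(length);
  }

  void Free(void* data, size_t length) override {
    space_left_ += length;
    free(data);
  }

 private:
  bool Reserve(size_t length) {
    if (length > space_left_) return false;
    space_left_ -= length;
    return true;
  }

  std::atomic<size_t> space_left_;
};
```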
src/d8.h: 1 line changed
@@ -369,7 +369,6 @@ class ShellOptions {
  bool test_shell;
  bool expected_to_throw;
  bool mock_arraybuffer_allocator;
  int mock_arraybuffer_allocator_limit = 0;
  bool enable_inspector;
  int num_isolates;
  v8::ScriptCompiler::CompileOptions compile_options;
@@ -48,6 +48,8 @@ class ArrayBufferCollector::FreeingTask final : public CancelableTask {
};

void ArrayBufferCollector::FreeAllocationsOnBackgroundThread() {
  // TODO(wez): Remove backing-store from external memory accounting.
  heap_->account_external_memory_concurrently_freed();
  if (!heap_->IsTearingDown() && FLAG_concurrent_array_buffer_freeing) {
    V8::GetCurrentPlatform()->CallOnWorkerThread(
        base::make_unique<FreeingTask>(heap_));
@@ -30,6 +30,12 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
    DCHECK_NOT_NULL(tracker);
    tracker->Add(buffer, length);
  }

  // TODO(wez): Remove backing-store from external memory accounting.
  // We may go over the limit of externally allocated memory here. We call the
  // api function to trigger a GC in this case.
  reinterpret_cast<v8::Isolate*>(heap->isolate())
      ->AdjustAmountOfExternalAllocatedMemory(length);
}

void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {

@@ -43,6 +49,9 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
    DCHECK_NOT_NULL(tracker);
    tracker->Remove(buffer, length);
  }

  // TODO(wez): Remove backing-store from external memory accounting.
  heap->update_external_memory(-static_cast<intptr_t>(length));
}

Space* LocalArrayBufferTracker::space() { return page_->owner(); }

@@ -67,6 +76,10 @@ void LocalArrayBufferTracker::Free(Callback should_free) {
  if (freed_memory > 0) {
    page_->DecrementExternalBackingStoreBytes(
        ExternalBackingStoreType::kArrayBuffer, freed_memory);

    // TODO(wez): Remove backing-store from external memory accounting.
    page_->heap()->update_external_memory_concurrently_freed(
        static_cast<intptr_t>(freed_memory));
  }
}

@@ -86,6 +99,7 @@ void ArrayBufferTracker::FreeDead(Page* page, MarkingState* marking_state) {
void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
  page_->IncrementExternalBackingStoreBytes(
      ExternalBackingStoreType::kArrayBuffer, length);

  auto ret = array_buffers_.insert(
      {buffer,
       {buffer->backing_store(), length, buffer->backing_store(),

@@ -99,6 +113,7 @@ void LocalArrayBufferTracker::Add(JSArrayBuffer* buffer, size_t length) {
void LocalArrayBufferTracker::Remove(JSArrayBuffer* buffer, size_t length) {
  page_->DecrementExternalBackingStoreBytes(
      ExternalBackingStoreType::kArrayBuffer, length);

  TrackingData::iterator it = array_buffers_.find(buffer);
  // Check that we indeed find a key to remove.
  DCHECK(it != array_buffers_.end());
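With the dedicated backing-store counter gone, the tracker above goes back to reporting array-buffer memory through the public external-memory API. A minimal embedder-style sketch of that mechanism (illustrative only; `isolate` and buffer lifetime management are assumed to exist elsewhere):

```cpp
#include <cstdint>
#include <cstdlib>

#include "v8.h"  // include path depends on the embedder's build setup

// Sketch: tell V8 about memory it cannot see so GC can be scheduled sooner.
void* AllocateExternalBuffer(v8::Isolate* isolate, size_t length) {
  void* buffer = malloc(length);
  if (buffer != nullptr) {
    // Positive delta: external usage grew; V8 may trigger a GC in response.
    isolate->AdjustAmountOfExternalAllocatedMemory(
        static_cast<int64_t>(length));
  }
  return buffer;
}

void FreeExternalBuffer(v8::Isolate* isolate, void* buffer, size_t length) {
  free(buffer);
  // Negative delta: external usage shrank.
  isolate->AdjustAmountOfExternalAllocatedMemory(
      -static_cast<int64_t>(length));
}
```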
@@ -54,6 +54,7 @@ void LocalArrayBufferTracker::Process(Callback callback) {
        tracker->Add(new_buffer, length);
      }
      moved_memory += it->second.length;

    } else if (result == kRemoveEntry) {
      const size_t length = it->second.length;
      freed_memory += length;

@@ -67,6 +68,11 @@ void LocalArrayBufferTracker::Process(Callback callback) {
      UNREACHABLE();
    }
  }
  if (moved_memory || freed_memory) {
    // TODO(wez): Remove backing-store from external memory accounting.
    page_->heap()->update_external_memory_concurrently_freed(
        static_cast<intptr_t>(freed_memory));
  }

  array_buffers_.swap(kept_array_buffers);
@@ -66,10 +66,35 @@ double MemoryController::GrowingFactor(double gc_speed, double mutator_speed,
  return factor;
}

double MemoryController::MaxGrowingFactor(size_t curr_max_size) {
  const double min_small_factor = 1.3;
  const double max_small_factor = 2.0;
  const double high_factor = 4.0;

  size_t max_size_in_mb = curr_max_size / MB;
  max_size_in_mb = Max(max_size_in_mb, kMinSize);

  // If we are on a device with lots of memory, we allow a high heap
  // growing factor.
  if (max_size_in_mb >= kMaxSize) {
    return high_factor;
  }

  DCHECK_GE(max_size_in_mb, kMinSize);
  DCHECK_LT(max_size_in_mb, kMaxSize);

  // On smaller devices we linearly scale the factor: (X-A)/(B-A)*(D-C)+C
  double factor = (max_size_in_mb - kMinSize) *
                      (max_small_factor - min_small_factor) /
                      (kMaxSize - kMinSize) +
                  min_small_factor;
  return factor;
}

size_t MemoryController::CalculateAllocationLimit(
    size_t curr_size, size_t max_size, double max_factor, double gc_speed,
    double mutator_speed, size_t new_space_capacity,
    Heap::HeapGrowingMode growing_mode) {
    size_t curr_size, size_t max_size, double gc_speed, double mutator_speed,
    size_t new_space_capacity, Heap::HeapGrowingMode growing_mode) {
  double max_factor = MaxGrowingFactor(max_size);
  double factor = GrowingFactor(gc_speed, mutator_speed, max_factor);

  if (FLAG_trace_gc_verbose) {
@@ -100,7 +125,7 @@ size_t MemoryController::CalculateAllocationLimit(
      MinimumAllocationLimitGrowingStep(growing_mode));
  limit += new_space_capacity;
  uint64_t halfway_to_the_max =
      (static_cast<uint64_t>(curr_size) + static_cast<uint64_t>(max_size)) / 2;
      (static_cast<uint64_t>(curr_size) + max_size) / 2;
  size_t result = static_cast<size_t>(Min(limit, halfway_to_the_max));

  if (FLAG_trace_gc_verbose) {
@@ -122,30 +147,5 @@ size_t MemoryController::MinimumAllocationLimitGrowingStep(
                                   : kRegularAllocationLimitGrowingStep);
}

double HeapController::MaxGrowingFactor(size_t curr_max_size) {
  const double min_small_factor = 1.3;
  const double max_small_factor = 2.0;
  const double high_factor = 4.0;

  size_t max_size_in_mb = curr_max_size / MB;
  max_size_in_mb = Max(max_size_in_mb, kMinSize);

  // If we are on a device with lots of memory, we allow a high heap
  // growing factor.
  if (max_size_in_mb >= kMaxSize) {
    return high_factor;
  }

  DCHECK_GE(max_size_in_mb, kMinSize);
  DCHECK_LT(max_size_in_mb, kMaxSize);

  // On smaller devices we linearly scale the factor: (X-A)/(B-A)*(D-C)+C
  double factor = (max_size_in_mb - kMinSize) *
                      (max_small_factor - min_small_factor) /
                      (kMaxSize - kMinSize) +
                  min_small_factor;
  return factor;
}

}  // namespace internal
}  // namespace v8
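A worked example of the linear-scaling comment `(X-A)/(B-A)*(D-C)+C` in MaxGrowingFactor, assuming a 64-bit build where Heap::kPointerMultiplier is 2 so that kMinSize = 256 and kMaxSize = 2048 (both in MB): for a 1024 MB maximum heap, factor = (1024 - 256) * (2.0 - 1.3) / (2048 - 256) + 1.3 = 768 * 0.7 / 1792 + 1.3 = 0.3 + 1.3 = 1.6, which lines up with the 1.600 expectation in the MaxHeapGrowingFactor unit test further down.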
@@ -13,51 +13,35 @@
namespace v8 {
namespace internal {

enum AvailableAllocationSpace {
  kAboveAllocationLimit,
  kCloseToAllocationLimit,
  kBelowAllocationLimit,
};

class V8_EXPORT_PRIVATE MemoryController {
 public:
  MemoryController(Heap* heap, double min_growing_factor,
                   double max_growing_factor,
                   double conservative_growing_factor,
                   double target_mutator_utilization,
                   double close_to_allocation_limit_factor)
                   double target_mutator_utilization, size_t min_size,
                   size_t max_size)
      : heap_(heap),
        kMinGrowingFactor(min_growing_factor),
        kMaxGrowingFactor(max_growing_factor),
        kConservativeGrowingFactor(conservative_growing_factor),
        kTargetMutatorUtilization(target_mutator_utilization),
        kCloseToAllocationLimitFactor(close_to_allocation_limit_factor) {}
        kMinSize(min_size),
        kMaxSize(max_size) {}
  virtual ~MemoryController() {}

  // Computes the allocation limit to trigger the next garbage collection.
  size_t CalculateAllocationLimit(size_t curr_size, size_t max_size,
                                  double max_factor, double gc_speed,
                                  double mutator_speed,
                                  double gc_speed, double mutator_speed,
                                  size_t new_space_capacity,
                                  Heap::HeapGrowingMode growing_mode);

  // Computes the growing step when the limit increases.
  size_t MinimumAllocationLimitGrowingStep(Heap::HeapGrowingMode growing_mode);

  AvailableAllocationSpace CheckAllocationLimit(size_t used_memory,
                                                size_t allocation_limit) {
    if (used_memory > allocation_limit) {
      return AvailableAllocationSpace::kAboveAllocationLimit;
    } else if (used_memory >
               (kCloseToAllocationLimitFactor * allocation_limit)) {
      return AvailableAllocationSpace::kCloseToAllocationLimit;
    }
    return AvailableAllocationSpace::kBelowAllocationLimit;
  }

 protected:
  double GrowingFactor(double gc_speed, double mutator_speed,
                       double max_factor);
  double MaxGrowingFactor(size_t curr_max_size);
  virtual const char* ControllerName() = 0;

  Heap* const heap_;
@@ -66,7 +50,9 @@ class V8_EXPORT_PRIVATE MemoryController {
  const double kMaxGrowingFactor;
  const double kConservativeGrowingFactor;
  const double kTargetMutatorUtilization;
  const double kCloseToAllocationLimitFactor;
  // Sizes are in MB.
  const size_t kMinSize;
  const size_t kMaxSize;

  FRIEND_TEST(HeapControllerTest, HeapGrowingFactor);
  FRIEND_TEST(HeapControllerTest, MaxHeapGrowingFactor);
@@ -74,29 +60,20 @@ class V8_EXPORT_PRIVATE MemoryController {
  FRIEND_TEST(HeapControllerTest, OldGenerationAllocationLimit);
};

class V8_EXPORT_PRIVATE HeapController : public MemoryController {
class HeapController : public MemoryController {
 public:
  // Sizes are in MB.
  static const size_t kMinSize = 128 * Heap::kPointerMultiplier;
  static const size_t kMaxSize = 1024 * Heap::kPointerMultiplier;

  explicit HeapController(Heap* heap)
      : MemoryController(heap, 1.1, 4.0, 1.3, 0.97, 0.75) {}
  double MaxGrowingFactor(size_t curr_max_size);
      : MemoryController(heap, 1.1, 4.0, 1.3, 0.97, kMinHeapSize,
                         kMaxHeapSize) {}

  // Sizes are in MB.
  static const size_t kMinHeapSize = 128 * Heap::kPointerMultiplier;
  static const size_t kMaxHeapSize = 1024 * Heap::kPointerMultiplier;

 protected:
  const char* ControllerName() { return "HeapController"; }
};

class GlobalMemoryController : public MemoryController {
 public:
  explicit GlobalMemoryController(Heap* heap)
      : MemoryController(heap, 1.1, 4.0, 1.3, 0.97, 0.75) {}

 protected:
  const char* ControllerName() { return "GlobalMemoryController"; }
};

}  // namespace internal
}  // namespace v8
src/heap/heap.cc: 106 lines changed
@@ -138,7 +138,7 @@ Heap::Heap()
    : external_memory_(0),
      external_memory_limit_(kExternalAllocationSoftLimit),
      external_memory_at_last_mark_compact_(0),
      backing_store_bytes_(0),
      external_memory_concurrently_freed_(0),
      isolate_(nullptr),
      code_range_size_(0),
      // semispace_size_ should be a power of 2 and old_generation_size_ should

@@ -146,9 +146,6 @@ Heap::Heap()
      max_semi_space_size_(8 * (kPointerSize / 4) * MB),
      initial_semispace_size_(kMinSemiSpaceSizeInKB * KB),
      max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
      max_global_memory_size_(
          Min(static_cast<uint64_t>(std::numeric_limits<size_t>::max()),
              static_cast<uint64_t>(16) * GB)),
      initial_max_old_generation_size_(max_old_generation_size_),
      initial_old_generation_size_(max_old_generation_size_ /
                                   kInitalOldGenerationLimitFactor),

@@ -187,7 +184,6 @@ Heap::Heap()
      mmap_region_base_(0),
      remembered_unmapped_pages_index_(0),
      old_generation_allocation_limit_(initial_old_generation_size_),
      global_memory_allocation_limit_(initial_old_generation_size_),
      inline_allocation_disabled_(false),
      tracer_(nullptr),
      promoted_objects_size_(0),

@@ -263,8 +259,8 @@ size_t Heap::ComputeMaxOldGenerationSize(uint64_t physical_memory) {
  size_t computed_size = static_cast<size_t>(physical_memory / i::MB /
                                             old_space_physical_memory_factor *
                                             kPointerMultiplier);
  return Max(Min(computed_size, HeapController::kMaxSize),
             HeapController::kMinSize);
  return Max(Min(computed_size, HeapController::kMaxHeapSize),
             HeapController::kMinHeapSize);
}

size_t Heap::Capacity() {

@@ -481,8 +477,6 @@ void Heap::PrintShortHeapStatistics() {
               CommittedMemoryOfHeapAndUnmapper() / KB);
  PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
               external_memory_ / KB);
  PrintIsolate(isolate_, "Backing store memory reported: %6" PRId64 " KB\n",
               backing_store_bytes_ / KB);
  PrintIsolate(isolate_, "External memory global %zu KB\n",
               external_memory_callback_() / KB);
  PrintIsolate(isolate_, "Total time spent in GC : %.1f ms\n",
@@ -1082,12 +1076,6 @@ void Heap::HandleGCRequest() {
    incremental_marking()->reset_request_type();
    FinalizeIncrementalMarkingIncrementally(
        GarbageCollectionReason::kFinalizeMarkingViaStackGuard);
  } else if (global_controller_->CheckAllocationLimit(
                 GlobalSizeOfObjects(), global_memory_allocation_limit_) ==
             AvailableAllocationSpace::kAboveAllocationLimit) {
    CollectAllGarbage(current_gc_flags_,
                      GarbageCollectionReason::kGlobalAllocationLimit,
                      GCCallbackFlags::kGCCallbackFlagCollectAllExternalMemory);
  }
}

@@ -1475,11 +1463,8 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
  if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
    incremental_marking()->incremental_marking_job()->ScheduleTask(this);
  } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
    StartIncrementalMarking(
        gc_flags,
        OldGenerationSpaceAvailable() <= new_space_->Capacity()
            ? GarbageCollectionReason::kAllocationLimit
            : GarbageCollectionReason::kGlobalAllocationLimit,
    StartIncrementalMarking(gc_flags,
                            GarbageCollectionReason::kAllocationLimit,
                            gc_callback_flags);
  }
}

@@ -1761,7 +1746,7 @@ bool Heap::PerformGarbageCollection(
  }

  UpdateSurvivalStatistics(static_cast<int>(start_new_space_size));
  ConfigureInitialAllocationLimits();
  ConfigureInitialOldGenerationSize();

  if (collector != MARK_COMPACTOR) {
    // Objects that died in the new space might have been accounted

@@ -1794,31 +1779,27 @@ bool Heap::PerformGarbageCollection(
  double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond();
  double mutator_speed =
      tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
  double max_factor =
      heap_controller()->MaxGrowingFactor(max_old_generation_size_);
  size_t old_gen_size = OldGenerationSizeOfObjects();

  size_t new_old_limit = heap_controller()->CalculateAllocationLimit(
      old_gen_size, max_old_generation_size_, max_factor, gc_speed,
      mutator_speed, new_space()->Capacity(), CurrentHeapGrowingMode());
  size_t new_global_limit = global_controller()->CalculateAllocationLimit(
      GlobalSizeOfObjects(), max_global_memory_size_, max_factor, gc_speed,
      mutator_speed, new_space()->Capacity(), CurrentHeapGrowingMode());

  if (collector == MARK_COMPACTOR) {
    // Register the amount of external allocated memory.
    external_memory_at_last_mark_compact_ = external_memory_;
    external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit;

    set_allocation_limits(new_old_limit, new_global_limit);
    size_t new_limit = heap_controller()->CalculateAllocationLimit(
        old_gen_size, max_old_generation_size_, gc_speed, mutator_speed,
        new_space()->Capacity(), CurrentHeapGrowingMode());
    old_generation_allocation_limit_ = new_limit;

    CheckIneffectiveMarkCompact(
        old_gen_size, tracer()->AverageMarkCompactMutatorUtilization());
  } else if (HasLowYoungGenerationAllocationRate() &&
             old_generation_size_configured_) {
    set_allocation_limits(
        Min(old_generation_allocation_limit_, new_old_limit),
        Min(global_memory_allocation_limit_, new_global_limit));
    size_t new_limit = heap_controller()->CalculateAllocationLimit(
        old_gen_size, max_old_generation_size_, gc_speed, mutator_speed,
        new_space()->Capacity(), CurrentHeapGrowingMode());
    if (new_limit < old_generation_allocation_limit_) {
      old_generation_allocation_limit_ = new_limit;
    }
  }

  {
@@ -2304,23 +2285,6 @@ bool Heap::ExternalStringTable::Contains(HeapObject* obj) {
  return false;
}

void Heap::update_backing_store_bytes(int64_t amount) {
  backing_store_bytes_ += amount;

  if (amount <= 0 || gc_state_ != NOT_IN_GC) return;
  AvailableAllocationSpace available = global_controller_->CheckAllocationLimit(
      GlobalSizeOfObjects(), global_memory_allocation_limit_);
  if (available == AvailableAllocationSpace::kAboveAllocationLimit) {
    isolate()->stack_guard()->RequestGC();
  } else if (available == AvailableAllocationSpace::kCloseToAllocationLimit &&
             incremental_marking_->IsStopped() && FLAG_incremental_marking) {
    StartIncrementalMarking(
        GCFlagsForIncrementalMarking(),
        GarbageCollectionReason::kGlobalAllocationLimit,
        GCCallbackFlags::kGCCallbackFlagCollectAllExternalMemory);
  }
}

void Heap::ProcessMovedExternalString(Page* old_page, Page* new_page,
                                      ExternalString* string) {
  size_t size = string->ExternalPayloadSize();

@@ -2673,20 +2637,14 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
  ArrayBufferTracker::Unregister(this, buffer);
}

size_t Heap::ConfigureInitialControllerSize(MemoryController* controller,
                                            size_t curr_limit) {
  return Max(
      controller->MinimumAllocationLimitGrowingStep(CurrentHeapGrowingMode()),
      static_cast<size_t>(static_cast<double>(curr_limit) *
                          (tracer()->AverageSurvivalRatio() / 100)));
}
void Heap::ConfigureInitialAllocationLimits() {
void Heap::ConfigureInitialOldGenerationSize() {
  if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
    size_t new_old_limit = ConfigureInitialControllerSize(
        heap_controller_, old_generation_allocation_limit_);
    size_t new_global_limit = ConfigureInitialControllerSize(
        global_controller_, global_memory_allocation_limit_);
    set_allocation_limits(new_old_limit, new_global_limit);
    old_generation_allocation_limit_ =
        Max(heap_controller()->MinimumAllocationLimitGrowingStep(
                CurrentHeapGrowingMode()),
            static_cast<size_t>(
                static_cast<double>(old_generation_allocation_limit_) *
                (tracer()->AverageSurvivalRatio() / 100)));
  }
}
@@ -3623,8 +3581,8 @@ void Heap::CollectGarbageOnMemoryPressure() {
  double end = MonotonicallyIncreasingTimeInMs();

  // Estimate how much memory we can free.
  int64_t potential_garbage = (CommittedMemory() - SizeOfObjects()) +
                              external_memory_ + backing_store_bytes_;
  int64_t potential_garbage =
      (CommittedMemory() - SizeOfObjects()) + external_memory_;
  // If we can potentially free large amount of memory, then start GC right
  // away instead of waiting for memory reducer.
  if (potential_garbage >= kGarbageThresholdInBytes &&

@@ -3783,8 +3741,6 @@ const char* Heap::GarbageCollectionReasonToString(
      return "testing";
    case GarbageCollectionReason::kExternalFinalize:
      return "external finalize";
    case GarbageCollectionReason::kGlobalAllocationLimit:
      return "global allocation limit";
    case GarbageCollectionReason::kUnknown:
      return "unknown";
  }

@@ -4581,12 +4537,8 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
  }

  size_t old_generation_space_available = OldGenerationSpaceAvailable();
  AvailableAllocationSpace global_available =
      global_controller_->CheckAllocationLimit(GlobalSizeOfObjects(),
                                               global_memory_allocation_limit_);

  if (old_generation_space_available > new_space_->Capacity() &&
      global_available == AvailableAllocationSpace::kBelowAllocationLimit) {
  if (old_generation_space_available > new_space_->Capacity()) {
    return IncrementalMarkingLimit::kNoLimit;
  }
  if (ShouldOptimizeForMemoryUsage()) {

@@ -4752,7 +4704,6 @@ void Heap::SetUp() {
  store_buffer_ = new StoreBuffer(this);

  heap_controller_ = new HeapController(this);
  global_controller_ = new GlobalMemoryController(this);

  mark_compact_collector_ = new MarkCompactCollector(this);
  incremental_marking_ =

@@ -4996,11 +4947,6 @@ void Heap::TearDown() {
    heap_controller_ = nullptr;
  }

  if (global_controller_ != nullptr) {
    delete global_controller_;
    global_controller_ = nullptr;
  }

  if (mark_compact_collector_ != nullptr) {
    mark_compact_collector_->TearDown();
    delete mark_compact_collector_;
@@ -172,7 +172,6 @@ class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class GlobalMemoryController;
class HeapController;
class HeapObjectAllocationTracker;
class HeapObjectsFilter;

@@ -181,7 +180,6 @@ class HistogramTimer;
class Isolate;
class LocalEmbedderHeapTracer;
class MemoryAllocator;
class MemoryController;
class MemoryReducer;
class MinorMarkCompactCollector;
class ObjectIterator;

@@ -237,8 +235,7 @@ enum class GarbageCollectionReason {
  kSamplingProfiler = 19,
  kSnapshotCreator = 20,
  kTesting = 21,
  kExternalFinalize = 22,
  kGlobalAllocationLimit = 23
  kExternalFinalize = 22
  // If you add new items here, then update the incremental_marking_reason,
  // mark_compact_reason, and scavenge_reason counters in counters.h.
  // Also update src/tools/metrics/histograms/histograms.xml in chromium.
@@ -680,9 +677,16 @@ class Heap {
  int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }

  int64_t external_memory() { return external_memory_; }
  int64_t backing_store_bytes() const { return backing_store_bytes_; }
  void update_external_memory(int64_t delta) { external_memory_ += delta; }

  void update_backing_store_bytes(int64_t amount);
  void update_external_memory_concurrently_freed(intptr_t freed) {
    external_memory_concurrently_freed_ += freed;
  }

  void account_external_memory_concurrently_freed() {
    external_memory_ -= external_memory_concurrently_freed_;
    external_memory_concurrently_freed_ = 0;
  }

  void ProcessMovedExternalString(Page* old_page, Page* new_page,
                                  ExternalString* string);

@@ -1325,14 +1329,6 @@ class Heap {
  // Excludes external memory held by those objects.
  size_t OldGenerationSizeOfObjects();

  // Returns the size of JS and external objects
  size_t GlobalSizeOfObjects() {
    size_t global_size = OldGenerationSizeOfObjects() +
                         static_cast<size_t>(backing_store_bytes_);
    CHECK_GE(global_size, static_cast<size_t>(backing_store_bytes_));
    return global_size;
  }

  // ===========================================================================
  // Prologue/epilogue callback methods.========================================
  // ===========================================================================

@@ -1701,9 +1697,7 @@ class Heap {
  // Flush the number to string cache.
  void FlushNumberStringCache();

  size_t ConfigureInitialControllerSize(MemoryController* controller,
                                        size_t curr_limit);
  void ConfigureInitialAllocationLimits();
  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
@@ -1852,7 +1846,6 @@ class Heap {
  // ===========================================================================

  HeapController* heap_controller() { return heap_controller_; }
  GlobalMemoryController* global_controller() { return global_controller_; }
  MemoryReducer* memory_reducer() { return memory_reducer_; }

  // For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.

@@ -1863,12 +1856,6 @@ class Heap {

  bool ShouldOptimizeForLoadTime();

  void set_allocation_limits(size_t old_generation_allocation_limit,
                             size_t global_memory_allocation_limit) {
    old_generation_allocation_limit_ = old_generation_allocation_limit;
    global_memory_allocation_limit_ = global_memory_allocation_limit;
  }

  size_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

@@ -1983,8 +1970,8 @@ class Heap {
  // Caches the amount of external memory registered at the last MC.
  int64_t external_memory_at_last_mark_compact_;

  // Backing store bytes (array buffers and external strings).
  std::atomic<int64_t> backing_store_bytes_;
  // The amount of memory that has been freed concurrently.
  std::atomic<intptr_t> external_memory_concurrently_freed_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.

@@ -2017,7 +2004,6 @@ class Heap {
  size_t max_semi_space_size_;
  size_t initial_semispace_size_;
  size_t max_old_generation_size_;
  size_t max_global_memory_size_;
  size_t initial_max_old_generation_size_;
  size_t initial_old_generation_size_;
  bool old_generation_size_configured_;

@@ -2118,10 +2104,6 @@ class Heap {
  // generation and on every allocation in large object space.
  size_t old_generation_allocation_limit_;

  // The limit when to trigger memory pressure. This limit accounts for JS
  // memory and external memory (array buffers and external strings).
  size_t global_memory_allocation_limit_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

@@ -2175,7 +2157,6 @@ class Heap {
  StoreBuffer* store_buffer_;

  HeapController* heap_controller_;
  GlobalMemoryController* global_controller_;

  IncrementalMarking* incremental_marking_;
  ConcurrentMarking* concurrent_marking_;
@@ -803,6 +803,7 @@ void MarkCompactCollector::Prepare() {
       space = spaces.next()) {
    space->PrepareForMarkCompact();
  }
  heap()->account_external_memory_concurrently_freed();

#ifdef VERIFY_HEAP
  if (!was_marked_incrementally_ && FLAG_verify_heap) {

@@ -3847,6 +3848,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
      RememberedSet<OLD_TO_NEW>::PreFreeEmptyBuckets(chunk);
    }
  });

  heap()->account_external_memory_concurrently_freed();
}

void MinorMarkCompactCollector::MakeIterable(
@@ -1018,13 +1018,11 @@ class Space : public Malloced {
  void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          size_t amount) {
    external_backing_store_bytes_[type] += amount;
    heap()->update_backing_store_bytes(static_cast<int64_t>(amount));
  }
  void DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          size_t amount) {
    DCHECK_GE(external_backing_store_bytes_[type], amount);
    external_backing_store_bytes_[type] -= amount;
    heap()->update_backing_store_bytes(-static_cast<int64_t>(amount));
  }

V8_EXPORT_PRIVATE void* GetRandomMmapAddr();
@@ -1588,7 +1588,6 @@ HEAP_TEST(TestSizeOfObjects) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  MarkCompactCollector* collector = heap->mark_compact_collector();
  ManualGCScope manual_gc_scope;

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
@@ -1,9 +0,0 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --mock-arraybuffer-allocator-limit=800000000

for (var i = 0; i < 1024; i++) {
  let garbage = new ArrayBuffer(1024*1024);
}
@@ -51,14 +51,15 @@ TEST_F(HeapControllerTest, HeapGrowingFactor) {
TEST_F(HeapControllerTest, MaxHeapGrowingFactor) {
  HeapController heap_controller(i_isolate()->heap());
  CheckEqualRounded(
      1.3, heap_controller.MaxGrowingFactor(HeapController::kMinSize * MB));
      1.3, heap_controller.MaxGrowingFactor(heap_controller.kMinSize * MB));
  CheckEqualRounded(1.600, heap_controller.MaxGrowingFactor(
                               HeapController::kMaxSize / 2 * MB));
                               heap_controller.kMaxSize / 2 * MB));
  CheckEqualRounded(
      1.999, heap_controller.MaxGrowingFactor(
                 (HeapController::kMaxSize - Heap::kPointerMultiplier) * MB));
  CheckEqualRounded(
      4.0, heap_controller.MaxGrowingFactor(HeapController::kMaxSize * MB));
                 (heap_controller.kMaxSize - Heap::kPointerMultiplier) * MB));
  CheckEqualRounded(4.0,
                    heap_controller.MaxGrowingFactor(
                        static_cast<size_t>(heap_controller.kMaxSize) * MB));
}

TEST_F(HeapControllerTest, OldGenerationAllocationLimit) {
@@ -74,43 +75,39 @@ TEST_F(HeapControllerTest, OldGenerationAllocationLimit) {
  double factor =
      heap_controller.GrowingFactor(gc_speed, mutator_speed, max_factor);

  EXPECT_EQ(
      static_cast<size_t>(old_gen_size * factor + new_space_capacity),
  EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
      heap->heap_controller()->CalculateAllocationLimit(
          old_gen_size, max_old_generation_size, max_factor, gc_speed,
          mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kDefault));

  factor = Min(factor, heap_controller.kConservativeGrowingFactor);
  EXPECT_EQ(
      static_cast<size_t>(old_gen_size * factor + new_space_capacity),
      heap->heap_controller()->CalculateAllocationLimit(
          old_gen_size, max_old_generation_size, max_factor, gc_speed,
          mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kSlow));
          old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
          new_space_capacity, Heap::HeapGrowingMode::kDefault));

  factor = Min(factor, heap_controller.kConservativeGrowingFactor);
  EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
      heap->heap_controller()->CalculateAllocationLimit(
          old_gen_size, max_old_generation_size, max_factor, gc_speed,
          mutator_speed, new_space_capacity,
          Heap::HeapGrowingMode::kConservative));
          old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
          new_space_capacity, Heap::HeapGrowingMode::kSlow));

  factor = Min(factor, heap_controller.kConservativeGrowingFactor);
  EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
      heap->heap_controller()->CalculateAllocationLimit(
          old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
          new_space_capacity, Heap::HeapGrowingMode::kConservative));

  factor = heap_controller.kMinGrowingFactor;
  EXPECT_EQ(
      static_cast<size_t>(old_gen_size * factor + new_space_capacity),
  EXPECT_EQ(static_cast<size_t>(old_gen_size * factor + new_space_capacity),
      heap->heap_controller()->CalculateAllocationLimit(
          old_gen_size, max_old_generation_size, max_factor, gc_speed,
          mutator_speed, new_space_capacity, Heap::HeapGrowingMode::kMinimal));
          old_gen_size, max_old_generation_size, gc_speed, mutator_speed,
          new_space_capacity, Heap::HeapGrowingMode::kMinimal));
}

TEST_F(HeapControllerTest, MaxOldGenerationSize) {
  HeapController heap_controller(i_isolate()->heap());
  uint64_t configurations[][2] = {
      {0, HeapController::kMinSize},
      {512, HeapController::kMinSize},
      {0, heap_controller.kMinSize},
      {512, heap_controller.kMinSize},
      {1 * GB, 256 * Heap::kPointerMultiplier},
      {2 * static_cast<uint64_t>(GB), 512 * Heap::kPointerMultiplier},
      {4 * static_cast<uint64_t>(GB), HeapController::kMaxSize},
      {8 * static_cast<uint64_t>(GB), HeapController::kMaxSize}};
      {4 * static_cast<uint64_t>(GB), heap_controller.kMaxSize},
      {8 * static_cast<uint64_t>(GB), heap_controller.kMaxSize}};

  for (auto configuration : configurations) {
    ASSERT_EQ(configuration[1],
@@ -294,10 +294,7 @@ local WHITELIST = {
  "StateTag",

  -- Ignore printing of elements transition.
  "PrintElementsTransition",

  -- Ignore GC reason method that returns a string (not GC trigger).
  "GarbageCollectionReasonToString"
  "PrintElementsTransition"
};

local function AddCause(name, cause)