[heap] Reuse freed CodeRange addresses.

This patch adds a singleton that tracks recently freed code range
regions and provides hints for newly created code ranges such that
the freed addresses are reused.

This is a workaround for the CFG leak described in the linked bug.

Bug: chromium:870054

Change-Id: Ice237a056268379f0fef40abdb1accad125a56b3
Reviewed-on: https://chromium-review.googlesource.com/1174837
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55139}
Ulan Degenbaev 2018-08-15 20:08:41 +02:00 committed by Commit Bot
parent 0027c83440
commit 4d474c51d8
3 changed files with 82 additions and 6 deletions
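
As background for the diff below, here is a minimal, self-contained sketch of the bookkeeping the commit message describes: a mutex-protected map from code range size to recently freed start addresses, where the most recent entry is handed back as an allocation hint. The names used here (AddressHintSketch, GetHint, NotifyFreed) are illustrative only and are not part of V8; the actual class, CodeRangeAddressHint, appears in the diff that follows.

// Illustrative sketch only -- not the patch itself. Unlike the real code,
// GetHint() returns nullptr instead of a random mmap address so that the
// example stays platform-independent.
#include <cassert>
#include <cstddef>
#include <map>
#include <mutex>
#include <vector>

class AddressHintSketch {
 public:
  // Returns the most recently freed start address recorded for |size|,
  // or nullptr if none is known (the caller would then pick a random hint).
  void* GetHint(size_t size) {
    std::lock_guard<std::mutex> guard(mutex_);
    auto it = recently_freed_.find(size);
    if (it == recently_freed_.end() || it->second.empty()) return nullptr;
    void* result = it->second.back();
    it->second.pop_back();
    return result;
  }

  // Records that a reservation of |size| bytes starting at |start| was
  // released, so a later reservation of the same size can try it first.
  void NotifyFreed(void* start, size_t size) {
    std::lock_guard<std::mutex> guard(mutex_);
    recently_freed_[size].push_back(start);
  }

 private:
  std::mutex mutex_;
  std::map<size_t, std::vector<void*>> recently_freed_;
};

int main() {
  AddressHintSketch hint;
  void* freed = reinterpret_cast<void*>(0x10000);
  hint.NotifyFreed(freed, 4096);
  // A later request for the same size gets the freed address back.
  assert(hint.GetHint(4096) == freed);
  assert(hint.GetHint(4096) == nullptr);  // Nothing left to reuse.
  return 0;
}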


@@ -95,11 +95,15 @@ PauseAllocationObserversScope::~PauseAllocationObserversScope() {
 // -----------------------------------------------------------------------------
 // CodeRange
 
+static base::LazyInstance<CodeRangeAddressHint>::type code_range_address_hint =
+    LAZY_INSTANCE_INITIALIZER;
+
 CodeRange::CodeRange(Isolate* isolate, size_t requested)
     : isolate_(isolate),
       free_list_(0),
       allocation_list_(0),
-      current_allocation_block_index_(0) {
+      current_allocation_block_index_(0),
+      requested_code_range_size_(0) {
   DCHECK(!virtual_memory_.IsReserved());
 
   if (requested == 0) {
@@ -124,10 +128,13 @@ CodeRange::CodeRange(Isolate* isolate, size_t requested)
   DCHECK(!kRequiresCodeRange || requested <= kMaximalCodeRangeSize);
+  requested_code_range_size_ = requested;
 
   VirtualMemory reservation;
+  void* hint = code_range_address_hint.Pointer()->GetAddressHint(requested);
   if (!AlignedAllocVirtualMemory(
-          requested, Max(kCodeRangeAreaAlignment, AllocatePageSize()),
-          GetRandomMmapAddr(), &reservation)) {
+          requested, Max(kCodeRangeAreaAlignment, AllocatePageSize()), hint,
+          &reservation)) {
     V8::FatalProcessOutOfMemory(isolate,
                                 "CodeRange setup: allocate virtual memory");
   }
@@ -156,6 +163,15 @@ CodeRange::CodeRange(Isolate* isolate, size_t requested)
   virtual_memory_.TakeControl(&reservation);
 }
 
+CodeRange::~CodeRange() {
+  if (virtual_memory_.IsReserved()) {
+    Address addr = start();
+    virtual_memory_.Free();
+    code_range_address_hint.Pointer()->NotifyFreedCodeRange(
+        reinterpret_cast<void*>(addr), requested_code_range_size_);
+  }
+}
+
 bool CodeRange::CompareFreeBlockAddress(const FreeBlock& left,
                                         const FreeBlock& right) {
   return left.start < right.start;
@@ -262,6 +278,22 @@ void CodeRange::ReleaseBlock(const FreeBlock* block) {
   free_list_.push_back(*block);
 }
 
+void* CodeRangeAddressHint::GetAddressHint(size_t code_range_size) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  auto it = recently_freed_.find(code_range_size);
+  if (it == recently_freed_.end() || it->second.empty()) {
+    return GetRandomMmapAddr();
+  }
+  void* result = it->second.back();
+  it->second.pop_back();
+  return result;
+}
+
+void CodeRangeAddressHint::NotifyFreedCodeRange(void* code_range_start,
+                                                size_t code_range_size) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  recently_freed_[code_range_size].push_back(code_range_start);
+}
+
 // -----------------------------------------------------------------------------
 // MemoryAllocator


@@ -1069,9 +1069,7 @@ class MemoryChunkValidator {
 class CodeRange {
  public:
   CodeRange(Isolate* isolate, size_t requested_size);
-  ~CodeRange() {
-    if (virtual_memory_.IsReserved()) virtual_memory_.Free();
-  }
+  ~CodeRange();
 
   bool valid() { return virtual_memory_.IsReserved(); }
   Address start() {
@@ -1144,10 +1142,31 @@ class CodeRange {
   // The block at current_allocation_block_index_ is the current block.
   std::vector<FreeBlock> allocation_list_;
   size_t current_allocation_block_index_;
+  size_t requested_code_range_size_;
 
   DISALLOW_COPY_AND_ASSIGN(CodeRange);
 };
 
+// The process-wide singleton that keeps track of code range regions with the
+// intention to reuse free code range regions as a workaround for CFG memory
+// leaks (see crbug.com/870054).
+class CodeRangeAddressHint {
+ public:
+  // Returns the most recently freed code range start address for the given
+  // size. If there is no such entry, then a random address is returned.
+  V8_EXPORT_PRIVATE void* GetAddressHint(size_t code_range_size);
+
+  V8_EXPORT_PRIVATE void NotifyFreedCodeRange(void* code_range_start,
+                                              size_t code_range_size);
+
+ private:
+  base::Mutex mutex_;
+  // A map from code range size to an array of recently freed code range
+  // addresses. There should be O(1) different code range sizes.
+  // The length of each array is limited by the peak number of code ranges,
+  // which should be also O(1).
+  std::map<size_t, std::vector<void*>> recently_freed_;
+};
+
 class SkipList {
  public:


@@ -115,5 +115,30 @@ TEST_F(SpacesTest, WriteBarrierInNewSpaceFromSpace) {
   EXPECT_FALSE(slim_chunk->InNewSpace());
 }
 
+TEST_F(SpacesTest, CodeRangeAddressReuse) {
+  CodeRangeAddressHint hint;
+  // Create code ranges.
+  void* code_range1 = hint.GetAddressHint(100);
+  void* code_range2 = hint.GetAddressHint(200);
+  void* code_range3 = hint.GetAddressHint(100);
+  // Since the addresses are random, we cannot check that they are different.
+
+  // Free two code ranges.
+  hint.NotifyFreedCodeRange(code_range1, 100);
+  hint.NotifyFreedCodeRange(code_range2, 200);
+
+  // The next two code ranges should reuse the freed addresses.
+  void* code_range4 = hint.GetAddressHint(100);
+  EXPECT_EQ(code_range4, code_range1);
+  void* code_range5 = hint.GetAddressHint(200);
+  EXPECT_EQ(code_range5, code_range2);
+
+  // Free the third code range and check address reuse.
+  hint.NotifyFreedCodeRange(code_range3, 100);
+  void* code_range6 = hint.GetAddressHint(100);
+  EXPECT_EQ(code_range6, code_range3);
+}
+
 }  // namespace internal
 }  // namespace v8