[heap] Remove max executable size configuration.

BUG=chromium:716032

Review-Url: https://codereview.chromium.org/2842303003
Cr-Commit-Position: refs/heads/master@{#44975}
Committed by hpayer on 2017-04-28 04:41:04 -07:00 (committed via Commit bot)
parent 006240292e
commit 1e50277b9f
20 changed files with 81 additions and 73 deletions

View File

@@ -874,7 +874,7 @@ void SetResourceConstraints(i::Isolate* isolate,
if (semi_space_size != 0 || old_space_size != 0 ||
max_executable_size != 0 || code_range_size != 0) {
isolate->heap()->ConfigureHeap(semi_space_size, old_space_size,
max_executable_size, code_range_size);
code_range_size);
}
isolate->allocator()->ConfigureSegmentPool(max_pool_size);

View File

@@ -4881,7 +4881,14 @@ void Assembler::GrowBuffer() {
} else {
desc.buffer_size = buffer_size_ + 1*MB;
}
CHECK_GT(desc.buffer_size, 0); // no overflow
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
// Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);

View File

@@ -1723,6 +1723,9 @@ class Assembler : public AssemblerBase {
std::vector<ConstantPoolEntry> pending_64_bit_constants_;
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
int next_buffer_check_; // pc offset of next buffer check
// Constant pool generation

View File

@@ -2905,7 +2905,14 @@ void Assembler::GrowBuffer() {
} else {
desc.buffer_size = buffer_size_ + 1 * MB;
}
CHECK_GT(desc.buffer_size, 0); // No overflow.
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
byte* buffer = reinterpret_cast<byte*>(buffer_);

View File

@@ -2134,6 +2134,9 @@ class Assembler : public AssemblerBase {
int next_veneer_pool_check_;
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
// If a veneer is emitted for a branch instruction, that instruction must be
// removed from the associated label's link chain so that the assembler does
// not later attempt (likely unsuccessfully) to patch it to branch directly to

View File

@@ -140,11 +140,8 @@ const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// Implementation of AssemblerBase
AssemblerBase::IsolateData::IsolateData(Isolate* isolate)
: serializer_enabled_(isolate->serializer_enabled())
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64
,
: serializer_enabled_(isolate->serializer_enabled()),
max_old_generation_size_(isolate->heap()->MaxOldGenerationSize())
#endif
#if V8_TARGET_ARCH_X64
,
code_range_start_(

View File

@@ -69,9 +69,7 @@ class AssemblerBase: public Malloced {
IsolateData(const IsolateData&) = default;
bool serializer_enabled_;
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64
size_t max_old_generation_size_;
#endif
#if V8_TARGET_ARCH_X64
Address code_range_start_;
#endif

View File

@@ -87,7 +87,6 @@ Heap::Heap()
initial_old_generation_size_(max_old_generation_size_ /
kInitalOldGenerationLimitFactor),
old_generation_size_configured_(false),
max_executable_size_(256ul * (kPointerSize / 4) * MB),
// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap.
// Will be 4 * reserved_semispace_size_ to ensure that young
@@ -5067,7 +5066,7 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
// and through the API, we should gracefully handle the case that the heap
// size is not big enough to fit all the initial objects.
bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
size_t max_executable_size, size_t code_range_size) {
size_t code_range_size) {
if (HasBeenSetUp()) return false;
// Overwrite default configuration.
@@ -5077,9 +5076,6 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
if (max_old_space_size != 0) {
max_old_generation_size_ = max_old_space_size * MB;
}
if (max_executable_size != 0) {
max_executable_size_ = max_executable_size * MB;
}
// If max space size flags are specified overwrite the configuration.
if (FLAG_max_semi_space_size > 0) {
@@ -5089,15 +5085,11 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
max_old_generation_size_ =
static_cast<size_t>(FLAG_max_old_space_size) * MB;
}
if (FLAG_max_executable_size > 0) {
max_executable_size_ = static_cast<size_t>(FLAG_max_executable_size) * MB;
}
if (Page::kPageSize > MB) {
max_semi_space_size_ = ROUND_UP(max_semi_space_size_, Page::kPageSize);
max_old_generation_size_ =
ROUND_UP(max_old_generation_size_, Page::kPageSize);
max_executable_size_ = ROUND_UP(max_executable_size_, Page::kPageSize);
}
if (FLAG_stress_compaction) {
@@ -5139,12 +5131,6 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
max_old_generation_size_);
// The max executable size must be less than or equal to the max old
// generation size.
if (max_executable_size_ > max_old_generation_size_) {
max_executable_size_ = max_old_generation_size_;
}
if (FLAG_initial_old_space_size > 0) {
initial_old_generation_size_ = FLAG_initial_old_space_size * MB;
} else {
@@ -5189,9 +5175,7 @@ void Heap::GetFromRingBuffer(char* buffer) {
memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
}
bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); }
bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0); }
void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->start_marker = HeapStats::kStartMarker;
@@ -5510,9 +5494,7 @@ bool Heap::SetUp() {
// Set up memory allocator.
memory_allocator_ = new MemoryAllocator(isolate_);
if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(),
code_range_size_))
return false;
if (!memory_allocator_->SetUp(MaxReserved(), code_range_size_)) return false;
store_buffer_ = new StoreBuffer(this);

View File

@@ -1016,7 +1016,7 @@ class Heap {
// Configure heap size in MB before setup. Return false if the heap has been
// set up already.
bool ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
size_t max_executable_size, size_t code_range_size);
size_t code_range_size);
bool ConfigureHeapDefault();
// Prepares the heap, setting up memory areas that are needed in the isolate
@@ -1370,7 +1370,6 @@ class Heap {
size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
size_t MaxOldGenerationSize() { return max_old_generation_size_; }
size_t MaxExecutableSize() { return max_executable_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
@@ -2189,7 +2188,6 @@ class Heap {
size_t initial_max_old_generation_size_;
size_t initial_old_generation_size_;
bool old_generation_size_configured_;
size_t max_executable_size_;
size_t maximum_committed_;
// For keeping track of how much data has survived

View File

@@ -291,18 +291,14 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate)
: isolate_(isolate),
code_range_(nullptr),
capacity_(0),
capacity_executable_(0),
size_(0),
size_executable_(0),
lowest_ever_allocated_(reinterpret_cast<void*>(-1)),
highest_ever_allocated_(reinterpret_cast<void*>(0)),
unmapper_(this) {}
bool MemoryAllocator::SetUp(size_t capacity, size_t capacity_executable,
size_t code_range_size) {
bool MemoryAllocator::SetUp(size_t capacity, size_t code_range_size) {
capacity_ = RoundUp(capacity, Page::kPageSize);
capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize);
DCHECK_GE(capacity_, capacity_executable_);
size_ = 0;
size_executable_ = 0;
@@ -322,7 +318,6 @@ void MemoryAllocator::TearDown() {
// TODO(gc) this will be true again when we fix FreeMemory.
// DCHECK(size_executable_ == 0);
capacity_ = 0;
capacity_executable_ = 0;
if (last_chunk_.IsReserved()) {
last_chunk_.Release();
@@ -698,13 +693,6 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
GetCommitPageSize()) +
CodePageGuardSize();
// Check executable memory limit.
if ((size_executable_.Value() + chunk_size) > capacity_executable_) {
LOG(isolate_, StringEvent("MemoryAllocator::AllocateRawMemory",
"V8 Executable Allocation capacity exceeded"));
return NULL;
}
// Size of header (not executable) plus area (executable).
size_t commit_size = RoundUp(CodePageGuardStartOffset() + commit_area_size,
GetCommitPageSize());

View File

@@ -1273,8 +1273,7 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
// Initializes its internal bookkeeping structures.
// Max capacity of the total space and executable memory limit.
bool SetUp(size_t max_capacity, size_t capacity_executable,
size_t code_range_size);
bool SetUp(size_t max_capacity, size_t code_range_size);
void TearDown();
@@ -1305,13 +1304,6 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
return capacity_ < size ? 0 : capacity_ - size;
}
// Returns the maximum available executable bytes of heaps.
size_t AvailableExecutable() {
const size_t executable_size = SizeExecutable();
if (capacity_executable_ < executable_size) return 0;
return capacity_executable_ - executable_size;
}
// Returns maximum available bytes that the old space can have.
size_t MaxAvailable() {
return (Available() / Page::kPageSize) * Page::kAllocatableMemory;
@@ -1410,8 +1402,6 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
// Maximum space size in bytes.
size_t capacity_;
// Maximum subset of capacity_ that can be executable
size_t capacity_executable_;
// Allocated space size in bytes.
base::AtomicNumber<size_t> size_;

View File

@@ -3549,13 +3549,20 @@ void Assembler::GrowBuffer() {
if (!own_buffer_) FATAL("external code buffer is too small");
// Compute new buffer size.
CodeDesc desc; // The new buffer.
CodeDesc desc; // the new buffer
if (buffer_size_ < 1 * MB) {
desc.buffer_size = 2*buffer_size_;
} else {
desc.buffer_size = buffer_size_ + 1*MB;
}
CHECK_GT(desc.buffer_size, 0); // No overflow.
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
// Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);

View File

@@ -1927,6 +1927,9 @@ class Assembler : public AssemblerBase {
inline void CheckBuffer();
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
inline static void set_target_internal_reference_encoded_at(Address pc,
Address target);

View File

@@ -3795,13 +3795,20 @@ void Assembler::GrowBuffer() {
if (!own_buffer_) FATAL("external code buffer is too small");
// Compute new buffer size.
CodeDesc desc; // The new buffer.
CodeDesc desc; // the new buffer
if (buffer_size_ < 1 * MB) {
desc.buffer_size = 2*buffer_size_;
} else {
desc.buffer_size = buffer_size_ + 1*MB;
}
CHECK_GT(desc.buffer_size, 0); // No overflow.
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
// Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);

View File

@@ -1976,6 +1976,9 @@ class Assembler : public AssemblerBase {
inline void CheckTrampolinePoolQuick(int extra_instructions = 0);
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
// Buffer size and constant pool distance are checked together at regular
// intervals of kBufferCheckInterval emitted bytes.
static constexpr int kBufferCheckInterval = 1 * KB / 2;

View File

@@ -1984,7 +1984,14 @@ void Assembler::GrowBuffer(int needed) {
if (space < needed) {
desc.buffer_size += needed - space;
}
CHECK_GT(desc.buffer_size, 0); // no overflow
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
// Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);

View File

@@ -1438,6 +1438,9 @@ class Assembler : public AssemblerBase {
RelocInfoWriter reloc_info_writer;
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
// Repeated checking whether the trampoline pool should be emitted is rather
// expensive. By default we only check again once a number of instructions
// has been generated.

View File

@@ -2059,7 +2059,14 @@ void Assembler::GrowBuffer(int needed) {
if (space < needed) {
desc.buffer_size += needed - space;
}
CHECK_GT(desc.buffer_size, 0); // no overflow
// Some internal data structures overflow for very large buffers,
// they must ensure that kMaximalBufferSize is not too large.
if (desc.buffer_size > kMaximalBufferSize ||
static_cast<size_t>(desc.buffer_size) >
isolate_data().max_old_generation_size_) {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
// Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);

View File

@@ -1366,6 +1366,9 @@ class Assembler : public AssemblerBase {
void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
// Code generation
// The relocation writer's position is at least kGap bytes below the end of
// the generated instructions. This is so that multi-instruction sequences do

View File

@@ -156,8 +156,7 @@ static void VerifyMemoryChunk(Isolate* isolate,
size_t second_commit_area_size,
Executability executable) {
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
0));
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
{
TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
TestCodeRangeScope test_code_range_scope(isolate, code_range);
@@ -208,8 +207,7 @@ TEST(Regress3540) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
0));
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
CodeRange* code_range = new CodeRange(isolate);
size_t code_range_size =
@@ -309,8 +307,7 @@ TEST(MemoryAllocator) {
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator != nullptr);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
0));
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
{
@@ -357,8 +354,7 @@ TEST(NewSpace) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
0));
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
NewSpace new_space(heap);
@@ -383,8 +379,7 @@ TEST(OldSpace) {
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
0));
CHECK(memory_allocator->SetUp(heap->MaxReserved(), 0));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
OldSpace* s = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);