[codegen] Reduce kMinimalBufferSize and add kDefaultBufferSize

In Liftoff, we have a good estimate of how big the generated code
might get. We also often compile hundreds of functions, each of which
keeps an assembler buffer alive until its code is finally added to the
wasm module.

To reduce memory consumption in Liftoff, this CL reduces
{AssemblerBase::kMinimalBufferSize} from 4096 to 128, and adds
{AssemblerBase::kDefaultBufferSize} to be used instead.

R=jkummerow@chromium.org

Change-Id: I7029bf501244770f4824a86b233d7f99c4b7910b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1914559
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64958}
Clemens Backes 2019-11-13 17:58:58 +01:00 committed by Commit Bot
parent 315c4166d7
commit 89e0902d57
15 changed files with 24 additions and 9 deletions
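
The size relationships this CL introduces can be summarized in a small standalone sketch (plain C++, not V8 code; the 64-byte value is the largest platform-specific {Assembler::kGap} appearing in the hunks below, and KB is assumed to be 1024):

constexpr int KB = 1024;             // assumed, matching V8's usual definition
constexpr int kMaxPlatformGap = 64;  // largest Assembler::kGap in this patch

// New lower bound: hundreds of pending Liftoff buffers stay cheap.
constexpr int kMinimalBufferSize = 128;

// New default: the previous 4 KB value, used when the final code size is unknown.
constexpr int kDefaultBufferSize = 4 * KB;

// The invariant each architecture now enforces via STATIC_ASSERT.
static_assert(kMinimalBufferSize >= 2 * kMaxPlatformGap,
              "minimal buffer must leave room for the overflow gap");
static_assert(kDefaultBufferSize >= kMinimalBufferSize,
              "default buffer is never smaller than the minimum");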

View File

@@ -1198,6 +1198,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // not have to check for overflow. The same is true for writes of large
   // relocation info entries.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
   // Relocation info generation
   // Each relocation is encoded as a variable size value

View File

@@ -2639,6 +2639,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // relocation info entries, and debug strings encoded in the instruction
   // stream.
   static constexpr int kGap = 64;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
  public:
 #ifdef DEBUG

View File

@@ -140,7 +140,7 @@ AssemblerBase::AssemblerBase(const AssemblerOptions& options,
       predictable_code_size_(false),
       constant_pool_available_(false),
       jump_optimization_info_(nullptr) {
-  if (!buffer_) buffer_ = NewAssemblerBuffer(kMinimalBufferSize);
+  if (!buffer_) buffer_ = NewAssemblerBuffer(kDefaultBufferSize);
   buffer_start_ = buffer_->start();
   pc_ = buffer_start_;
 }
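
Callers that can estimate their code size up front (the Liftoff case from the commit message) are expected to supply their own, appropriately sized buffer rather than fall back to {kDefaultBufferSize}. A hypothetical standalone sketch of that sizing policy follows; the function names and the clamping rule are illustrative, not taken from Liftoff:

#include <algorithm>
#include <cstdint>
#include <vector>

constexpr int kMinimalBufferSize = 128;   // new lower bound from this CL
constexpr int kDefaultBufferSize = 4096;  // fallback when no estimate exists

// Hypothetical: size the code buffer from a caller-provided estimate, clamped
// to the minimum so the assembler's overflow gap still fits.
std::vector<uint8_t> AllocateCodeBuffer(int estimated_code_size) {
  int size = std::max(estimated_code_size, kMinimalBufferSize);
  return std::vector<uint8_t>(static_cast<size_t>(size));
}

// Without an estimate, fall back to the 4 KB default, analogous to the
// constructor above.
std::vector<uint8_t> AllocateCodeBufferWithoutEstimate() {
  return std::vector<uint8_t>(static_cast<size_t>(kDefaultBufferSize));
}

With the old 4 KB minimum, even a tiny function would pin a 4 KB buffer until its code lands in the wasm module; with a 128-byte minimum, hundreds of such pending buffers hold far less memory.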

View File

@@ -271,7 +271,13 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
     }
   }
-  static const int kMinimalBufferSize = 4 * KB;
+  // The minimum buffer size. Should be at least two times the platform-specific
+  // {Assembler::kGap}.
+  static constexpr int kMinimalBufferSize = 128;
+  // The default buffer size used if we do not know the final size of the
+  // generated code.
+  static constexpr int kDefaultBufferSize = 4 * KB;
  protected:
   // Add 'target' to the {code_targets_} vector, if necessary, and return the

View File

@@ -361,6 +361,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // otherwise valid instructions.)
   // This allows for a single, fast space check per instruction.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
  public:
   // Create an assembler. Instructions and relocation information are emitted

View File

@@ -1610,6 +1610,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // not have to check for overflow. The same is true for writes of large
   // relocation info entries.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
   // Repeated checking whether the trampoline pool should be emitted is rather
   // expensive. By default we only check again once a number of instructions

View File

@@ -1643,6 +1643,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // not have to check for overflow. The same is true for writes of large
   // relocation info entries.
   static constexpr int kGap = 64;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
   // Repeated checking whether the trampoline pool should be emitted is rather
   // expensive. By default we only check again once a number of instructions

View File

@@ -1158,6 +1158,7 @@ class Assembler : public AssemblerBase {
   // not have to check for overflow. The same is true for writes of large
   // relocation info entries.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
   RelocInfoWriter reloc_info_writer;

View File

@@ -1384,6 +1384,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // not have to check for overflow. The same is true for writes of large
   // relocation info entries.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
   // Relocation info generation
   // Each relocation is encoded as a variable size value

View File

@@ -317,6 +317,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   // otherwise valid instructions.)
   // This allows for a single, fast space check per instruction.
   static constexpr int kGap = 32;
+  STATIC_ASSERT(AssemblerBase::kMinimalBufferSize >= 2 * kGap);
  public:
   // Create an assembler. Instructions and relocation information are emitted

View File

@@ -192,7 +192,7 @@ HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
   Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
   CHECK(Heap::InYoungGeneration(*value));
-  i::byte buffer[i::Assembler::kMinimalBufferSize];
+  i::byte buffer[i::Assembler::kDefaultBufferSize];
   MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                       ExternalAssemblerBuffer(buffer, sizeof(buffer)));
   // Add a new-space reference to the code.
@@ -4338,7 +4338,7 @@ TEST(NextCodeLinkInCodeDataContainerIsCleared) {
 }
 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
-  i::byte buffer[i::Assembler::kMinimalBufferSize];
+  i::byte buffer[i::Assembler::kDefaultBufferSize];
   MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                       ExternalAssemblerBuffer(buffer, sizeof(buffer)));
   CodeDesc desc;

View File

@@ -187,7 +187,7 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 TEST(SmiCompare) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  auto buffer = AllocateAssemblerBuffer(2 * Assembler::kMinimalBufferSize);
+  auto buffer = AllocateAssemblerBuffer(2 * Assembler::kDefaultBufferSize);
   MacroAssembler assembler(isolate, v8::internal::CodeObjectRequired::kYes,
                            buffer->CreateView());

View File

@@ -32,7 +32,8 @@ constexpr int kJumpTableSlotCount = 128;
 constexpr uint32_t kJumpTableSize =
     JumpTableAssembler::SizeForNumberOfSlots(kJumpTableSlotCount);
-constexpr size_t kThunkBufferSize = AssemblerBase::kMinimalBufferSize;
+// Must be a safe commit page size.
+constexpr size_t kThunkBufferSize = 4 * KB;
 #if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_X64
 constexpr uint32_t kAvailableBufferSlots =

View File

@@ -74,7 +74,7 @@ class TestingAssemblerBuffer : public AssemblerBuffer {
 };
 static inline std::unique_ptr<TestingAssemblerBuffer> AllocateAssemblerBuffer(
-    size_t requested = v8::internal::AssemblerBase::kMinimalBufferSize,
+    size_t requested = v8::internal::AssemblerBase::kDefaultBufferSize,
     void* address = nullptr) {
   return std::make_unique<TestingAssemblerBuffer>(requested, address);
 }

View File

@@ -88,7 +88,7 @@ class TrapHandlerTest : public TestWithIsolate,
     crash_address_ = reinterpret_cast<Address>(backing_store_->buffer_start()) +
                      backing_store_->byte_length() + 32;
     // Allocate a buffer for the generated code.
-    buffer_ = AllocateAssemblerBuffer(AssemblerBase::kMinimalBufferSize,
+    buffer_ = AllocateAssemblerBuffer(AssemblerBase::kDefaultBufferSize,
                                       GetRandomMmapAddr());
     InitRecoveryCode();
@@ -138,7 +138,7 @@ class TrapHandlerTest : public TestWithIsolate,
     // Create a code snippet where we can jump to to recover from a signal or
     // exception. The code snippet only consists of a return statement.
     recovery_buffer_ = AllocateAssemblerBuffer(
-        AssemblerBase::kMinimalBufferSize, GetRandomMmapAddr());
+        AssemblerBase::kDefaultBufferSize, GetRandomMmapAddr());
     MacroAssembler masm(nullptr, AssemblerOptions{}, CodeObjectRequired::kNo,
                         recovery_buffer_->CreateView());