[cctests/assembler-arm64] Rework constant pool test

This CL changes a constant pool test to include tests for the marker
and the encoded pool size.
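
The reworked checks assume this layout for an emitted pool: a branch that
jumps over the pool, an LDR-literal "marker" instruction whose immediate is
checked against the expected pool size in 32-bit words, optional padding so
the 64-bit entries are 8-byte aligned, then the entries themselves. The
sketch below is not V8 code; it only reproduces, with constants matching the
test (4-byte instructions, 8-byte entries, two literals), the arithmetic done
by the new PoolSizeAt helper and the marker check:

  // Standalone sketch (not V8 code): mirrors the arithmetic of the test's
  // new PoolSizeAt lambda and marker check. The constants are assumptions
  // taken from the test itself.
  #include <cassert>
  #include <cstddef>

  int main() {
    const size_t kInstrSize = 4;    // AArch64 instruction width
    const size_t kEntrySize = 8;    // 64-bit literal entries
    const size_t pool_entries = 2;  // the test loads two literals

    // Total pool footprint: jump around the pool plus the two-instruction
    // prologue, then padding so the 64-bit entries are 8-byte aligned.
    auto PoolSizeAt = [&](size_t pc_offset) {
      size_t prologue_size = 2 * kInstrSize + kInstrSize;
      size_t pc = pc_offset + prologue_size;
      size_t padding = (pc % 8 == 0) ? 0 : 4;
      return prologue_size + pool_entries * kEntrySize + padding;
    };

    // Immediate expected in the LDR-literal marker: the pool contents in
    // 32-bit words (two words per entry, one extra word, plus any padding).
    auto MarkerImm = [&](size_t pc_offset) {
      size_t padding = ((pc_offset + kInstrSize) % kEntrySize == 0) ? 0 : 1;
      return pool_entries * 2 + 1 + padding;
    };

    // Pool dumped at an offset where no padding word is needed:
    assert(PoolSizeAt(68) == 28);  // 12-byte prologue + 2 * 8-byte entries
    assert(MarkerImm(68) == 5);    // 4 entry words + 1
    // Shifting the dump point by one instruction (what the prepadding = 1
    // variants aim for) inserts one 4-byte padding word:
    assert(PoolSizeAt(64) == 32);
    assert(MarkerImm(64) == 6);
    return 0;
  }

The EmitExpected tests use the maximum pool distance so emission is forced at
both alignments (prepadding 0 and 1); the NoEmitExpected tests stop one
instruction short of that distance, so the pool must still be pending.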

Change-Id: Ia5cfd173e5d523a02252fd3b14f302e5c8994881
Reviewed-on: https://chromium-review.googlesource.com/1186626
Reviewed-by: Georg Neis <neis@chromium.org>
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55423}
Sigurd Schneider authored on 2018-08-27 10:28:32 +02:00, committed by Commit Bot
parent 8c171105df
commit 86d3005059


@@ -130,13 +130,11 @@ static void InitializeVM() {
       new Decoder<DispatchingDecoderVisitor>(); \
   Simulator simulator(decoder); \
   PrintDisassembler* pdis = nullptr; \
-  RegisterDump core;
-  /* if (Cctest::trace_sim()) { \
-    pdis = new PrintDisassembler(stdout); \
-    decoder.PrependVisitor(pdis); \
-  } \
-  */
+  RegisterDump core; \
+  if (i::FLAG_trace_sim) { \
+    pdis = new PrintDisassembler(stdout); \
+    decoder->PrependVisitor(pdis); \
+  }
 
 // Reset the assembler and simulator, so that instructions can be generated,
 // but don't actually emit any code. This can be used by tests that need to
@@ -6742,65 +6740,72 @@ TEST(ldr_literal) {
 #ifdef DEBUG
 // These tests rely on functions available in debug mode.
-enum LiteralPoolEmitOption { NoJumpRequired, JumpRequired };
+enum LiteralPoolEmitOutcome { EmitExpected, NoEmitExpected };
 
-static void LdrLiteralRangeHelper(int range_, LiteralPoolEmitOption option,
-                                  bool expect_dump) {
-  CHECK_GT(range_, 0);
-  SETUP_SIZE(range_ + 1024);
+static void LdrLiteralRangeHelper(size_t range, LiteralPoolEmitOutcome outcome,
+                                  size_t prepadding = 0) {
+  SETUP_SIZE(static_cast<int>(range + 1024));
 
-  Label label_1, label_2;
-  size_t range = static_cast<size_t>(range_);
   size_t code_size = 0;
-  size_t pool_guard_size;
-  if (option == NoJumpRequired) {
-    // Space for an explicit branch.
-    pool_guard_size = kInstrSize;
-  } else {
-    pool_guard_size = 0;
-  }
+  const size_t pool_entries = 2;
+  const size_t kEntrySize = 8;
 
   START();
   // Force a pool dump so the pool starts off empty.
   __ CheckConstPool(true, true);
   CHECK_CONSTANT_POOL_SIZE(0);
 
+  // Emit prepadding to influence alignment of the pool; we don't count this
+  // into code size.
+  for (size_t i = 0; i < prepadding; ++i) __ Nop();
+
   LoadLiteral(&masm, x0, 0x1234567890ABCDEFUL);
   LoadLiteral(&masm, x1, 0xABCDEF1234567890UL);
-  CHECK_CONSTANT_POOL_SIZE(16);
   code_size += 2 * kInstrSize;
+  CHECK_CONSTANT_POOL_SIZE(pool_entries * kEntrySize);
 
   // Check that the requested range (allowing space for a branch over the pool)
   // can be handled by this test.
-  CHECK_LE(code_size + pool_guard_size, range);
+  CHECK_LE(code_size, range);
 
-  // Emit NOPs up to 'range', leaving space for the pool guard.
-  while ((code_size + pool_guard_size + kInstrSize) < range) {
+  auto PoolSizeAt = [pool_entries](int pc_offset) {
+    // To determine padding, consider the size of the prologue of the pool,
+    // and the jump around the pool, which we always need.
+    size_t prologue_size = 2 * kInstrSize + kInstrSize;
+    size_t pc = pc_offset + prologue_size;
+    const size_t padding = IsAligned(pc, 8) ? 0 : 4;
+    return prologue_size + pool_entries * kEntrySize + padding;
+  };
+
+  int pc_offset_before_emission = -1;
+  // Emit NOPs up to 'range'.
+  while (code_size < range) {
+    pc_offset_before_emission = __ pc_offset() + kInstrSize;
     __ Nop();
     code_size += kInstrSize;
   }
+  CHECK_EQ(code_size, range);
 
-  // Emit the guard sequence before the literal pool.
-  if (option == NoJumpRequired) {
-    __ B(&label_1);
-    code_size += kInstrSize;
-  }
-
-  // The next instruction will trigger pool emission when expect_dump is true.
-  CHECK_EQ(code_size, range - kInstrSize);
-  CHECK_CONSTANT_POOL_SIZE(16);
-
-  // Possibly generate a literal pool.
-  __ Nop();
-
-  __ Bind(&label_1);
-  if (expect_dump) {
+  if (outcome == EmitExpected) {
     CHECK_CONSTANT_POOL_SIZE(0);
+    // Check that the size of the emitted constant pool is as expected.
+    size_t pool_size = PoolSizeAt(pc_offset_before_emission);
+    CHECK_EQ(pc_offset_before_emission + pool_size, __ pc_offset());
+    byte* pool_start = buf + pc_offset_before_emission;
+    Instruction* branch = reinterpret_cast<Instruction*>(pool_start);
+    CHECK(branch->IsImmBranch());
+    CHECK_EQ(pool_size, branch->ImmPCOffset());
+    Instruction* marker =
+        reinterpret_cast<Instruction*>(pool_start + kInstrSize);
+    CHECK(marker->IsLdrLiteralX());
+    const size_t padding =
+        IsAligned(pc_offset_before_emission + kInstrSize, kEntrySize) ? 0 : 1;
+    CHECK_EQ(pool_entries * 2 + 1 + padding, marker->ImmLLiteral());
   } else {
-    CHECK_CONSTANT_POOL_SIZE(16);
+    CHECK_EQ(outcome, NoEmitExpected);
+    CHECK_CONSTANT_POOL_SIZE(pool_entries * kEntrySize);
+    CHECK_EQ(pc_offset_before_emission, __ pc_offset());
   }
 
   // Force a pool flush to check that a second pool functions correctly.
@@ -6810,7 +6815,7 @@ static void LdrLiteralRangeHelper(int range_, LiteralPoolEmitOption option,
   // These loads should be after the pool (and will require a new one).
   LoadLiteral(&masm, x4, 0x34567890ABCDEF12UL);
   LoadLiteral(&masm, x5, 0xABCDEF0123456789UL);
-  CHECK_CONSTANT_POOL_SIZE(16);
+  CHECK_CONSTANT_POOL_SIZE(pool_entries * kEntrySize);
   END();
 
   RUN();
@@ -6824,34 +6829,32 @@ static void LdrLiteralRangeHelper(int range_, LiteralPoolEmitOption option,
   TEARDOWN();
 }
 
-TEST(ldr_literal_range_1) {
+TEST(ldr_literal_range_max_dist_emission_1) {
   INIT_V8();
   LdrLiteralRangeHelper(MacroAssembler::GetApproxMaxDistToConstPoolForTesting(),
-                        NoJumpRequired, true);
+                        EmitExpected);
 }
 
+TEST(ldr_literal_range_max_dist_emission_2) {
+  INIT_V8();
+  LdrLiteralRangeHelper(MacroAssembler::GetApproxMaxDistToConstPoolForTesting(),
+                        EmitExpected, 1);
+}
+
-TEST(ldr_literal_range_2) {
+TEST(ldr_literal_range_max_dist_no_emission_1) {
   INIT_V8();
   LdrLiteralRangeHelper(
       MacroAssembler::GetApproxMaxDistToConstPoolForTesting() - kInstrSize,
-      NoJumpRequired, false);
+      NoEmitExpected);
 }
 
-TEST(ldr_literal_range_3) {
-  INIT_V8();
-  LdrLiteralRangeHelper(MacroAssembler::GetCheckConstPoolIntervalForTesting(),
-                        JumpRequired, false);
-}
-
-TEST(ldr_literal_range_4) {
+TEST(ldr_literal_range_max_dist_no_emission_2) {
   INIT_V8();
   LdrLiteralRangeHelper(
-      MacroAssembler::GetCheckConstPoolIntervalForTesting() - kInstrSize,
-      JumpRequired, false);
+      MacroAssembler::GetApproxMaxDistToConstPoolForTesting() - kInstrSize,
+      NoEmitExpected, 1);
 }
 
 #endif
 
 TEST(add_sub_imm) {