[Interpreter] Move non-effectful accumulator load elision to BytecodeArrayWriter

Moves the logic for eliding non-effectful accumulator loads from the
peephole optimizer to the BytecodeArrayWriter.

BUG=v8:6194

Change-Id: I05fbe4ee8ac340e5c355285d0b47e4a9d52fd0a8
Reviewed-on: https://chromium-review.googlesource.com/469828
Commit-Queue: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#44560}
Ross McIlroy 2017-04-11 12:01:28 +01:00 committed by Commit Bot
parent 2468dacd69
commit f3e4b7c14d
9 changed files with 133 additions and 128 deletions
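
For orientation before the diffs: a minimal sketch of the check the writer now applies before emitting each bytecode. This is not part of the CL; the helper name CanElideLast is hypothetical, but the condition mirrors the new MaybeElideLastBytecode below.

    // Illustration only: the previous bytecode can be dropped when it merely
    // loaded the accumulator (no other effects) and the next bytecode
    // overwrites the accumulator without reading it, e.g.
    //   LdaSmi [1]; LdaSmi [2]  ==>  LdaSmi [2]
    bool CanElideLast(Bytecode last, Bytecode next, bool last_has_source_info,
                      bool next_has_source_info) {
      return Bytecodes::IsAccumulatorLoadWithoutEffects(last) &&
             Bytecodes::GetAccumulatorUse(next) == AccumulatorUse::kWrite &&
             (!last_has_source_info || !next_has_source_info);
    }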


@@ -312,6 +312,8 @@ DEFINE_BOOL(ignition, false, "use ignition interpreter")
 DEFINE_BOOL(ignition_deadcode, true,
             "use ignition dead code elimination optimizer")
 DEFINE_BOOL(ignition_osr, true, "enable support for OSR from ignition code")
+DEFINE_BOOL(ignition_elide_noneffectful_bytecodes, true,
+            "elide bytecodes which won't have any external effect")
 DEFINE_BOOL(ignition_peephole, true, "use ignition peephole optimizer")
 DEFINE_BOOL(ignition_reo, true, "use ignition register equivalence optimizer")
 DEFINE_BOOL(ignition_filter_expression_positions, true,


@@ -24,7 +24,12 @@ BytecodeArrayWriter::BytecodeArrayWriter(
     : bytecodes_(zone),
       unbound_jumps_(0),
       source_position_table_builder_(zone, source_position_mode),
-      constant_array_builder_(constant_array_builder) {
+      constant_array_builder_(constant_array_builder),
+      last_bytecode_(Bytecode::kIllegal),
+      last_bytecode_offset_(0),
+      last_bytecode_had_source_info_(false),
+      elide_noneffectful_bytecodes_(
+          FLAG_ignition_elide_noneffectful_bytecodes) {
   bytecodes_.reserve(512);  // Derived via experimentation.
 }
 
@@ -55,6 +60,7 @@ Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
 // override
 void BytecodeArrayWriter::Write(BytecodeNode* node) {
   DCHECK(!Bytecodes::IsJump(node->bytecode()));
+  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());
   UpdateSourcePositionTable(node);
   EmitBytecode(node);
 }
@@ -62,6 +68,7 @@ void BytecodeArrayWriter::Write(BytecodeNode* node) {
 // override
 void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
   DCHECK(Bytecodes::IsJump(node->bytecode()));
+  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());
   UpdateSourcePositionTable(node);
   EmitJump(node, label);
 }
@@ -75,6 +82,7 @@ void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
     // Now treat as if the label will only be back referred to.
   }
   label->bind_to(current_offset);
+  InvalidateLastBytecode();
 }
 
@@ -88,6 +96,7 @@ void BytecodeArrayWriter::BindLabel(const BytecodeLabel& target,
     // Now treat as if the label will only be back referred to.
   }
   label->bind_to(target.offset());
+  InvalidateLastBytecode();
 }
 
 void BytecodeArrayWriter::UpdateSourcePositionTable(
@@ -101,6 +110,31 @@ void BytecodeArrayWriter::UpdateSourcePositionTable(
   }
 }
 
+void BytecodeArrayWriter::MaybeElideLastBytecode(Bytecode next_bytecode,
+                                                 bool has_source_info) {
+  if (!elide_noneffectful_bytecodes_) return;
+
+  // If the last bytecode loaded the accumulator without any external effect,
+  // and the next bytecode clobbers this load without reading the accumulator,
+  // then the previous bytecode can be elided as it has no effect.
+  if (Bytecodes::IsAccumulatorLoadWithoutEffects(last_bytecode_) &&
+      Bytecodes::GetAccumulatorUse(next_bytecode) == AccumulatorUse::kWrite &&
+      (!last_bytecode_had_source_info_ || !has_source_info)) {
+    DCHECK_GT(bytecodes()->size(), last_bytecode_offset_);
+    bytecodes()->resize(last_bytecode_offset_);
+    // If the last bytecode had source info we will transfer the source info
+    // to this bytecode.
+    has_source_info |= last_bytecode_had_source_info_;
+  }
+  last_bytecode_ = next_bytecode;
+  last_bytecode_had_source_info_ = has_source_info;
+  last_bytecode_offset_ = bytecodes()->size();
+}
+
+void BytecodeArrayWriter::InvalidateLastBytecode() {
+  last_bytecode_ = Bytecode::kIllegal;
+}
+
 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
   DCHECK_NE(node->bytecode(), Bytecode::kIllegal);
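
To make the elision and the source-info handoff concrete, a hypothetical trace through the writer (offsets and positions invented; Write() is the unit-test helper shown further down):

    Write(Bytecode::kLdaSmi, 1, {55, true});  // emitted; its offset and
                                              // position 55 are remembered
    Write(Bytecode::kLdaSmi, 2);              // previous LdaSmi truncated away;
                                              // this bytecode lands at the old
                                              // offset, so position 55 now
                                              // describes it
    Write(Bytecode::kLdaSmi, 3, {60, true});  // both bytecodes carry positions,
                                              // so the earlier load is kept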


@@ -65,6 +65,9 @@ class V8_EXPORT_PRIVATE BytecodeArrayWriter final
   void EmitJump(BytecodeNode* node, BytecodeLabel* label);
   void UpdateSourcePositionTable(const BytecodeNode* const node);
 
+  void MaybeElideLastBytecode(Bytecode next_bytecode, bool has_source_info);
+  void InvalidateLastBytecode();
+
   ZoneVector<uint8_t>* bytecodes() { return &bytecodes_; }
   SourcePositionTableBuilder* source_position_table_builder() {
     return &source_position_table_builder_;
@@ -78,6 +81,11 @@ class V8_EXPORT_PRIVATE BytecodeArrayWriter final
   SourcePositionTableBuilder source_position_table_builder_;
   ConstantArrayBuilder* constant_array_builder_;
 
+  Bytecode last_bytecode_;
+  size_t last_bytecode_offset_;
+  bool last_bytecode_had_source_info_;
+  bool elide_noneffectful_bytecodes_;
+
   friend class BytecodeArrayWriterUnittest;
   DISALLOW_COPY_AND_ASSIGN(BytecodeArrayWriter);
 };


@@ -43,6 +43,8 @@ class V8_EXPORT_PRIVATE BytecodeRegisterOptimizer final
 
   // Perform explicit register transfer operations.
   void DoLdar(Register input) {
+    // TODO(rmcilroy): Avoid treating accumulator loads as clobbering the
+    // accumulator until the value is actually materialized in the accumulator.
     RegisterInfo* input_info = GetRegisterInfo(input);
     RegisterTransfer(input_info, accumulator_info_);
   }


@@ -79,33 +79,6 @@ const char* PeepholeActionTableWriter::kNamespaceElements[] = {"v8", "internal",
 // static
 PeepholeActionAndData PeepholeActionTableWriter::LookupActionAndData(
     Bytecode last, Bytecode current) {
-  // The accumulator is invisible to the debugger. If there is a sequence
-  // of consecutive accumulator loads (that don't have side effects) then
-  // only the final load is potentially visible.
-  if (Bytecodes::IsAccumulatorLoadWithoutEffects(last) &&
-      Bytecodes::IsAccumulatorLoadWithoutEffects(current)) {
-    return {PeepholeAction::kElideLastAction, Bytecode::kIllegal};
-  }
-
-  // The current instruction clobbers the accumulator without reading
-  // it. The load in the last instruction can be elided as it has no
-  // effect.
-  if (Bytecodes::IsAccumulatorLoadWithoutEffects(last) &&
-      Bytecodes::GetAccumulatorUse(current) == AccumulatorUse::kWrite) {
-    return {PeepholeAction::kElideLastAction, Bytecode::kIllegal};
-  }
-
-  // Ldar and Star make the accumulator and register hold equivalent
-  // values. Only the first bytecode is needed if there's a sequence
-  // of back-to-back Ldar and Star bytecodes with the same operand.
-  if (Bytecodes::IsLdarOrStar(last) && Bytecodes::IsLdarOrStar(current)) {
-    return {PeepholeAction::kElideCurrentIfOperand0MatchesAction,
-            Bytecode::kIllegal};
-  }
-
-  // TODO(rmcilroy): Add elide for consecutive mov to and from the same
-  // register.
-
   // If there is no last bytecode to optimize against, store the incoming
   // bytecode or for jumps emit incoming bytecode immediately.
   if (last == Bytecode::kIllegal) {


@@ -147,23 +147,24 @@ snippet: "
 "
 frame size: 7
 parameter count: 1
-bytecode array length: 85
+bytecode array length: 87
 bytecodes: [
   /* 30 E> */ B(StackCheck),
   /* 42 S> */ B(CreateObjectLiteral), U8(0), U8(2), U8(1), R(1),
               B(Mov), R(1), R(0),
   /* 77 S> */ B(CreateArrayLiteral), U8(1), U8(3), U8(9),
-              B(JumpIfUndefined), U8(70),
-              B(JumpIfNull), U8(68),
+              B(JumpIfUndefined), U8(72),
+              B(JumpIfNull), U8(70),
               B(ToObject), R(1),
               B(ForInPrepare), R(1), R(2),
               B(LdaZero),
               B(Star), R(5),
   /* 68 S> */ B(ForInContinue), R(5), R(4),
-              B(JumpIfFalse), U8(55),
+              B(JumpIfFalse), U8(57),
               B(ForInNext), R(1), R(5), R(2), U8(14),
-              B(JumpIfUndefined), U8(41),
+              B(JumpIfUndefined), U8(43),
               B(Star), R(6),
+              B(Ldar), R(6),
   /* 67 E> */ B(StaNamedPropertySloppy), R(0), U8(2), U8(12),
   /* 62 E> */ B(StackCheck),
   /* 95 S> */ B(Nop),
@@ -182,7 +183,7 @@ bytecodes: [
   /* 143 S> */ B(Jump), U8(9),
               B(ForInStep), R(5),
               B(Star), R(5),
-              B(JumpLoop), U8(55), I8(0),
+              B(JumpLoop), U8(57), I8(0),
               B(LdaUndefined),
   /* 152 S> */ B(Return),
 ]


@@ -25,10 +25,10 @@ class BytecodeAnalysisTest : public TestWithIsolateAndZone {
   ~BytecodeAnalysisTest() override {}
 
   static void SetUpTestCase() {
-    old_FLAG_ignition_peephole_ = i::FLAG_ignition_peephole;
+    CHECK_NULL(save_flags_);
+    save_flags_ = new SaveFlags();
+    i::FLAG_ignition_elide_noneffectful_bytecodes = false;
     i::FLAG_ignition_peephole = false;
-
-    old_FLAG_ignition_reo_ = i::FLAG_ignition_reo;
     i::FLAG_ignition_reo = false;
 
     TestWithIsolateAndZone::SetUpTestCase();
@@ -36,8 +36,8 @@ class BytecodeAnalysisTest : public TestWithIsolateAndZone {
 
   static void TearDownTestCase() {
     TestWithIsolateAndZone::TearDownTestCase();
-    i::FLAG_ignition_peephole = old_FLAG_ignition_peephole_;
-    i::FLAG_ignition_reo = old_FLAG_ignition_reo_;
+    delete save_flags_;
+    save_flags_ = nullptr;
   }
 
   std::string ToLivenessString(const BytecodeLivenessState* liveness) const {
@@ -83,14 +83,12 @@ class BytecodeAnalysisTest : public TestWithIsolateAndZone {
   }
 
  private:
-  static bool old_FLAG_ignition_peephole_;
-  static bool old_FLAG_ignition_reo_;
+  static SaveFlags* save_flags_;
 
   DISALLOW_COPY_AND_ASSIGN(BytecodeAnalysisTest);
 };
 
-bool BytecodeAnalysisTest::old_FLAG_ignition_peephole_;
-bool BytecodeAnalysisTest::old_FLAG_ignition_reo_;
+SaveFlags* BytecodeAnalysisTest::save_flags_ = nullptr;
 
 TEST_F(BytecodeAnalysisTest, EmptyBlock) {
   interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
@@ -132,9 +130,6 @@ TEST_F(BytecodeAnalysisTest, StoreThenLoad) {
 
   builder.StoreAccumulatorInRegister(reg_0);
   expected_liveness.emplace_back("...L", "L...");
 
-  builder.LoadNull();
-  expected_liveness.emplace_back("L...", "L...");
-
   builder.LoadAccumulatorWithRegister(reg_0);
   expected_liveness.emplace_back("L...", "...L");


@@ -20,6 +20,8 @@ namespace v8 {
 namespace internal {
 namespace interpreter {
 
+#define R(i) static_cast<uint32_t>(Register(i).ToOperand())
+
 class BytecodeArrayWriterUnittest : public TestWithIsolateAndZone {
  public:
   BytecodeArrayWriterUnittest()
@@ -113,25 +115,35 @@ TEST_F(BytecodeArrayWriterUnittest, SimpleExample) {
   Write(Bytecode::kLdaSmi, 127, {55, true});
   CHECK_EQ(bytecodes()->size(), 3u);
 
+  Write(Bytecode::kStar, Register(20).ToOperand());
+  CHECK_EQ(bytecodes()->size(), 5u);
+
   Write(Bytecode::kLdar, Register(200).ToOperand());
-  CHECK_EQ(bytecodes()->size(), 7u);
+  CHECK_EQ(bytecodes()->size(), 9u);
 
   Write(Bytecode::kReturn, {70, true});
-  CHECK_EQ(bytecodes()->size(), 8u);
+  CHECK_EQ(bytecodes()->size(), 10u);
 
-  static const uint8_t bytes[] = {B(StackCheck), B(LdaSmi), U8(127), B(Wide),
-                                  B(Ldar), R16(200), B(Return)};
-  CHECK_EQ(bytecodes()->size(), arraysize(bytes));
-  for (size_t i = 0; i < arraysize(bytes); ++i) {
-    CHECK_EQ(bytecodes()->at(i), bytes[i]);
+  static const uint8_t expected_bytes[] = {
+      // clang-format off
+      /* 0 10 E> */ B(StackCheck),
+      /* 1 55 S> */ B(LdaSmi), U8(127),
+      /* 3       */ B(Star), R8(20),
+      /* 5       */ B(Wide), B(Ldar), R16(200),
+      /* 9 70 S> */ B(Return),
+      // clang-format on
+  };
+  CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
+  for (size_t i = 0; i < arraysize(expected_bytes); ++i) {
+    CHECK_EQ(bytecodes()->at(i), expected_bytes[i]);
   }
 
   Handle<BytecodeArray> bytecode_array = writer()->ToBytecodeArray(
       isolate(), 0, 0, factory()->empty_fixed_array());
-  CHECK_EQ(bytecodes()->size(), arraysize(bytes));
+  CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
 
   PositionTableEntry expected_positions[] = {
-      {0, 10, false}, {1, 55, true}, {7, 70, true}};
+      {0, 10, false}, {1, 55, true}, {9, 70, true}};
   SourcePositionTableIterator source_iterator(
       bytecode_array->source_position_table());
   for (size_t i = 0; i < arraysize(expected_positions); ++i) {
@@ -180,7 +192,6 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
   BytecodeLabel back_jump, jump_for_in, jump_end_1, jump_end_2, jump_end_3;
 
-#define R(i) static_cast<uint32_t>(Register(i).ToOperand())
 
   Write(Bytecode::kStackCheck, {30, false});
   Write(Bytecode::kLdaConstant, U8(0), {42, true});
   Write(Bytecode::kAdd, R(1), U8(1), {42, false});
@@ -209,7 +220,6 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
   writer()->BindLabel(&jump_end_3);
   Write(Bytecode::kLdaUndefined);
   Write(Bytecode::kReturn, {85, true});
-#undef R
 
   CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
   for (size_t i = 0; i < arraysize(expected_bytes); ++i) {
@@ -232,6 +242,57 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
   CHECK(source_iterator.done());
 }
 
+TEST_F(BytecodeArrayWriterUnittest, ElideNoneffectfulBytecodes) {
+  if (!i::FLAG_ignition_elide_noneffectful_bytecodes) return;
+
+  static const uint8_t expected_bytes[] = {
+      // clang-format off
+      /* 0  10 E> */ B(StackCheck),
+      /* 1  55 S> */ B(Ldar), R8(20),
+      /* 3        */ B(Star), R8(20),
+      /* 5        */ B(CreateMappedArguments),
+      /* 6  60 S> */ B(LdaSmi), U8(127),
+      /* 8  70 S> */ B(Ldar), R8(20),
+      /* 10 75 S> */ B(Return),
+      // clang-format on
+  };
+
+  static const PositionTableEntry expected_positions[] = {{0, 10, false},
+                                                          {1, 55, true},
+                                                          {6, 60, false},
+                                                          {8, 70, true},
+                                                          {10, 75, true}};
+
+  Write(Bytecode::kStackCheck, {10, false});
+  Write(Bytecode::kLdaSmi, 127, {55, true});  // Should be elided.
+  Write(Bytecode::kLdar, Register(20).ToOperand());
+  Write(Bytecode::kStar, Register(20).ToOperand());
+  Write(Bytecode::kLdar, Register(20).ToOperand());  // Should be elided.
+  Write(Bytecode::kCreateMappedArguments);
+  Write(Bytecode::kLdaSmi, 127, {60, false});  // Not elided due to source info.
+  Write(Bytecode::kLdar, Register(20).ToOperand(), {70, true});
+  Write(Bytecode::kReturn, {75, true});
+
+  CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
+  for (size_t i = 0; i < arraysize(expected_bytes); ++i) {
+    CHECK_EQ(static_cast<int>(bytecodes()->at(i)),
+             static_cast<int>(expected_bytes[i]));
+  }
+
+  Handle<BytecodeArray> bytecode_array = writer()->ToBytecodeArray(
+      isolate(), 0, 0, factory()->empty_fixed_array());
+  SourcePositionTableIterator source_iterator(
+      bytecode_array->source_position_table());
+  for (size_t i = 0; i < arraysize(expected_positions); ++i) {
+    const PositionTableEntry& expected = expected_positions[i];
+    CHECK_EQ(source_iterator.code_offset(), expected.code_offset);
+    CHECK_EQ(source_iterator.source_position().ScriptOffset(),
+             expected.source_position);
+    CHECK_EQ(source_iterator.is_statement(), expected.is_statement);
+    source_iterator.Advance();
+  }
+  CHECK(source_iterator.done());
+}
+
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8


@@ -92,77 +92,6 @@ TEST_F(BytecodePeepholeOptimizerTest, FlushOnBind) {
   CHECK_EQ(add, last_written());
 }
 
-// Tests covering BytecodePeepholeOptimizer::CanElideCurrent().
-
-TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRy) {
-  BytecodeNode first(Bytecode::kStar, Register(0).ToOperand());
-  BytecodeNode second(Bytecode::kLdar, Register(1).ToOperand());
-  optimizer()->Write(&first);
-  CHECK_EQ(write_count(), 0);
-  optimizer()->Write(&second);
-  CHECK_EQ(write_count(), 1);
-  CHECK_EQ(last_written(), first);
-  Flush();
-  CHECK_EQ(write_count(), 2);
-  CHECK_EQ(last_written(), second);
-}
-
-TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRx) {
-  BytecodeLabel label;
-  BytecodeNode first(Bytecode::kStar, Register(0).ToOperand());
-  BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand());
-  optimizer()->Write(&first);
-  optimizer()->Write(&second);
-  CHECK_EQ(write_count(), 0);
-  Flush();
-  CHECK_EQ(write_count(), 1);
-  CHECK_EQ(last_written(), first);
-}
-
-TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRxStatement) {
-  BytecodeNode first(Bytecode::kStar, Register(0).ToOperand());
-  BytecodeSourceInfo source_info(3, true);
-  BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(), source_info);
-  optimizer()->Write(&first);
-  CHECK_EQ(write_count(), 0);
-  optimizer()->Write(&second);
-  CHECK_EQ(write_count(), 1);
-  CHECK_EQ(last_written(), first);
-  Flush();
-  CHECK_EQ(write_count(), 2);
-  CHECK_EQ(last_written().bytecode(), Bytecode::kNop);
-  CHECK_EQ(last_written().source_info(), source_info);
-}
-
-// Tests covering BytecodePeepholeOptimizer::CanElideLast().
-
-TEST_F(BytecodePeepholeOptimizerTest, LdaTrueLdaFalse) {
-  BytecodeNode first(Bytecode::kLdaTrue);
-  BytecodeNode second(Bytecode::kLdaFalse);
-  optimizer()->Write(&first);
-  CHECK_EQ(write_count(), 0);
-  optimizer()->Write(&second);
-  CHECK_EQ(write_count(), 0);
-  Flush();
-  CHECK_EQ(write_count(), 1);
-  CHECK_EQ(last_written(), second);
-}
-
-TEST_F(BytecodePeepholeOptimizerTest, LdaTrueStatementLdaFalse) {
-  BytecodeSourceInfo source_info(3, true);
-  BytecodeNode first(Bytecode::kLdaTrue, source_info);
-  BytecodeNode second(Bytecode::kLdaFalse);
-  optimizer()->Write(&first);
-  CHECK_EQ(write_count(), 0);
-  optimizer()->Write(&second);
-  CHECK_EQ(write_count(), 0);
-  Flush();
-  CHECK_EQ(write_count(), 1);
-  CHECK_EQ(last_written(), second);
-  CHECK(second.source_info().is_statement());
-  CHECK_EQ(second.source_info().source_position(), 3);
-}
-
 // Tests covering BytecodePeepholeOptimizer::UpdateLastAndCurrentBytecodes().
 
 }  // namespace interpreter