[Interpreter] Optimize BytecodeArrayBuilder and BytecodeArrayWriter.

This CL optimizes the code in BytecodeArrayBuilder and
BytecodeArrayWriter by making the following main changes:

 - Move operand scale calculation out of BytecodeArrayWriter to the
BytecodeNode constructor, where the decision on which operands are
scalable can generally be statically decided by the compiler.
 - Move the maximum register calculation out of BytecodeArrayWriter
and into BytecodeRegisterOptimizer (which is the only place outside
BytecodeGenerator which updates which registers are used). This
avoids the BytecodeArrayWriter needing to know the operand types
of a node as it writes it.
 - Modify EmitBytecodes to use individual push_backs rather than
building a buffer and calling insert, since this turns out to be faster.
 - Initialize BytecodeArrayWriter's bytecode vector by reserving 512
bytes.
 - Make common functions in Bytecodes constexpr so that they
can be statically calculated by the compiler.
 - Move common functions and constructors in Bytecodes and
BytecodeNode to the header so that they can be inlined.
 - Change large static switch statements in Bytecodes to const array
lookups, and move to the header to allow inlining.

I also took the opportunity to remove a number of unused helper
functions, and rework some others for consistency.

This reduces the percentage of time spent in making BytecodeArrays
in CodeLoad from ~15% to ~11% according to perf. The
CodeLoad score increases by around 2%.

BUG=v8:4280

Review-Url: https://codereview.chromium.org/2351763002
Cr-Commit-Position: refs/heads/master@{#39599}
This commit is contained in:
rmcilroy 2016-09-21 08:02:32 -07:00 committed by Commit bot
parent 5784773feb
commit b11a8b4d41
25 changed files with 1326 additions and 1598 deletions

View File

@ -1413,6 +1413,8 @@ v8_source_set("v8_base") {
"src/interpreter/bytecode-generator.h",
"src/interpreter/bytecode-label.cc",
"src/interpreter/bytecode-label.h",
"src/interpreter/bytecode-operands.cc",
"src/interpreter/bytecode-operands.h",
"src/interpreter/bytecode-peephole-optimizer.cc",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-peephole-table.h",
@ -2380,7 +2382,10 @@ v8_executable("mkpeephole") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
"src/interpreter/bytecode-operands.cc",
"src/interpreter/bytecode-operands.h",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-traits.h",
"src/interpreter/bytecodes.cc",
"src/interpreter/bytecodes.h",
"src/interpreter/mkpeephole.cc",

View File

@ -80,86 +80,122 @@ Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray(Isolate* isolate) {
Handle<FixedArray> handler_table =
handler_table_builder()->ToHandlerTable(isolate);
return pipeline_->ToBytecodeArray(isolate, fixed_register_count(),
return pipeline_->ToBytecodeArray(isolate,
fixed_and_temporary_register_count(),
parameter_count(), handler_table);
}
namespace {
static bool ExpressionPositionIsNeeded(Bytecode bytecode) {
// An expression position is always needed if filtering is turned
// off. Otherwise an expression is only needed if the bytecode has
// external side effects.
return !FLAG_ignition_filter_expression_positions ||
!Bytecodes::IsWithoutExternalSideEffects(bytecode);
}
} // namespace
void BytecodeArrayBuilder::AttachSourceInfo(BytecodeNode* node) {
if (latest_source_info_.is_valid()) {
// Statement positions need to be emitted immediately. Expression
// positions can be pushed back until a bytecode is found that can
// throw. Hence we only invalidate the existing source position
// information if it is used.
if (latest_source_info_.is_statement() ||
ExpressionPositionIsNeeded(node->bytecode())) {
node->source_info().Clone(latest_source_info_);
latest_source_info_.set_invalid();
}
}
}
void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
uint32_t operand3) {
DCHECK(OperandsAreValid(bytecode, 4, operand0, operand1, operand2, operand3));
BytecodeNode node(bytecode, operand0, operand1, operand2, operand3);
AttachSourceInfo(&node);
BytecodeNode node(bytecode, operand0, operand1, operand2, operand3,
&latest_source_info_);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2) {
DCHECK(OperandsAreValid(bytecode, 3, operand0, operand1, operand2));
BytecodeNode node(bytecode, operand0, operand1, operand2);
AttachSourceInfo(&node);
BytecodeNode node(bytecode, operand0, operand1, operand2,
&latest_source_info_);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
uint32_t operand1) {
DCHECK(OperandsAreValid(bytecode, 2, operand0, operand1));
BytecodeNode node(bytecode, operand0, operand1);
AttachSourceInfo(&node);
BytecodeNode node(bytecode, operand0, operand1, &latest_source_info_);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0) {
DCHECK(OperandsAreValid(bytecode, 1, operand0));
BytecodeNode node(bytecode, operand0);
AttachSourceInfo(&node);
BytecodeNode node(bytecode, operand0, &latest_source_info_);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::Output(Bytecode bytecode) {
DCHECK(OperandsAreValid(bytecode, 0));
BytecodeNode node(bytecode);
AttachSourceInfo(&node);
BytecodeNode node(bytecode, &latest_source_info_);
pipeline()->Write(&node);
}
void BytecodeArrayBuilder::OutputJump(Bytecode bytecode, BytecodeLabel* label) {
BytecodeNode node(bytecode, 0, &latest_source_info_);
pipeline_->WriteJump(&node, label);
LeaveBasicBlock();
}
void BytecodeArrayBuilder::OutputJump(Bytecode bytecode, uint32_t operand0,
BytecodeLabel* label) {
BytecodeNode node(bytecode, 0, operand0, &latest_source_info_);
pipeline_->WriteJump(&node, label);
LeaveBasicBlock();
}
BytecodeArrayBuilder& BytecodeArrayBuilder::BinaryOperation(Token::Value op,
Register reg,
int feedback_slot) {
Output(BytecodeForBinaryOperation(op), RegisterOperand(reg),
UnsignedOperand(feedback_slot));
switch (op) {
case Token::Value::ADD:
Output(Bytecode::kAdd, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::SUB:
Output(Bytecode::kSub, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::MUL:
Output(Bytecode::kMul, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::DIV:
Output(Bytecode::kDiv, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::MOD:
Output(Bytecode::kMod, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::BIT_OR:
Output(Bytecode::kBitwiseOr, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::BIT_XOR:
Output(Bytecode::kBitwiseXor, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::BIT_AND:
Output(Bytecode::kBitwiseAnd, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::SHL:
Output(Bytecode::kShiftLeft, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::SAR:
Output(Bytecode::kShiftRight, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::SHR:
Output(Bytecode::kShiftRightLogical, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
default:
UNREACHABLE();
}
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::CountOperation(Token::Value op,
int feedback_slot) {
Output(BytecodeForCountOperation(op), UnsignedOperand(feedback_slot));
if (op == Token::Value::ADD) {
Output(Bytecode::kInc, UnsignedOperand(feedback_slot));
} else {
DCHECK_EQ(op, Token::Value::SUB);
Output(Bytecode::kDec, UnsignedOperand(feedback_slot));
}
return *this;
}
@ -168,7 +204,6 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LogicalNot() {
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::TypeOf() {
Output(Bytecode::kTypeOf);
return *this;
@ -176,11 +211,43 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::TypeOf() {
BytecodeArrayBuilder& BytecodeArrayBuilder::CompareOperation(
Token::Value op, Register reg, int feedback_slot) {
if (op == Token::INSTANCEOF || op == Token::IN) {
Output(BytecodeForCompareOperation(op), RegisterOperand(reg));
} else {
Output(BytecodeForCompareOperation(op), RegisterOperand(reg),
UnsignedOperand(feedback_slot));
switch (op) {
case Token::Value::EQ:
Output(Bytecode::kTestEqual, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::NE:
Output(Bytecode::kTestNotEqual, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::EQ_STRICT:
Output(Bytecode::kTestEqualStrict, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::LT:
Output(Bytecode::kTestLessThan, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::GT:
Output(Bytecode::kTestGreaterThan, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::LTE:
Output(Bytecode::kTestLessThanOrEqual, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::GTE:
Output(Bytecode::kTestGreaterThanOrEqual, RegisterOperand(reg),
UnsignedOperand(feedback_slot));
break;
case Token::Value::INSTANCEOF:
Output(Bytecode::kTestInstanceOf, RegisterOperand(reg));
break;
case Token::Value::IN:
Output(Bytecode::kTestIn, RegisterOperand(reg));
break;
default:
UNREACHABLE();
}
return *this;
}
@ -254,18 +321,26 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::MoveRegister(Register from,
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadGlobal(int feedback_slot,
TypeofMode typeof_mode) {
// TODO(rmcilroy): Potentially store typeof information in an
// operand rather than having extra bytecodes.
Bytecode bytecode = BytecodeForLoadGlobal(typeof_mode);
Output(bytecode, UnsignedOperand(feedback_slot));
if (typeof_mode == INSIDE_TYPEOF) {
Output(Bytecode::kLdaGlobalInsideTypeof, feedback_slot);
} else {
DCHECK_EQ(typeof_mode, NOT_INSIDE_TYPEOF);
Output(Bytecode::kLdaGlobal, UnsignedOperand(feedback_slot));
}
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreGlobal(
const Handle<String> name, int feedback_slot, LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreGlobal(language_mode);
size_t name_index = GetConstantPoolEntry(name);
Output(bytecode, UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
if (language_mode == SLOPPY) {
Output(Bytecode::kStaGlobalSloppy, UnsignedOperand(name_index),
UnsignedOperand(feedback_slot));
} else {
DCHECK_EQ(language_mode, STRICT);
Output(Bytecode::kStaGlobalStrict, UnsignedOperand(name_index),
UnsignedOperand(feedback_slot));
}
return *this;
}
@ -287,11 +362,13 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::StoreContextSlot(Register context,
BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupSlot(
const Handle<String> name, TypeofMode typeof_mode) {
Bytecode bytecode = (typeof_mode == INSIDE_TYPEOF)
? Bytecode::kLdaLookupSlotInsideTypeof
: Bytecode::kLdaLookupSlot;
size_t name_index = GetConstantPoolEntry(name);
Output(bytecode, UnsignedOperand(name_index));
if (typeof_mode == INSIDE_TYPEOF) {
Output(Bytecode::kLdaLookupSlotInsideTypeof, UnsignedOperand(name_index));
} else {
DCHECK_EQ(typeof_mode, NOT_INSIDE_TYPEOF);
Output(Bytecode::kLdaLookupSlot, UnsignedOperand(name_index));
}
return *this;
}
@ -321,9 +398,13 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupGlobalSlot(
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreLookupSlot(
const Handle<String> name, LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreLookupSlot(language_mode);
size_t name_index = GetConstantPoolEntry(name);
Output(bytecode, UnsignedOperand(name_index));
if (language_mode == SLOPPY) {
Output(Bytecode::kStaLookupSlotSloppy, UnsignedOperand(name_index));
} else {
DCHECK_EQ(language_mode, STRICT);
Output(Bytecode::kStaLookupSlotStrict, UnsignedOperand(name_index));
}
return *this;
}
@ -345,19 +426,29 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::LoadKeyedProperty(
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreNamedProperty(
Register object, const Handle<Name> name, int feedback_slot,
LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreNamedProperty(language_mode);
size_t name_index = GetConstantPoolEntry(name);
Output(bytecode, RegisterOperand(object), UnsignedOperand(name_index),
UnsignedOperand(feedback_slot));
if (language_mode == SLOPPY) {
Output(Bytecode::kStaNamedPropertySloppy, RegisterOperand(object),
UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
} else {
DCHECK_EQ(language_mode, STRICT);
Output(Bytecode::kStaNamedPropertyStrict, RegisterOperand(object),
UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
}
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::StoreKeyedProperty(
Register object, Register key, int feedback_slot,
LanguageMode language_mode) {
Bytecode bytecode = BytecodeForStoreKeyedProperty(language_mode);
Output(bytecode, RegisterOperand(object), RegisterOperand(key),
UnsignedOperand(feedback_slot));
if (language_mode == SLOPPY) {
Output(Bytecode::kStaKeyedPropertySloppy, RegisterOperand(object),
RegisterOperand(key), UnsignedOperand(feedback_slot));
} else {
DCHECK_EQ(language_mode, STRICT);
Output(Bytecode::kStaKeyedPropertyStrict, RegisterOperand(object),
RegisterOperand(key), UnsignedOperand(feedback_slot));
}
return *this;
}
@ -399,11 +490,19 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CreateWithContext(
BytecodeArrayBuilder& BytecodeArrayBuilder::CreateArguments(
CreateArgumentsType type) {
// TODO(rmcilroy): Consider passing the type as a bytecode operand rather
// than having two different bytecodes once we have better support for
// branches in the InterpreterAssembler.
Bytecode bytecode = BytecodeForCreateArguments(type);
Output(bytecode);
switch (type) {
case CreateArgumentsType::kMappedArguments:
Output(Bytecode::kCreateMappedArguments);
break;
case CreateArgumentsType::kUnmappedArguments:
Output(Bytecode::kCreateUnmappedArguments);
break;
case CreateArgumentsType::kRestParameter:
Output(Bytecode::kCreateRestParameter);
break;
default:
UNREACHABLE();
}
return *this;
}
@ -476,54 +575,44 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::Bind(const BytecodeLabel& target,
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(BytecodeNode* node,
BytecodeLabel* label) {
AttachSourceInfo(node);
pipeline_->WriteJump(node, label);
LeaveBasicBlock();
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::Jump(BytecodeLabel* label) {
BytecodeNode node(Bytecode::kJump, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJump, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfTrue(BytecodeLabel* label) {
// The peephole optimizer attempts to simplify JumpIfToBooleanTrue
// to JumpIfTrue.
BytecodeNode node(Bytecode::kJumpIfToBooleanTrue, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpIfToBooleanTrue, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfFalse(BytecodeLabel* label) {
// The peephole optimizer attempts to simplify JumpIfToBooleanFalse
// to JumpIfFalse.
BytecodeNode node(Bytecode::kJumpIfToBooleanFalse, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpIfToBooleanFalse, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfNull(BytecodeLabel* label) {
BytecodeNode node(Bytecode::kJumpIfNull, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpIfNull, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfUndefined(
BytecodeLabel* label) {
BytecodeNode node(Bytecode::kJumpIfUndefined, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpIfUndefined, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfNotHole(
BytecodeLabel* label) {
BytecodeNode node(Bytecode::kJumpIfNotHole, 0);
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpIfNotHole, label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::JumpLoop(BytecodeLabel* label,
int loop_depth) {
BytecodeNode node(Bytecode::kJumpLoop, 0, UnsignedOperand(loop_depth));
return OutputJump(&node, label);
OutputJump(Bytecode::kJumpLoop, UnsignedOperand(loop_depth), label);
return *this;
}
BytecodeArrayBuilder& BytecodeArrayBuilder::StackCheck(int position) {
@ -644,9 +733,16 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::Call(Register callable,
size_t receiver_args_count,
int feedback_slot,
TailCallMode tail_call_mode) {
Bytecode bytecode = BytecodeForCall(tail_call_mode);
Output(bytecode, RegisterOperand(callable), RegisterOperand(receiver_args),
UnsignedOperand(receiver_args_count), UnsignedOperand(feedback_slot));
if (tail_call_mode == TailCallMode::kDisallow) {
Output(Bytecode::kCall, RegisterOperand(callable),
RegisterOperand(receiver_args), UnsignedOperand(receiver_args_count),
UnsignedOperand(feedback_slot));
} else {
DCHECK(tail_call_mode == TailCallMode::kAllow);
Output(Bytecode::kTailCall, RegisterOperand(callable),
RegisterOperand(receiver_args), UnsignedOperand(receiver_args_count),
UnsignedOperand(feedback_slot));
}
return *this;
}
@ -709,7 +805,12 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::CallJSRuntime(
BytecodeArrayBuilder& BytecodeArrayBuilder::Delete(Register object,
LanguageMode language_mode) {
Output(BytecodeForDelete(language_mode), RegisterOperand(object));
if (language_mode == SLOPPY) {
Output(Bytecode::kDeletePropertySloppy, RegisterOperand(object));
} else {
DCHECK_EQ(language_mode, STRICT);
Output(Bytecode::kDeletePropertyStrict, RegisterOperand(object));
}
return *this;
}
@ -731,25 +832,6 @@ void BytecodeArrayBuilder::SetReturnPosition() {
latest_source_info_.MakeStatementPosition(return_position_);
}
void BytecodeArrayBuilder::SetStatementPosition(Statement* stmt) {
if (stmt->position() == kNoSourcePosition) return;
latest_source_info_.MakeStatementPosition(stmt->position());
}
void BytecodeArrayBuilder::SetExpressionPosition(Expression* expr) {
if (expr->position() == kNoSourcePosition) return;
if (!latest_source_info_.is_statement()) {
// Ensure the current expression position is overwritten with the
// latest value.
latest_source_info_.MakeExpressionPosition(expr->position());
}
}
void BytecodeArrayBuilder::SetExpressionAsStatementPosition(Expression* expr) {
if (expr->position() == kNoSourcePosition) return;
latest_source_info_.MakeStatementPosition(expr->position());
}
bool BytecodeArrayBuilder::TemporaryRegisterIsLive(Register reg) const {
return temporary_register_allocator()->RegisterIsLive(reg);
}
@ -813,7 +895,7 @@ bool BytecodeArrayBuilder::OperandsAreValid(
break;
case OperandType::kIdx:
// TODO(leszeks): Possibly split this up into constant pool indices and
// other indices, for checking
// other indices, for checking.
break;
case OperandType::kUImm:
case OperandType::kImm:
@ -856,180 +938,6 @@ bool BytecodeArrayBuilder::OperandsAreValid(
return true;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForBinaryOperation(Token::Value op) {
switch (op) {
case Token::Value::ADD:
return Bytecode::kAdd;
case Token::Value::SUB:
return Bytecode::kSub;
case Token::Value::MUL:
return Bytecode::kMul;
case Token::Value::DIV:
return Bytecode::kDiv;
case Token::Value::MOD:
return Bytecode::kMod;
case Token::Value::BIT_OR:
return Bytecode::kBitwiseOr;
case Token::Value::BIT_XOR:
return Bytecode::kBitwiseXor;
case Token::Value::BIT_AND:
return Bytecode::kBitwiseAnd;
case Token::Value::SHL:
return Bytecode::kShiftLeft;
case Token::Value::SAR:
return Bytecode::kShiftRight;
case Token::Value::SHR:
return Bytecode::kShiftRightLogical;
default:
UNREACHABLE();
return Bytecode::kIllegal;
}
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForCountOperation(Token::Value op) {
switch (op) {
case Token::Value::ADD:
return Bytecode::kInc;
case Token::Value::SUB:
return Bytecode::kDec;
default:
UNREACHABLE();
return Bytecode::kIllegal;
}
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForCompareOperation(Token::Value op) {
switch (op) {
case Token::Value::EQ:
return Bytecode::kTestEqual;
case Token::Value::NE:
return Bytecode::kTestNotEqual;
case Token::Value::EQ_STRICT:
return Bytecode::kTestEqualStrict;
case Token::Value::LT:
return Bytecode::kTestLessThan;
case Token::Value::GT:
return Bytecode::kTestGreaterThan;
case Token::Value::LTE:
return Bytecode::kTestLessThanOrEqual;
case Token::Value::GTE:
return Bytecode::kTestGreaterThanOrEqual;
case Token::Value::INSTANCEOF:
return Bytecode::kTestInstanceOf;
case Token::Value::IN:
return Bytecode::kTestIn;
default:
UNREACHABLE();
return Bytecode::kIllegal;
}
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForStoreNamedProperty(
LanguageMode language_mode) {
switch (language_mode) {
case SLOPPY:
return Bytecode::kStaNamedPropertySloppy;
case STRICT:
return Bytecode::kStaNamedPropertyStrict;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForStoreKeyedProperty(
LanguageMode language_mode) {
switch (language_mode) {
case SLOPPY:
return Bytecode::kStaKeyedPropertySloppy;
case STRICT:
return Bytecode::kStaKeyedPropertyStrict;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForLoadGlobal(TypeofMode typeof_mode) {
return typeof_mode == INSIDE_TYPEOF ? Bytecode::kLdaGlobalInsideTypeof
: Bytecode::kLdaGlobal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForStoreGlobal(
LanguageMode language_mode) {
switch (language_mode) {
case SLOPPY:
return Bytecode::kStaGlobalSloppy;
case STRICT:
return Bytecode::kStaGlobalStrict;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForStoreLookupSlot(
LanguageMode language_mode) {
switch (language_mode) {
case SLOPPY:
return Bytecode::kStaLookupSlotSloppy;
case STRICT:
return Bytecode::kStaLookupSlotStrict;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForCreateArguments(
CreateArgumentsType type) {
switch (type) {
case CreateArgumentsType::kMappedArguments:
return Bytecode::kCreateMappedArguments;
case CreateArgumentsType::kUnmappedArguments:
return Bytecode::kCreateUnmappedArguments;
case CreateArgumentsType::kRestParameter:
return Bytecode::kCreateRestParameter;
}
UNREACHABLE();
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForDelete(LanguageMode language_mode) {
switch (language_mode) {
case SLOPPY:
return Bytecode::kDeletePropertySloppy;
case STRICT:
return Bytecode::kDeletePropertyStrict;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
// static
Bytecode BytecodeArrayBuilder::BytecodeForCall(TailCallMode tail_call_mode) {
switch (tail_call_mode) {
case TailCallMode::kDisallow:
return Bytecode::kCall;
case TailCallMode::kAllow:
return Bytecode::kTailCall;
default:
UNREACHABLE();
}
return Bytecode::kIllegal;
}
} // namespace interpreter
} // namespace internal
} // namespace v8

View File

@ -309,9 +309,24 @@ class BytecodeArrayBuilder final : public ZoneObject {
void InitializeReturnPosition(FunctionLiteral* literal);
void SetStatementPosition(Statement* stmt);
void SetExpressionPosition(Expression* expr);
void SetExpressionAsStatementPosition(Expression* expr);
void SetStatementPosition(Statement* stmt) {
if (stmt->position() == kNoSourcePosition) return;
latest_source_info_.MakeStatementPosition(stmt->position());
}
void SetExpressionPosition(Expression* expr) {
if (expr->position() == kNoSourcePosition) return;
if (!latest_source_info_.is_statement()) {
// Ensure the current expression position is overwritten with the
// latest value.
latest_source_info_.MakeExpressionPosition(expr->position());
}
}
void SetExpressionAsStatementPosition(Expression* expr) {
if (expr->position() == kNoSourcePosition) return;
latest_source_info_.MakeStatementPosition(expr->position());
}
// Accessors
TemporaryRegisterAllocator* temporary_register_allocator() {
@ -345,36 +360,23 @@ class BytecodeArrayBuilder final : public ZoneObject {
private:
friend class BytecodeRegisterAllocator;
static Bytecode BytecodeForBinaryOperation(Token::Value op);
static Bytecode BytecodeForCountOperation(Token::Value op);
static Bytecode BytecodeForCompareOperation(Token::Value op);
static Bytecode BytecodeForStoreNamedProperty(LanguageMode language_mode);
static Bytecode BytecodeForStoreKeyedProperty(LanguageMode language_mode);
static Bytecode BytecodeForLoadGlobal(TypeofMode typeof_mode);
static Bytecode BytecodeForStoreGlobal(LanguageMode language_mode);
static Bytecode BytecodeForStoreLookupSlot(LanguageMode language_mode);
static Bytecode BytecodeForCreateArguments(CreateArgumentsType type);
static Bytecode BytecodeForDelete(LanguageMode language_mode);
static Bytecode BytecodeForCall(TailCallMode tail_call_mode);
INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3));
INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2));
INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1));
INLINE(void Output(Bytecode bytecode, uint32_t operand0));
INLINE(void Output(Bytecode bytecode));
void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3);
void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2);
void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1);
void Output(Bytecode bytecode, uint32_t operand0);
void Output(Bytecode bytecode);
BytecodeArrayBuilder& OutputJump(BytecodeNode* node, BytecodeLabel* label);
INLINE(void OutputJump(Bytecode bytecode, BytecodeLabel* label));
INLINE(void OutputJump(Bytecode bytecode, uint32_t operand0,
BytecodeLabel* label));
bool RegisterIsValid(Register reg) const;
bool OperandsAreValid(Bytecode bytecode, int operand_count,
uint32_t operand0 = 0, uint32_t operand1 = 0,
uint32_t operand2 = 0, uint32_t operand3 = 0) const;
// Attach latest source position to |node|.
void AttachSourceInfo(BytecodeNode* node);
// Set position for return.
void SetReturnPosition();

View File

@ -21,27 +21,23 @@ BytecodeArrayWriter::BytecodeArrayWriter(
Zone* zone, ConstantArrayBuilder* constant_array_builder,
SourcePositionTableBuilder::RecordingMode source_position_mode)
: bytecodes_(zone),
max_register_count_(0),
unbound_jumps_(0),
source_position_table_builder_(zone, source_position_mode),
constant_array_builder_(constant_array_builder) {}
constant_array_builder_(constant_array_builder) {
bytecodes_.reserve(512); // Derived via experimentation.
}
// override
BytecodeArrayWriter::~BytecodeArrayWriter() {}
// override
Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) {
DCHECK_EQ(0, unbound_jumps_);
int bytecode_size = static_cast<int>(bytecodes()->size());
// All locals need a frame slot for the debugger, but may not be
// present in generated code.
int frame_size_for_locals = fixed_register_count * kPointerSize;
int frame_size_used = max_register_count() * kPointerSize;
int frame_size = std::max(frame_size_for_locals, frame_size_used);
int frame_size = register_count * kPointerSize;
Handle<FixedArray> constant_pool =
constant_array_builder()->ToFixedArray(isolate);
Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
@ -104,116 +100,48 @@ void BytecodeArrayWriter::UpdateSourcePositionTable(
}
}
namespace {
OperandScale ScaleForScalableByteOperand(OperandSize operand_size) {
STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
static_cast<int>(OperandScale::kSingle));
STATIC_ASSERT(static_cast<int>(OperandSize::kShort) ==
static_cast<int>(OperandScale::kDouble));
STATIC_ASSERT(static_cast<int>(OperandSize::kQuad) ==
static_cast<int>(OperandScale::kQuadruple));
return static_cast<OperandScale>(operand_size);
}
OperandScale OperandScaleForScalableSignedByte(uint32_t operand_value) {
int32_t signed_operand = static_cast<int32_t>(operand_value);
OperandSize bytes_required = Bytecodes::SizeForSignedOperand(signed_operand);
return ScaleForScalableByteOperand(bytes_required);
}
OperandScale OperandScaleForScalableUnsignedByte(uint32_t operand_value) {
OperandSize bytes_required = Bytecodes::SizeForUnsignedOperand(operand_value);
return ScaleForScalableByteOperand(bytes_required);
}
OperandScale GetOperandScale(const BytecodeNode* const node) {
const OperandTypeInfo* operand_type_infos =
Bytecodes::GetOperandTypeInfos(node->bytecode());
OperandScale operand_scale = OperandScale::kSingle;
int operand_count = node->operand_count();
for (int i = 0; i < operand_count; ++i) {
switch (operand_type_infos[i]) {
case OperandTypeInfo::kScalableSignedByte: {
uint32_t operand = node->operand(i);
operand_scale =
std::max(operand_scale, OperandScaleForScalableSignedByte(operand));
break;
}
case OperandTypeInfo::kScalableUnsignedByte: {
uint32_t operand = node->operand(i);
operand_scale = std::max(operand_scale,
OperandScaleForScalableUnsignedByte(operand));
break;
}
case OperandTypeInfo::kFixedUnsignedByte:
case OperandTypeInfo::kFixedUnsignedShort:
break;
case OperandTypeInfo::kNone:
UNREACHABLE();
break;
}
}
return operand_scale;
}
} // namespace
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
DCHECK_NE(node->bytecode(), Bytecode::kIllegal);
uint8_t buffer[kMaxSizeOfPackedBytecode];
uint8_t* buffer_limit = buffer;
Bytecode bytecode = node->bytecode();
OperandScale operand_scale = node->operand_scale();
OperandScale operand_scale = GetOperandScale(node);
if (operand_scale != OperandScale::kSingle) {
Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
*buffer_limit++ = Bytecodes::ToByte(prefix);
bytecodes()->push_back(Bytecodes::ToByte(prefix));
}
Bytecode bytecode = node->bytecode();
*buffer_limit++ = Bytecodes::ToByte(bytecode);
bytecodes()->push_back(Bytecodes::ToByte(bytecode));
const uint32_t* const operands = node->operands();
const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
const int operand_count = Bytecodes::NumberOfOperands(bytecode);
const int operand_count = node->operand_count();
const OperandSize* operand_sizes =
Bytecodes::GetOperandSizes(bytecode, operand_scale);
for (int i = 0; i < operand_count; ++i) {
OperandSize operand_size =
Bytecodes::SizeOfOperand(operand_types[i], operand_scale);
switch (operand_size) {
switch (operand_sizes[i]) {
case OperandSize::kNone:
UNREACHABLE();
break;
case OperandSize::kByte:
*buffer_limit++ = static_cast<uint8_t>(operands[i]);
bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
break;
case OperandSize::kShort: {
WriteUnalignedUInt16(buffer_limit, operands[i]);
buffer_limit += 2;
const uint8_t* raw_operand =
reinterpret_cast<const uint8_t*>(&operands[i]);
bytecodes()->push_back(raw_operand[0]);
bytecodes()->push_back(raw_operand[1]);
break;
}
case OperandSize::kQuad: {
WriteUnalignedUInt32(buffer_limit, operands[i]);
buffer_limit += 4;
const uint8_t* raw_operand =
reinterpret_cast<const uint8_t*>(&operands[i]);
bytecodes()->push_back(raw_operand[0]);
bytecodes()->push_back(raw_operand[1]);
bytecodes()->push_back(raw_operand[2]);
bytecodes()->push_back(raw_operand[3]);
break;
}
}
int count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_types[i]);
if (count == 0) {
continue;
}
// NB operand_types is terminated by OperandType::kNone so
// operand_types[i + 1] is valid whilst i < operand_count.
if (operand_types[i + 1] == OperandType::kRegCount) {
count = static_cast<int>(operands[i]);
}
Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
max_register_count_ = std::max(max_register_count_, reg.index() + count);
}
DCHECK_LE(buffer_limit, buffer + sizeof(buffer));
bytecodes()->insert(bytecodes()->end(), buffer, buffer_limit);
}
// static
@ -247,18 +175,17 @@ void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
size_t operand_location = jump_location + 1;
DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) {
// The jump fits within the range of an Imm operand, so cancel
if (Bytecodes::ScaleForSignedOperand(delta) == OperandScale::kSingle) {
// The jump fits within the range of an Imm8 operand, so cancel
// the reservation and jump directly.
constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
} else {
// The jump does not fit within the range of an Imm operand, so
// The jump does not fit within the range of an Imm8 operand, so
// commit reservation putting the offset into the constant pool,
// and update the jump instruction and operand.
size_t entry = constant_array_builder()->CommitReservedEntry(
OperandSize::kByte, Smi::FromInt(delta));
DCHECK_LE(entry, kMaxUInt32);
DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
OperandSize::kByte);
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
@ -273,14 +200,21 @@ void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
size_t operand_location = jump_location + 1;
uint8_t operand_bytes[2];
if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) {
if (Bytecodes::ScaleForSignedOperand(delta) <= OperandScale::kDouble) {
// The jump fits within the range of an Imm16 operand, so cancel
// the reservation and jump directly.
constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
} else {
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
// The jump does not fit within the range of an Imm16 operand, so
// commit reservation putting the offset into the constant pool,
// and update the jump instruction and operand.
size_t entry = constant_array_builder()->CommitReservedEntry(
OperandSize::kShort, Smi::FromInt(delta));
DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
OperandSize::kShort);
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
}
DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
@ -351,8 +285,8 @@ void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
// Label has been bound already so this is a backwards jump.
size_t abs_delta = current_offset - label->offset();
int delta = -static_cast<int>(abs_delta);
OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta);
if (operand_size > OperandSize::kByte) {
OperandScale operand_scale = Bytecodes::ScaleForSignedOperand(delta);
if (operand_scale > OperandScale::kSingle) {
// Adjust for scaling byte prefix for wide jump offset.
DCHECK_LE(delta, 0);
delta -= 1;

View File

@ -33,7 +33,7 @@ class BytecodeArrayWriter final : public BytecodePipelineStage {
void BindLabel(BytecodeLabel* label) override;
void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
Handle<BytecodeArray> ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) override;
private:
@ -69,10 +69,8 @@ class BytecodeArrayWriter final : public BytecodePipelineStage {
ConstantArrayBuilder* constant_array_builder() {
return constant_array_builder_;
}
int max_register_count() { return max_register_count_; }
ZoneVector<uint8_t> bytecodes_;
int max_register_count_;
int unbound_jumps_;
SourcePositionTableBuilder source_position_table_builder_;
ConstantArrayBuilder* constant_array_builder_;

View File

@ -14,10 +14,10 @@ BytecodeDeadCodeOptimizer::BytecodeDeadCodeOptimizer(
// override
Handle<BytecodeArray> BytecodeDeadCodeOptimizer::ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) {
return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
parameter_count, handler_table);
return next_stage_->ToBytecodeArray(isolate, register_count, parameter_count,
handler_table);
}
// override

View File

@ -24,7 +24,7 @@ class BytecodeDeadCodeOptimizer final : public BytecodePipelineStage,
void BindLabel(BytecodeLabel* label) override;
void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
Handle<BytecodeArray> ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) override;
private:

View File

@ -0,0 +1,89 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interpreter/bytecode-operands.h"
#include <iomanip>
namespace v8 {
namespace internal {
namespace interpreter {
namespace {
// Returns a human-readable name for |accumulator_use|, used when
// streaming AccumulatorUse values in debug/trace output.
const char* AccumulatorUseToString(AccumulatorUse accumulator_use) {
  if (accumulator_use == AccumulatorUse::kNone) return "None";
  if (accumulator_use == AccumulatorUse::kRead) return "Read";
  if (accumulator_use == AccumulatorUse::kWrite) return "Write";
  if (accumulator_use == AccumulatorUse::kReadWrite) return "ReadWrite";
  UNREACHABLE();
  return "";
}
// Returns the name of |operand_type| as a C string, e.g. "Reg" for
// OperandType::kReg. The switch cases are generated from
// OPERAND_TYPE_LIST so the mapping stays in sync with the enum.
const char* OperandTypeToString(OperandType operand_type) {
  switch (operand_type) {
#define CASE(Name, _)        \
  case OperandType::k##Name: \
    return #Name;
    OPERAND_TYPE_LIST(CASE)
#undef CASE
  }
  // All enumerators are covered above; reaching here means a corrupt value.
  UNREACHABLE();
  return "";
}
// Returns the name of |operand_scale| as a C string, e.g. "Double" for
// OperandScale::kDouble. The switch cases are generated from
// OPERAND_SCALE_LIST so the mapping stays in sync with the enum.
const char* OperandScaleToString(OperandScale operand_scale) {
  switch (operand_scale) {
#define CASE(Name, _)         \
  case OperandScale::k##Name: \
    return #Name;
    OPERAND_SCALE_LIST(CASE)
#undef CASE
  }
  // All enumerators are covered above; reaching here means a corrupt value.
  UNREACHABLE();
  return "";
}
// Returns a human-readable name for |operand_size| ("None", "Byte",
// "Short" or "Quad"), used when streaming OperandSize values.
const char* OperandSizeToString(OperandSize operand_size) {
  if (operand_size == OperandSize::kNone) return "None";
  if (operand_size == OperandSize::kByte) return "Byte";
  if (operand_size == OperandSize::kShort) return "Short";
  if (operand_size == OperandSize::kQuad) return "Quad";
  UNREACHABLE();
  return "";
}
} // namespace
// Streams a readable name for |use| (e.g. "ReadWrite") into |os|.
std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use) {
  os << AccumulatorUseToString(use);
  return os;
}
// Streams a readable name for |operand_size| (e.g. "Byte") into |os|.
std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size) {
  os << OperandSizeToString(operand_size);
  return os;
}
// Streams a readable name for |operand_scale| (e.g. "Single") into |os|.
std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale) {
  os << OperandScaleToString(operand_scale);
  return os;
}
// Streams a readable name for |operand_type| (e.g. "Imm") into |os|.
std::ostream& operator<<(std::ostream& os, const OperandType& operand_type) {
  os << OperandTypeToString(operand_type);
  return os;
}
} // namespace interpreter
} // namespace internal
} // namespace v8

View File

@ -0,0 +1,126 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_INTERPRETER_BYTECODE_OPERANDS_H_
#define V8_INTERPRETER_BYTECODE_OPERANDS_H_
#include "src/globals.h"
namespace v8 {
namespace internal {
namespace interpreter {
#define INVALID_OPERAND_TYPE_LIST(V) V(None, OperandTypeInfo::kNone)
#define REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
V(MaybeReg, OperandTypeInfo::kScalableSignedByte) \
V(Reg, OperandTypeInfo::kScalableSignedByte) \
V(RegPair, OperandTypeInfo::kScalableSignedByte)
#define REGISTER_OUTPUT_OPERAND_TYPE_LIST(V) \
V(RegOut, OperandTypeInfo::kScalableSignedByte) \
V(RegOutPair, OperandTypeInfo::kScalableSignedByte) \
V(RegOutTriple, OperandTypeInfo::kScalableSignedByte)
#define SCALAR_OPERAND_TYPE_LIST(V) \
V(Flag8, OperandTypeInfo::kFixedUnsignedByte) \
V(IntrinsicId, OperandTypeInfo::kFixedUnsignedByte) \
V(Idx, OperandTypeInfo::kScalableUnsignedByte) \
V(UImm, OperandTypeInfo::kScalableUnsignedByte) \
V(Imm, OperandTypeInfo::kScalableSignedByte) \
V(RegCount, OperandTypeInfo::kScalableUnsignedByte) \
V(RuntimeId, OperandTypeInfo::kFixedUnsignedShort)
#define REGISTER_OPERAND_TYPE_LIST(V) \
REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)
#define NON_REGISTER_OPERAND_TYPE_LIST(V) \
INVALID_OPERAND_TYPE_LIST(V) \
SCALAR_OPERAND_TYPE_LIST(V)
// The list of operand types used by bytecodes.
#define OPERAND_TYPE_LIST(V) \
NON_REGISTER_OPERAND_TYPE_LIST(V) \
REGISTER_OPERAND_TYPE_LIST(V)
// Enumeration of scaling factors applicable to scalable operands. Code
// relies on being able to cast values to integer scaling values.
#define OPERAND_SCALE_LIST(V) \
V(Single, 1) \
V(Double, 2) \
V(Quadruple, 4)
enum class OperandScale : uint8_t {
#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
#undef DECLARE_OPERAND_SCALE
kLast = kQuadruple
};
// Enumeration of the size classes of operand types used by
// bytecodes. Code relies on being able to cast values to integer
// types to get the size in bytes.
enum class OperandSize : uint8_t {
kNone = 0,
kByte = 1,
kShort = 2,
kQuad = 4,
kLast = kQuad
};
// Primitive operand info that summarizes the properties of operands.
// Columns are Name, IsScalable, IsUnsigned, UnscaledSize.
#define OPERAND_TYPE_INFO_LIST(V) \
V(None, false, false, OperandSize::kNone) \
V(ScalableSignedByte, true, false, OperandSize::kByte) \
V(ScalableUnsignedByte, true, true, OperandSize::kByte) \
V(FixedUnsignedByte, false, true, OperandSize::kByte) \
V(FixedUnsignedShort, false, true, OperandSize::kShort)
enum class OperandTypeInfo : uint8_t {
#define DECLARE_OPERAND_TYPE_INFO(Name, ...) k##Name,
OPERAND_TYPE_INFO_LIST(DECLARE_OPERAND_TYPE_INFO)
#undef DECLARE_OPERAND_TYPE_INFO
};
// Enumeration of operand types used by bytecodes.
enum class OperandType : uint8_t {
#define DECLARE_OPERAND_TYPE(Name, _) k##Name,
OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE)
#undef DECLARE_OPERAND_TYPE
#define COUNT_OPERAND_TYPES(x, _) +1
// The COUNT_OPERAND_TYPES macro will turn this into kLast = -1 +1 +1...
// which will evaluate to the same value as the last operand type.
kLast = -1 OPERAND_TYPE_LIST(COUNT_OPERAND_TYPES)
#undef COUNT_OPERAND_TYPES
};
// Flags describing how a bytecode interacts with the interpreter's
// accumulator register. Values are bit flags so they can be combined
// and tested with the operator|/operator& overloads in this header.
enum class AccumulatorUse : uint8_t {
  kNone = 0,                    // Accumulator untouched.
  kRead = 1 << 0,               // Bytecode reads the accumulator.
  kWrite = 1 << 1,              // Bytecode writes the accumulator.
  kReadWrite = kRead | kWrite   // Bytecode both reads and writes it.
};
// Bitwise-and of accumulator-use flags; used to test whether a
// bytecode reads and/or writes the accumulator.
inline AccumulatorUse operator&(AccumulatorUse lhs, AccumulatorUse rhs) {
  return static_cast<AccumulatorUse>(static_cast<int>(lhs) &
                                     static_cast<int>(rhs));
}
// Bitwise-or of accumulator-use flags; used to combine read/write
// properties into a single AccumulatorUse value.
inline AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
  return static_cast<AccumulatorUse>(static_cast<int>(lhs) |
                                     static_cast<int>(rhs));
}
std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use);
std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale);
std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size);
std::ostream& operator<<(std::ostream& os, const OperandType& operand_type);
} // namespace interpreter
} // namespace internal
} // namespace v8
#endif // V8_INTERPRETER_BYTECODE_OPERANDS_H_

View File

@ -13,17 +13,17 @@ namespace interpreter {
BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
BytecodePipelineStage* next_stage)
: next_stage_(next_stage) {
: next_stage_(next_stage), last_(Bytecode::kIllegal) {
InvalidateLast();
}
// override
Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) {
Flush();
return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
parameter_count, handler_table);
return next_stage_->ToBytecodeArray(isolate, register_count, parameter_count,
handler_table);
}
// override
@ -142,7 +142,7 @@ void TransformLdaSmiBinaryOpToBinaryOpWithSmi(Bytecode new_bytecode,
current->set_bytecode(new_bytecode, last->operand(0), current->operand(0),
current->operand(1));
if (last->source_info().is_valid()) {
current->source_info().Clone(last->source_info());
current->source_info_ptr()->Clone(last->source_info());
}
}
@ -153,7 +153,7 @@ void TransformLdaZeroBinaryOpToBinaryOpWithZero(Bytecode new_bytecode,
current->set_bytecode(new_bytecode, 0, current->operand(0),
current->operand(1));
if (last->source_info().is_valid()) {
current->source_info().Clone(last->source_info());
current->source_info_ptr()->Clone(last->source_info());
}
}
@ -223,7 +223,7 @@ void BytecodePeepholeOptimizer::ElideLastAction(
// |node| can not have a valid source position if the source
// position of last() is valid (per rules in
// CanElideLastBasedOnSourcePosition()).
node->source_info().Clone(last()->source_info());
node->source_info_ptr()->Clone(last()->source_info());
}
SetLast(node);
} else {
@ -314,7 +314,7 @@ void BytecodePeepholeOptimizer::ElideLastBeforeJumpAction(
if (!CanElideLastBasedOnSourcePosition(node)) {
next_stage()->Write(last());
} else if (!node->source_info().is_valid()) {
node->source_info().Clone(last()->source_info());
node->source_info_ptr()->Clone(last()->source_info());
}
InvalidateLast();
}

View File

@ -28,7 +28,7 @@ class BytecodePeepholeOptimizer final : public BytecodePipelineStage,
void BindLabel(BytecodeLabel* label) override;
void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
Handle<BytecodeArray> ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) override;
private:

View File

@ -11,45 +11,6 @@ namespace v8 {
namespace internal {
namespace interpreter {
BytecodeNode::BytecodeNode(Bytecode bytecode) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
bytecode_ = bytecode;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
bytecode_ = bytecode;
operands_[0] = operand0;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operands_[2] = operand2;
}
BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
uint32_t operand3) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 4);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operands_[2] = operand2;
operands_[3] = operand3;
}
BytecodeNode::BytecodeNode(const BytecodeNode& other) {
memcpy(this, &other, sizeof(other));
}
@ -83,23 +44,6 @@ void BytecodeNode::Print(std::ostream& os) const {
#endif // DEBUG
}
void BytecodeNode::Transform(Bytecode new_bytecode, uint32_t extra_operand) {
DCHECK_EQ(Bytecodes::NumberOfOperands(new_bytecode),
Bytecodes::NumberOfOperands(bytecode()) + 1);
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 1 ||
Bytecodes::GetOperandType(new_bytecode, 0) ==
Bytecodes::GetOperandType(bytecode(), 0));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 2 ||
Bytecodes::GetOperandType(new_bytecode, 1) ==
Bytecodes::GetOperandType(bytecode(), 1));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 3 ||
Bytecodes::GetOperandType(new_bytecode, 2) ==
Bytecodes::GetOperandType(bytecode(), 2));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 4);
operands_[operand_count()] = extra_operand;
bytecode_ = new_bytecode;
}
bool BytecodeNode::operator==(const BytecodeNode& other) const {
if (this == &other) {
return true;

View File

@ -47,7 +47,7 @@ class BytecodePipelineStage {
// Flush the pipeline and generate a bytecode array.
virtual Handle<BytecodeArray> ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) = 0;
};
@ -134,21 +134,69 @@ class BytecodeSourceInfo final {
PositionType position_type_;
int source_position_;
DISALLOW_COPY_AND_ASSIGN(BytecodeSourceInfo);
};
// A container for a generated bytecode, its operands, and source information.
// These must be allocated by a BytecodeNodeAllocator instance.
class BytecodeNode final : ZoneObject {
public:
explicit BytecodeNode(Bytecode bytecode = Bytecode::kIllegal);
BytecodeNode(Bytecode bytecode, uint32_t operand0);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2);
BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3);
INLINE(BytecodeNode(const Bytecode bytecode,
BytecodeSourceInfo* source_info = nullptr))
: bytecode_(bytecode),
operand_count_(0),
operand_scale_(OperandScale::kSingle) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
AttachSourceInfo(source_info);
}
INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
BytecodeSourceInfo* source_info = nullptr))
: bytecode_(bytecode),
operand_count_(1),
operand_scale_(OperandScale::kSingle) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
SetOperand(0, operand0);
AttachSourceInfo(source_info);
}
INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
uint32_t operand1,
BytecodeSourceInfo* source_info = nullptr))
: bytecode_(bytecode),
operand_count_(2),
operand_scale_(OperandScale::kSingle) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
SetOperand(0, operand0);
SetOperand(1, operand1);
AttachSourceInfo(source_info);
}
INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
BytecodeSourceInfo* source_info = nullptr))
: bytecode_(bytecode),
operand_count_(3),
operand_scale_(OperandScale::kSingle) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
SetOperand(0, operand0);
SetOperand(1, operand1);
SetOperand(2, operand2);
AttachSourceInfo(source_info);
}
INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2, uint32_t operand3,
BytecodeSourceInfo* source_info = nullptr))
: bytecode_(bytecode),
operand_count_(4),
operand_scale_(OperandScale::kSingle) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
SetOperand(0, operand0);
SetOperand(1, operand1);
SetOperand(2, operand2);
SetOperand(3, operand3);
AttachSourceInfo(source_info);
}
BytecodeNode(const BytecodeNode& other);
BytecodeNode& operator=(const BytecodeNode& other);
@ -162,25 +210,33 @@ class BytecodeNode final : ZoneObject {
void set_bytecode(Bytecode bytecode) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
bytecode_ = bytecode;
operand_count_ = 0;
operand_scale_ = OperandScale::kSingle;
}
void set_bytecode(Bytecode bytecode, uint32_t operand0) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
bytecode_ = bytecode;
operands_[0] = operand0;
operand_count_ = 1;
operand_scale_ = OperandScale::kSingle;
SetOperand(0, operand0);
}
void set_bytecode(Bytecode bytecode, uint32_t operand0, uint32_t operand1) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operand_count_ = 2;
operand_scale_ = OperandScale::kSingle;
SetOperand(0, operand0);
SetOperand(1, operand1);
}
void set_bytecode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2) {
DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
bytecode_ = bytecode;
operands_[0] = operand0;
operands_[1] = operand1;
operands_[2] = operand2;
operand_count_ = 3;
operand_scale_ = OperandScale::kSingle;
SetOperand(0, operand0);
SetOperand(1, operand1);
SetOperand(2, operand2);
}
// Clone |other|.
@ -191,7 +247,36 @@ class BytecodeNode final : ZoneObject {
// Transform to a node representing |new_bytecode| which has one
// operand more than the current bytecode.
void Transform(Bytecode new_bytecode, uint32_t extra_operand);
void Transform(Bytecode new_bytecode, uint32_t extra_operand) {
DCHECK_EQ(Bytecodes::NumberOfOperands(new_bytecode),
Bytecodes::NumberOfOperands(bytecode()) + 1);
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 1 ||
Bytecodes::GetOperandType(new_bytecode, 0) ==
Bytecodes::GetOperandType(bytecode(), 0));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 2 ||
Bytecodes::GetOperandType(new_bytecode, 1) ==
Bytecodes::GetOperandType(bytecode(), 1));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 3 ||
Bytecodes::GetOperandType(new_bytecode, 2) ==
Bytecodes::GetOperandType(bytecode(), 2));
DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 4);
bytecode_ = new_bytecode;
operand_count_++;
SetOperand(operand_count() - 1, extra_operand);
}
// Updates the operand at |operand_index| to |operand|.
void UpdateOperand(int operand_index, uint32_t operand) {
DCHECK_LE(operand_index, Bytecodes::NumberOfOperands(bytecode()));
operands_[operand_index] = operand;
if ((Bytecodes::OperandIsScalableSignedByte(bytecode(), operand_index) &&
Bytecodes::ScaleForSignedOperand(operand) != operand_scale_) ||
(Bytecodes::OperandIsScalableUnsignedByte(bytecode(), operand_index) &&
Bytecodes::ScaleForUnsignedOperand(operand) != operand_scale_)) {
UpdateScale();
}
}
Bytecode bytecode() const { return bytecode_; }
@ -199,22 +284,60 @@ class BytecodeNode final : ZoneObject {
DCHECK_LT(i, operand_count());
return operands_[i];
}
uint32_t* operands() { return operands_; }
const uint32_t* operands() const { return operands_; }
int operand_count() const { return Bytecodes::NumberOfOperands(bytecode_); }
int operand_count() const { return operand_count_; }
OperandScale operand_scale() const { return operand_scale_; }
const BytecodeSourceInfo& source_info() const { return source_info_; }
BytecodeSourceInfo& source_info() { return source_info_; }
BytecodeSourceInfo* source_info_ptr() { return &source_info_; }
bool operator==(const BytecodeNode& other) const;
bool operator!=(const BytecodeNode& other) const { return !(*this == other); }
private:
static const int kInvalidPosition = kMinInt;
INLINE(void AttachSourceInfo(BytecodeSourceInfo* source_info)) {
if (source_info && source_info->is_valid()) {
// Statement positions need to be emitted immediately. Expression
// positions can be pushed back until a bytecode is found that can
// throw (if expression position filtering is turned on). We only
// invalidate the existing source position information if it is used.
if (source_info->is_statement() ||
!FLAG_ignition_filter_expression_positions ||
!Bytecodes::IsWithoutExternalSideEffects(bytecode())) {
source_info_.Clone(*source_info);
source_info->set_invalid();
}
}
}
INLINE(void UpdateScaleForOperand(int operand_index, uint32_t operand)) {
if (Bytecodes::OperandIsScalableSignedByte(bytecode(), operand_index)) {
operand_scale_ =
std::max(operand_scale_, Bytecodes::ScaleForSignedOperand(operand));
} else if (Bytecodes::OperandIsScalableUnsignedByte(bytecode(),
operand_index)) {
operand_scale_ =
std::max(operand_scale_, Bytecodes::ScaleForUnsignedOperand(operand));
}
}
INLINE(void SetOperand(int operand_index, uint32_t operand)) {
operands_[operand_index] = operand;
UpdateScaleForOperand(operand_index, operand);
}
void UpdateScale() {
operand_scale_ = OperandScale::kSingle;
for (int i = 0; i < operand_count(); i++) {
UpdateScaleForOperand(i, operands_[i]);
}
}
Bytecode bytecode_;
uint32_t operands_[Bytecodes::kMaxOperands];
int operand_count_;
OperandScale operand_scale_;
BytecodeSourceInfo source_info_;
};

View File

@ -174,6 +174,7 @@ BytecodeRegisterOptimizer::BytecodeRegisterOptimizer(
int parameter_count, BytecodePipelineStage* next_stage)
: accumulator_(Register::virtual_accumulator()),
temporary_base_(register_allocator->allocation_base()),
max_register_index_(register_allocator->allocation_base() - 1),
register_info_table_(zone),
equivalence_id_(0),
next_stage_(next_stage),
@ -208,15 +209,17 @@ BytecodeRegisterOptimizer::BytecodeRegisterOptimizer(
// override
Handle<BytecodeArray> BytecodeRegisterOptimizer::ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) {
FlushState();
return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
return next_stage_->ToBytecodeArray(isolate, max_register_index_ + 1,
parameter_count, handler_table);
}
// override
void BytecodeRegisterOptimizer::Write(BytecodeNode* node) {
// Jumps are handled by WriteJump.
DCHECK(!Bytecodes::IsJump(node->bytecode()));
//
// Transfers with observable registers as the destination will be
// immediately materialized so the source position information will
@ -245,18 +248,16 @@ void BytecodeRegisterOptimizer::Write(BytecodeNode* node) {
break;
}
if (Bytecodes::IsJump(node->bytecode()) ||
node->bytecode() == Bytecode::kDebugger ||
if (node->bytecode() == Bytecode::kDebugger ||
node->bytecode() == Bytecode::kSuspendGenerator) {
// All state must be flushed before emitting
// - a jump (due to how bytecode offsets for jumps are evaluated),
// - a call to the debugger (as it can manipulate locals and parameters),
// - a generator suspend (as this involves saving all registers).
FlushState();
}
PrepareOperands(node);
WriteToNextStage(node);
next_stage_->Write(node);
}
// override
@ -306,38 +307,29 @@ void BytecodeRegisterOptimizer::FlushState() {
flush_required_ = false;
}
void BytecodeRegisterOptimizer::WriteToNextStage(BytecodeNode* node) const {
next_stage_->Write(node);
}
void BytecodeRegisterOptimizer::WriteToNextStage(
BytecodeNode* node, const BytecodeSourceInfo& source_info) const {
if (source_info.is_valid()) {
node->source_info().Clone(source_info);
}
next_stage_->Write(node);
}
void BytecodeRegisterOptimizer::OutputRegisterTransfer(
RegisterInfo* input_info, RegisterInfo* output_info,
const BytecodeSourceInfo& source_info) {
BytecodeSourceInfo* source_info) {
Register input = input_info->register_value();
Register output = output_info->register_value();
DCHECK_NE(input.index(), output.index());
if (input == accumulator_) {
uint32_t operand = static_cast<uint32_t>(output.ToOperand());
BytecodeNode node(Bytecode::kStar, operand);
WriteToNextStage(&node, source_info);
BytecodeNode node(Bytecode::kStar, operand, source_info);
next_stage_->Write(&node);
} else if (output == accumulator_) {
uint32_t operand = static_cast<uint32_t>(input.ToOperand());
BytecodeNode node(Bytecode::kLdar, operand);
WriteToNextStage(&node, source_info);
BytecodeNode node(Bytecode::kLdar, operand, source_info);
next_stage_->Write(&node);
} else {
uint32_t operand0 = static_cast<uint32_t>(input.ToOperand());
uint32_t operand1 = static_cast<uint32_t>(output.ToOperand());
BytecodeNode node(Bytecode::kMov, operand0, operand1);
WriteToNextStage(&node, source_info);
BytecodeNode node(Bytecode::kMov, operand0, operand1, source_info);
next_stage_->Write(&node);
}
if (output != accumulator_) {
max_register_index_ = std::max(max_register_index_, output.index());
}
output_info->set_materialized(true);
}
@ -389,7 +381,7 @@ void BytecodeRegisterOptimizer::AddToEquivalenceSet(
void BytecodeRegisterOptimizer::RegisterTransfer(
RegisterInfo* input_info, RegisterInfo* output_info,
const BytecodeSourceInfo& source_info) {
BytecodeSourceInfo* source_info) {
// Materialize an alternate in the equivalence set that
// |output_info| is leaving.
if (output_info->materialized()) {
@ -408,42 +400,41 @@ void BytecodeRegisterOptimizer::RegisterTransfer(
output_info->set_materialized(false);
RegisterInfo* materialized_info = input_info->GetMaterializedEquivalent();
OutputRegisterTransfer(materialized_info, output_info, source_info);
} else if (source_info.is_valid()) {
} else if (source_info->is_valid()) {
// Emit a placeholder nop to maintain source position info.
EmitNopForSourceInfo(source_info);
}
}
void BytecodeRegisterOptimizer::EmitNopForSourceInfo(
const BytecodeSourceInfo& source_info) const {
DCHECK(source_info.is_valid());
BytecodeNode nop(Bytecode::kNop);
nop.source_info().Clone(source_info);
WriteToNextStage(&nop);
BytecodeSourceInfo* source_info) const {
DCHECK(source_info->is_valid());
BytecodeNode nop(Bytecode::kNop, source_info);
next_stage_->Write(&nop);
}
void BytecodeRegisterOptimizer::DoLdar(const BytecodeNode* const node) {
void BytecodeRegisterOptimizer::DoLdar(BytecodeNode* node) {
Register input = GetRegisterInputOperand(
0, node->bytecode(), node->operands(), node->operand_count());
RegisterInfo* input_info = GetRegisterInfo(input);
RegisterTransfer(input_info, accumulator_info_, node->source_info());
RegisterTransfer(input_info, accumulator_info_, node->source_info_ptr());
}
void BytecodeRegisterOptimizer::DoMov(const BytecodeNode* const node) {
void BytecodeRegisterOptimizer::DoMov(BytecodeNode* node) {
Register input = GetRegisterInputOperand(
0, node->bytecode(), node->operands(), node->operand_count());
RegisterInfo* input_info = GetRegisterInfo(input);
Register output = GetRegisterOutputOperand(
1, node->bytecode(), node->operands(), node->operand_count());
RegisterInfo* output_info = GetOrCreateRegisterInfo(output);
RegisterTransfer(input_info, output_info, node->source_info());
RegisterTransfer(input_info, output_info, node->source_info_ptr());
}
void BytecodeRegisterOptimizer::DoStar(const BytecodeNode* const node) {
void BytecodeRegisterOptimizer::DoStar(BytecodeNode* node) {
Register output = GetRegisterOutputOperand(
0, node->bytecode(), node->operands(), node->operand_count());
RegisterInfo* output_info = GetOrCreateRegisterInfo(output);
RegisterTransfer(accumulator_info_, output_info, node->source_info());
RegisterTransfer(accumulator_info_, output_info, node->source_info_ptr());
}
void BytecodeRegisterOptimizer::PrepareRegisterOutputOperand(
@ -451,6 +442,8 @@ void BytecodeRegisterOptimizer::PrepareRegisterOutputOperand(
if (reg_info->materialized()) {
CreateMaterializedEquivalent(reg_info);
}
max_register_index_ =
std::max(max_register_index_, reg_info->register_value().index());
reg_info->MoveToNewEquivalenceSet(NextEquivalenceId(), true);
}
@ -481,8 +474,8 @@ Register BytecodeRegisterOptimizer::GetEquivalentRegisterForInputOperand(
void BytecodeRegisterOptimizer::PrepareRegisterInputOperand(
BytecodeNode* const node, Register reg, int operand_index) {
Register equivalent = GetEquivalentRegisterForInputOperand(reg);
node->operands()[operand_index] =
static_cast<uint32_t>(equivalent.ToOperand());
node->UpdateOperand(operand_index,
static_cast<uint32_t>(equivalent.ToOperand()));
}
void BytecodeRegisterOptimizer::PrepareRegisterRangeInputOperand(Register start,

View File

@ -31,7 +31,7 @@ class BytecodeRegisterOptimizer final : public BytecodePipelineStage,
void BindLabel(BytecodeLabel* label) override;
void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
Handle<BytecodeArray> ToBytecodeArray(
Isolate* isolate, int fixed_register_count, int parameter_count,
Isolate* isolate, int register_count, int parameter_count,
Handle<FixedArray> handler_table) override;
private:
@ -44,29 +44,25 @@ class BytecodeRegisterOptimizer final : public BytecodePipelineStage,
// Helpers for BytecodePipelineStage interface.
void FlushState();
void WriteToNextStage(BytecodeNode* node) const;
void WriteToNextStage(BytecodeNode* node,
const BytecodeSourceInfo& output_info) const;
// Update internal state for register transfer from |input| to
// |output| using |source_info| as source position information if
// any bytecodes are emitted due to transfer.
void RegisterTransfer(RegisterInfo* input, RegisterInfo* output,
const BytecodeSourceInfo& source_info);
BytecodeSourceInfo* source_info);
// Emit a register transfer bytecode from |input| to |output|.
void OutputRegisterTransfer(
RegisterInfo* input, RegisterInfo* output,
const BytecodeSourceInfo& source_info = BytecodeSourceInfo());
void OutputRegisterTransfer(RegisterInfo* input, RegisterInfo* output,
BytecodeSourceInfo* source_info = nullptr);
// Emits a Nop to preserve source position information in the
// bytecode pipeline.
void EmitNopForSourceInfo(const BytecodeSourceInfo& source_info) const;
void EmitNopForSourceInfo(BytecodeSourceInfo* source_info) const;
// Handlers for bytecode nodes for register to register transfers.
void DoLdar(const BytecodeNode* const node);
void DoMov(const BytecodeNode* const node);
void DoStar(const BytecodeNode* const node);
void DoLdar(BytecodeNode* node);
void DoMov(BytecodeNode* node);
void DoStar(BytecodeNode* node);
// Operand processing methods for bytecodes other than those
// performing register to register transfers.
@ -133,6 +129,7 @@ class BytecodeRegisterOptimizer final : public BytecodePipelineStage,
const Register accumulator_;
RegisterInfo* accumulator_info_;
const Register temporary_base_;
int max_register_index_;
// Direct mapping to register info.
ZoneVector<RegisterInfo*> register_info_table_;

View File

@ -5,7 +5,7 @@
#ifndef V8_INTERPRETER_BYTECODE_TRAITS_H_
#define V8_INTERPRETER_BYTECODE_TRAITS_H_
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/bytecode-operands.h"
namespace v8 {
namespace internal {
@ -65,208 +65,88 @@ struct OperandScaler {
static const OperandSize kOperandSize = static_cast<OperandSize>(kSize);
};
template <OperandType>
struct RegisterOperandTraits {
static const int kIsRegisterOperand = 0;
template <int... values>
struct SumHelper;
template <int value>
struct SumHelper<value> {
static const int kValue = value;
};
template <int value, int... values>
struct SumHelper<value, values...> {
static const int kValue = value + SumHelper<values...>::kValue;
};
#define DECLARE_REGISTER_OPERAND(Name, _) \
template <> \
struct RegisterOperandTraits<OperandType::k##Name> { \
static const int kIsRegisterOperand = 1; \
};
REGISTER_OPERAND_TYPE_LIST(DECLARE_REGISTER_OPERAND)
#undef DECLARE_REGISTER_OPERAND
template <AccumulatorUse, OperandType...>
struct BytecodeTraits {};
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2, OperandType operand_3>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
operand_3> {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
operand_3, OperandType::kNone};
return operand_types;
}
static const OperandTypeInfo* GetOperandTypeInfos() {
static const OperandTypeInfo operand_type_infos[] = {
OperandTraits<operand_0>::kOperandTypeInfo,
OperandTraits<operand_1>::kOperandTypeInfo,
OperandTraits<operand_2>::kOperandTypeInfo,
OperandTraits<operand_3>::kOperandTypeInfo, OperandTypeInfo::kNone};
return operand_type_infos;
}
template <OperandType ot>
static inline bool HasAnyOperandsOfType() {
return operand_0 == ot || operand_1 == ot || operand_2 == ot ||
operand_3 == ot;
}
static inline bool IsScalable() {
return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_1>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_2>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_3>::TypeInfoTraits::kIsScalable);
}
template <AccumulatorUse accumulator_use, OperandType... operands>
struct BytecodeTraits {
static const OperandType kOperandTypes[];
static const OperandTypeInfo kOperandTypeInfos[];
static const OperandSize kSingleScaleOperandSizes[];
static const OperandSize kDoubleScaleOperandSizes[];
static const OperandSize kQuadrupleScaleOperandSizes[];
static const int kSingleScaleSize = SumHelper<
1, OperandScaler<operands, OperandScale::kSingle>::kSize...>::kValue;
static const int kDoubleScaleSize = SumHelper<
1, OperandScaler<operands, OperandScale::kDouble>::kSize...>::kValue;
static const int kQuadrupleScaleSize = SumHelper<
1, OperandScaler<operands, OperandScale::kQuadruple>::kSize...>::kValue;
static const AccumulatorUse kAccumulatorUse = accumulator_use;
static const int kOperandCount = 4;
static const int kRegisterOperandCount =
RegisterOperandTraits<operand_0>::kIsRegisterOperand +
RegisterOperandTraits<operand_1>::kIsRegisterOperand +
RegisterOperandTraits<operand_2>::kIsRegisterOperand +
RegisterOperandTraits<operand_3>::kIsRegisterOperand;
static const int kOperandCount = sizeof...(operands);
};
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1, OperandType operand_2>
struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
OperandType::kNone};
return operand_types;
}
static const OperandTypeInfo* GetOperandTypeInfos() {
static const OperandTypeInfo operand_type_infos[] = {
OperandTraits<operand_0>::kOperandTypeInfo,
OperandTraits<operand_1>::kOperandTypeInfo,
OperandTraits<operand_2>::kOperandTypeInfo, OperandTypeInfo::kNone};
return operand_type_infos;
}
template <OperandType ot>
static inline bool HasAnyOperandsOfType() {
return operand_0 == ot || operand_1 == ot || operand_2 == ot;
}
static inline bool IsScalable() {
return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_1>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_2>::TypeInfoTraits::kIsScalable);
}
static const AccumulatorUse kAccumulatorUse = accumulator_use;
static const int kOperandCount = 3;
static const int kRegisterOperandCount =
RegisterOperandTraits<operand_0>::kIsRegisterOperand +
RegisterOperandTraits<operand_1>::kIsRegisterOperand +
RegisterOperandTraits<operand_2>::kIsRegisterOperand;
};
template <AccumulatorUse accumulator_use, OperandType operand_0,
OperandType operand_1>
struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, operand_1,
OperandType::kNone};
return operand_types;
}
static const OperandTypeInfo* GetOperandTypeInfos() {
static const OperandTypeInfo operand_type_infos[] = {
OperandTraits<operand_0>::kOperandTypeInfo,
OperandTraits<operand_1>::kOperandTypeInfo, OperandTypeInfo::kNone};
return operand_type_infos;
}
template <OperandType ot>
static inline bool HasAnyOperandsOfType() {
return operand_0 == ot || operand_1 == ot;
}
static inline bool IsScalable() {
return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
OperandTraits<operand_1>::TypeInfoTraits::kIsScalable);
}
static const AccumulatorUse kAccumulatorUse = accumulator_use;
static const int kOperandCount = 2;
static const int kRegisterOperandCount =
RegisterOperandTraits<operand_0>::kIsRegisterOperand +
RegisterOperandTraits<operand_1>::kIsRegisterOperand;
};
template <AccumulatorUse accumulator_use, OperandType operand_0>
struct BytecodeTraits<accumulator_use, operand_0> {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {operand_0, OperandType::kNone};
return operand_types;
}
static const OperandTypeInfo* GetOperandTypeInfos() {
static const OperandTypeInfo operand_type_infos[] = {
OperandTraits<operand_0>::kOperandTypeInfo, OperandTypeInfo::kNone};
return operand_type_infos;
}
template <OperandType ot>
static inline bool HasAnyOperandsOfType() {
return operand_0 == ot;
}
static inline bool IsScalable() {
return OperandTraits<operand_0>::TypeInfoTraits::kIsScalable;
}
static const AccumulatorUse kAccumulatorUse = accumulator_use;
static const int kOperandCount = 1;
static const int kRegisterOperandCount =
RegisterOperandTraits<operand_0>::kIsRegisterOperand;
};
template <AccumulatorUse accumulator_use, OperandType... operands>
STATIC_CONST_MEMBER_DEFINITION const OperandType
BytecodeTraits<accumulator_use, operands...>::kOperandTypes[] = {
operands...};
template <AccumulatorUse accumulator_use, OperandType... operands>
STATIC_CONST_MEMBER_DEFINITION const OperandTypeInfo
BytecodeTraits<accumulator_use, operands...>::kOperandTypeInfos[] = {
OperandTraits<operands>::kOperandTypeInfo...};
template <AccumulatorUse accumulator_use, OperandType... operands>
STATIC_CONST_MEMBER_DEFINITION const OperandSize
BytecodeTraits<accumulator_use, operands...>::kSingleScaleOperandSizes[] = {
OperandScaler<operands, OperandScale::kSingle>::kOperandSize...};
template <AccumulatorUse accumulator_use, OperandType... operands>
STATIC_CONST_MEMBER_DEFINITION const OperandSize
BytecodeTraits<accumulator_use, operands...>::kDoubleScaleOperandSizes[] = {
OperandScaler<operands, OperandScale::kDouble>::kOperandSize...};
template <AccumulatorUse accumulator_use, OperandType... operands>
STATIC_CONST_MEMBER_DEFINITION const OperandSize BytecodeTraits<
accumulator_use, operands...>::kQuadrupleScaleOperandSizes[] = {
OperandScaler<operands, OperandScale::kQuadruple>::kOperandSize...};
template <AccumulatorUse accumulator_use>
struct BytecodeTraits<accumulator_use> {
static const OperandType* GetOperandTypes() {
static const OperandType operand_types[] = {OperandType::kNone};
return operand_types;
}
static const OperandTypeInfo* GetOperandTypeInfos() {
static const OperandTypeInfo operand_type_infos[] = {
OperandTypeInfo::kNone};
return operand_type_infos;
}
template <OperandType ot>
static inline bool HasAnyOperandsOfType() {
return false;
}
static inline bool IsScalable() { return false; }
static const OperandType kOperandTypes[];
static const OperandTypeInfo kOperandTypeInfos[];
static const OperandSize kSingleScaleOperandSizes[];
static const OperandSize kDoubleScaleOperandSizes[];
static const OperandSize kQuadrupleScaleOperandSizes[];
static const int kSingleScaleSize = 1;
static const int kDoubleScaleSize = 1;
static const int kQuadrupleScaleSize = 1;
static const AccumulatorUse kAccumulatorUse = accumulator_use;
static const int kOperandCount = 0;
static const int kRegisterOperandCount = 0;
};
static OperandSize ScaledOperandSize(OperandType operand_type,
OperandScale operand_scale) {
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int index = static_cast<int>(operand_scale) >> 1;
switch (operand_type) {
#define CASE(Name, TypeInfo) \
case OperandType::k##Name: { \
static const OperandSize kOperandSizes[] = { \
OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize}; \
return kOperandSizes[index]; \
}
OPERAND_TYPE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return OperandSize::kNone;
}
template <AccumulatorUse accumulator_use>
STATIC_CONST_MEMBER_DEFINITION const OperandType
BytecodeTraits<accumulator_use>::kOperandTypes[] = {OperandType::kNone};
template <AccumulatorUse accumulator_use>
STATIC_CONST_MEMBER_DEFINITION const OperandTypeInfo
BytecodeTraits<accumulator_use>::kOperandTypeInfos[] = {
OperandTypeInfo::kNone};
template <AccumulatorUse accumulator_use>
STATIC_CONST_MEMBER_DEFINITION const OperandSize
BytecodeTraits<accumulator_use>::kSingleScaleOperandSizes[] = {
OperandSize::kNone};
template <AccumulatorUse accumulator_use>
STATIC_CONST_MEMBER_DEFINITION const OperandSize
BytecodeTraits<accumulator_use>::kDoubleScaleOperandSizes[] = {
OperandSize::kNone};
template <AccumulatorUse accumulator_use>
STATIC_CONST_MEMBER_DEFINITION const OperandSize
BytecodeTraits<accumulator_use>::kQuadrupleScaleOperandSizes[] = {
OperandSize::kNone};
} // namespace interpreter
} // namespace internal

View File

@ -7,14 +7,59 @@
#include <iomanip>
#include "src/base/bits.h"
#include "src/globals.h"
#include "src/interpreter/bytecode-traits.h"
namespace v8 {
namespace internal {
namespace interpreter {
STATIC_CONST_MEMBER_DEFINITION const int Bytecodes::kMaxOperands;
// clang-format off
STATIC_CONST_MEMBER_DEFINITION
const OperandType* const Bytecodes::kOperandTypes[] = {
#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandTypes,
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
STATIC_CONST_MEMBER_DEFINITION
const OperandTypeInfo* const Bytecodes::kOperandTypeInfos[] = {
#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandTypeInfos,
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
STATIC_CONST_MEMBER_DEFINITION const int Bytecodes::kOperandCount[] = {
#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandCount,
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
STATIC_CONST_MEMBER_DEFINITION
const AccumulatorUse Bytecodes::kAccumulatorUse[] = {
#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kAccumulatorUse,
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
STATIC_CONST_MEMBER_DEFINITION const int Bytecodes::kBytecodeSizes[][3] = {
#define ENTRY(Name, ...) \
{ BytecodeTraits<__VA_ARGS__>::kSingleScaleSize, \
BytecodeTraits<__VA_ARGS__>::kDoubleScaleSize, \
BytecodeTraits<__VA_ARGS__>::kQuadrupleScaleSize },
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
STATIC_CONST_MEMBER_DEFINITION
const OperandSize* const Bytecodes::kOperandSizes[][3] = {
#define ENTRY(Name, ...) \
{ BytecodeTraits<__VA_ARGS__>::kSingleScaleOperandSizes, \
BytecodeTraits<__VA_ARGS__>::kDoubleScaleOperandSizes, \
BytecodeTraits<__VA_ARGS__>::kQuadrupleScaleOperandSizes },
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
// clang-format on
// static
const char* Bytecodes::ToString(Bytecode bytecode) {
@ -43,77 +88,6 @@ std::string Bytecodes::ToString(Bytecode bytecode, OperandScale operand_scale) {
}
}
// static
const char* Bytecodes::AccumulatorUseToString(AccumulatorUse accumulator_use) {
switch (accumulator_use) {
case AccumulatorUse::kNone:
return "None";
case AccumulatorUse::kRead:
return "Read";
case AccumulatorUse::kWrite:
return "Write";
case AccumulatorUse::kReadWrite:
return "ReadWrite";
}
UNREACHABLE();
return "";
}
// static
const char* Bytecodes::OperandTypeToString(OperandType operand_type) {
switch (operand_type) {
#define CASE(Name, _) \
case OperandType::k##Name: \
return #Name;
OPERAND_TYPE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return "";
}
// static
const char* Bytecodes::OperandScaleToString(OperandScale operand_scale) {
switch (operand_scale) {
#define CASE(Name, _) \
case OperandScale::k##Name: \
return #Name;
OPERAND_SCALE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return "";
}
// static
const char* Bytecodes::OperandSizeToString(OperandSize operand_size) {
switch (operand_size) {
case OperandSize::kNone:
return "None";
case OperandSize::kByte:
return "Byte";
case OperandSize::kShort:
return "Short";
case OperandSize::kQuad:
return "Quad";
}
UNREACHABLE();
return "";
}
// static
uint8_t Bytecodes::ToByte(Bytecode bytecode) {
DCHECK_LE(bytecode, Bytecode::kLast);
return static_cast<uint8_t>(bytecode);
}
// static
Bytecode Bytecodes::FromByte(uint8_t value) {
Bytecode bytecode = static_cast<Bytecode>(value);
DCHECK(bytecode <= Bytecode::kLast);
return bytecode;
}
// static
Bytecode Bytecodes::GetDebugBreak(Bytecode bytecode) {
DCHECK(!IsDebugBreak(bytecode));
@ -124,7 +98,7 @@ Bytecode Bytecodes::GetDebugBreak(Bytecode bytecode) {
return Bytecode::kDebugBreakExtraWide;
}
int bytecode_size = Size(bytecode, OperandScale::kSingle);
#define RETURN_IF_DEBUG_BREAK_SIZE_MATCHES(Name, ...) \
#define RETURN_IF_DEBUG_BREAK_SIZE_MATCHES(Name) \
if (bytecode_size == Size(Bytecode::k##Name, OperandScale::kSingle)) { \
return Bytecode::k##Name; \
}
@ -134,224 +108,6 @@ Bytecode Bytecodes::GetDebugBreak(Bytecode bytecode) {
return Bytecode::kIllegal;
}
// static
int Bytecodes::Size(Bytecode bytecode, OperandScale operand_scale) {
int size = 1;
for (int i = 0; i < NumberOfOperands(bytecode); i++) {
OperandSize operand_size = GetOperandSize(bytecode, i, operand_scale);
int delta = static_cast<int>(operand_size);
DCHECK(base::bits::IsPowerOfTwo32(static_cast<uint32_t>(delta)));
size += delta;
}
return size;
}
// static
size_t Bytecodes::ReturnCount(Bytecode bytecode) {
return bytecode == Bytecode::kReturn ? 1 : 0;
}
// static
int Bytecodes::NumberOfOperands(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::kOperandCount;
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return 0;
}
// static
int Bytecodes::NumberOfRegisterOperands(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
typedef BytecodeTraits<__VA_ARGS__> Name##Trait; \
return Name##Trait::kRegisterOperandCount;
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return false;
}
// static
Bytecode Bytecodes::OperandScaleToPrefixBytecode(OperandScale operand_scale) {
switch (operand_scale) {
case OperandScale::kQuadruple:
return Bytecode::kExtraWide;
case OperandScale::kDouble:
return Bytecode::kWide;
default:
UNREACHABLE();
return Bytecode::kIllegal;
}
}
// static
bool Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale operand_scale) {
return operand_scale != OperandScale::kSingle;
}
// static
OperandScale Bytecodes::PrefixBytecodeToOperandScale(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kExtraWide:
case Bytecode::kDebugBreakExtraWide:
return OperandScale::kQuadruple;
case Bytecode::kWide:
case Bytecode::kDebugBreakWide:
return OperandScale::kDouble;
default:
UNREACHABLE();
return OperandScale::kSingle;
}
}
// static
AccumulatorUse Bytecodes::GetAccumulatorUse(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::kAccumulatorUse;
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return AccumulatorUse::kNone;
}
// static
bool Bytecodes::ReadsAccumulator(Bytecode bytecode) {
return (GetAccumulatorUse(bytecode) & AccumulatorUse::kRead) ==
AccumulatorUse::kRead;
}
// static
bool Bytecodes::WritesAccumulator(Bytecode bytecode) {
return (GetAccumulatorUse(bytecode) & AccumulatorUse::kWrite) ==
AccumulatorUse::kWrite;
}
// static
bool Bytecodes::WritesBooleanToAccumulator(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
case Bytecode::kToBooleanLogicalNot:
case Bytecode::kLogicalNot:
case Bytecode::kTestEqual:
case Bytecode::kTestNotEqual:
case Bytecode::kTestEqualStrict:
case Bytecode::kTestLessThan:
case Bytecode::kTestLessThanOrEqual:
case Bytecode::kTestGreaterThan:
case Bytecode::kTestGreaterThanOrEqual:
case Bytecode::kTestInstanceOf:
case Bytecode::kTestIn:
case Bytecode::kForInContinue:
return true;
default:
return false;
}
}
// static
bool Bytecodes::IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kLdaZero:
case Bytecode::kLdaSmi:
case Bytecode::kLdaUndefined:
case Bytecode::kLdaNull:
case Bytecode::kLdaTheHole:
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
case Bytecode::kLdaConstant:
case Bytecode::kLdar:
return true;
default:
return false;
}
}
// static
bool Bytecodes::IsJumpWithoutEffects(Bytecode bytecode) {
return IsJump(bytecode) && !IsJumpIfToBoolean(bytecode);
}
// static
bool Bytecodes::IsRegisterLoadWithoutEffects(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kMov:
case Bytecode::kPopContext:
case Bytecode::kPushContext:
case Bytecode::kStar:
case Bytecode::kLdrUndefined:
return true;
default:
return false;
}
}
// static
bool Bytecodes::IsWithoutExternalSideEffects(Bytecode bytecode) {
// These bytecodes only manipulate interpreter frame state and will
// never throw.
return (IsAccumulatorLoadWithoutEffects(bytecode) ||
IsRegisterLoadWithoutEffects(bytecode) ||
bytecode == Bytecode::kNop || IsJumpWithoutEffects(bytecode));
}
// static
OperandType Bytecodes::GetOperandType(Bytecode bytecode, int i) {
DCHECK_LE(bytecode, Bytecode::kLast);
DCHECK_LT(i, NumberOfOperands(bytecode));
DCHECK_GE(i, 0);
return GetOperandTypes(bytecode)[i];
}
// static
const OperandType* Bytecodes::GetOperandTypes(Bytecode bytecode) {
DCHECK_LE(bytecode, Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandTypes();
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return nullptr;
}
// static
const OperandTypeInfo* Bytecodes::GetOperandTypeInfos(Bytecode bytecode) {
DCHECK(bytecode <= Bytecode::kLast);
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
return BytecodeTraits<__VA_ARGS__>::GetOperandTypeInfos();
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return nullptr;
}
// static
OperandSize Bytecodes::GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale) {
DCHECK_LT(i, NumberOfOperands(bytecode));
OperandType operand_type = GetOperandType(bytecode, i);
return SizeOfOperand(operand_type, operand_scale);
}
// static
int Bytecodes::GetOperandOffset(Bytecode bytecode, int i,
OperandScale operand_scale) {
@ -366,71 +122,6 @@ int Bytecodes::GetOperandOffset(Bytecode bytecode, int i,
return offset;
}
// static
OperandSize Bytecodes::SizeOfOperand(OperandType operand_type,
OperandScale operand_scale) {
DCHECK_LE(operand_type, OperandType::kLast);
DCHECK_GE(operand_scale, OperandScale::kSingle);
DCHECK_LE(operand_scale, OperandScale::kLast);
return static_cast<OperandSize>(
ScaledOperandSize(operand_type, operand_scale));
}
// static
bool Bytecodes::IsConditionalJumpImmediate(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfTrue ||
bytecode == Bytecode::kJumpIfFalse ||
bytecode == Bytecode::kJumpIfToBooleanTrue ||
bytecode == Bytecode::kJumpIfToBooleanFalse ||
bytecode == Bytecode::kJumpIfNotHole ||
bytecode == Bytecode::kJumpIfNull ||
bytecode == Bytecode::kJumpIfUndefined;
}
// static
bool Bytecodes::IsConditionalJumpConstant(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfTrueConstant ||
bytecode == Bytecode::kJumpIfFalseConstant ||
bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
bytecode == Bytecode::kJumpIfToBooleanFalseConstant ||
bytecode == Bytecode::kJumpIfNotHoleConstant ||
bytecode == Bytecode::kJumpIfNullConstant ||
bytecode == Bytecode::kJumpIfUndefinedConstant;
}
// static
bool Bytecodes::IsConditionalJump(Bytecode bytecode) {
return IsConditionalJumpImmediate(bytecode) ||
IsConditionalJumpConstant(bytecode);
}
// static
bool Bytecodes::IsJumpImmediate(Bytecode bytecode) {
return bytecode == Bytecode::kJump || bytecode == Bytecode::kJumpLoop ||
IsConditionalJumpImmediate(bytecode);
}
// static
bool Bytecodes::IsJumpConstant(Bytecode bytecode) {
return bytecode == Bytecode::kJumpConstant ||
IsConditionalJumpConstant(bytecode);
}
// static
bool Bytecodes::IsJump(Bytecode bytecode) {
return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
}
// static
bool Bytecodes::IsJumpIfToBoolean(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfToBooleanTrue ||
bytecode == Bytecode::kJumpIfToBooleanFalse ||
bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
bytecode == Bytecode::kJumpIfToBooleanFalseConstant;
}
// static
Bytecode Bytecodes::GetJumpWithoutToBoolean(Bytecode bytecode) {
switch (bytecode) {
@ -449,19 +140,6 @@ Bytecode Bytecodes::GetJumpWithoutToBoolean(Bytecode bytecode) {
return Bytecode::kIllegal;
}
// static
bool Bytecodes::IsCallOrNew(Bytecode bytecode) {
return bytecode == Bytecode::kCall || bytecode == Bytecode::kTailCall ||
bytecode == Bytecode::kNew;
}
// static
bool Bytecodes::IsCallRuntime(Bytecode bytecode) {
return bytecode == Bytecode::kCallRuntime ||
bytecode == Bytecode::kCallRuntimeForPair ||
bytecode == Bytecode::kInvokeIntrinsic;
}
// static
bool Bytecodes::IsDebugBreak(Bytecode bytecode) {
switch (bytecode) {
@ -475,53 +153,6 @@ bool Bytecodes::IsDebugBreak(Bytecode bytecode) {
return false;
}
// static
bool Bytecodes::IsLdarOrStar(Bytecode bytecode) {
return bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar;
}
// static
bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
switch (bytecode) {
#define CASE(Name, ...) \
case Bytecode::k##Name: \
typedef BytecodeTraits<__VA_ARGS__> Name##Trait; \
return Name##Trait::IsScalable();
BYTECODE_LIST(CASE)
#undef CASE
}
UNREACHABLE();
return false;
}
// static
bool Bytecodes::IsPrefixScalingBytecode(Bytecode bytecode) {
switch (bytecode) {
case Bytecode::kExtraWide:
case Bytecode::kDebugBreakExtraWide:
case Bytecode::kWide:
case Bytecode::kDebugBreakWide:
return true;
default:
return false;
}
}
// static
bool Bytecodes::PutsNameInAccumulator(Bytecode bytecode) {
return bytecode == Bytecode::kTypeOf;
}
// static
bool Bytecodes::IsJumpOrReturn(Bytecode bytecode) {
return bytecode == Bytecode::kReturn || IsJump(bytecode);
}
// static
bool Bytecodes::IsMaybeRegisterOperandType(OperandType operand_type) {
return operand_type == OperandType::kMaybeReg;
}
// static
bool Bytecodes::IsRegisterOperandType(OperandType operand_type) {
switch (operand_type) {
@ -603,21 +234,11 @@ bool Bytecodes::IsStarLookahead(Bytecode bytecode, OperandScale operand_scale) {
}
// static
int Bytecodes::GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
switch (operand_type) {
case OperandType::kMaybeReg:
case OperandType::kReg:
case OperandType::kRegOut:
return 1;
case OperandType::kRegPair:
case OperandType::kRegOutPair:
return 2;
case OperandType::kRegOutTriple:
return 3;
default:
return 0;
bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
for (int i = 0; i < NumberOfOperands(bytecode); i++) {
if (OperandIsScalable(bytecode, i)) return true;
}
return 0;
return false;
}
// static
@ -634,25 +255,28 @@ bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
}
// static
OperandSize Bytecodes::SizeForSignedOperand(int value) {
if (value >= kMinInt8 && value <= kMaxInt8) {
return OperandSize::kByte;
} else if (value >= kMinInt16 && value <= kMaxInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
}
// static
OperandSize Bytecodes::SizeForUnsignedOperand(uint32_t value) {
if (value <= kMaxUInt8) {
return OperandSize::kByte;
} else if (value <= kMaxUInt16) {
return OperandSize::kShort;
} else {
return OperandSize::kQuad;
}
OperandSize Bytecodes::SizeOfOperand(OperandType operand_type,
OperandScale operand_scale) {
DCHECK_LE(operand_type, OperandType::kLast);
DCHECK_GE(operand_scale, OperandScale::kSingle);
DCHECK_LE(operand_scale, OperandScale::kLast);
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int scale_index = static_cast<int>(operand_scale) >> 1;
// clang-format off
static const OperandSize kOperandSizes[][3] = {
#define ENTRY(Name, ...) \
{ OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize },
OPERAND_TYPE_LIST(ENTRY)
#undef ENTRY
};
// clang-format on
return kOperandSizes[static_cast<size_t>(operand_type)][scale_index];
}
// static
@ -666,22 +290,6 @@ std::ostream& operator<<(std::ostream& os, const Bytecode& bytecode) {
return os << Bytecodes::ToString(bytecode);
}
std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use) {
return os << Bytecodes::AccumulatorUseToString(use);
}
std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size) {
return os << Bytecodes::OperandSizeToString(operand_size);
}
std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale) {
return os << Bytecodes::OperandScaleToString(operand_scale);
}
std::ostream& operator<<(std::ostream& os, const OperandType& operand_type) {
return os << Bytecodes::OperandTypeToString(operand_type);
}
} // namespace interpreter
} // namespace internal
} // namespace v8

View File

@ -9,6 +9,9 @@
#include <iosfwd>
#include <string>
#include "src/globals.h"
#include "src/interpreter/bytecode-operands.h"
// This interface and it's implementation are independent of the
// libv8_base library as they are used by the interpreter and the
// standalone mkpeephole table generator program.
@ -17,65 +20,8 @@ namespace v8 {
namespace internal {
namespace interpreter {
#define INVALID_OPERAND_TYPE_LIST(V) V(None, OperandTypeInfo::kNone)
#define REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
V(MaybeReg, OperandTypeInfo::kScalableSignedByte) \
V(Reg, OperandTypeInfo::kScalableSignedByte) \
V(RegPair, OperandTypeInfo::kScalableSignedByte)
#define REGISTER_OUTPUT_OPERAND_TYPE_LIST(V) \
V(RegOut, OperandTypeInfo::kScalableSignedByte) \
V(RegOutPair, OperandTypeInfo::kScalableSignedByte) \
V(RegOutTriple, OperandTypeInfo::kScalableSignedByte)
#define SCALAR_OPERAND_TYPE_LIST(V) \
V(Flag8, OperandTypeInfo::kFixedUnsignedByte) \
V(IntrinsicId, OperandTypeInfo::kFixedUnsignedByte) \
V(Idx, OperandTypeInfo::kScalableUnsignedByte) \
V(UImm, OperandTypeInfo::kScalableUnsignedByte) \
V(Imm, OperandTypeInfo::kScalableSignedByte) \
V(RegCount, OperandTypeInfo::kScalableUnsignedByte) \
V(RuntimeId, OperandTypeInfo::kFixedUnsignedShort)
#define REGISTER_OPERAND_TYPE_LIST(V) \
REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)
#define NON_REGISTER_OPERAND_TYPE_LIST(V) \
INVALID_OPERAND_TYPE_LIST(V) \
SCALAR_OPERAND_TYPE_LIST(V)
// The list of operand types used by bytecodes.
#define OPERAND_TYPE_LIST(V) \
NON_REGISTER_OPERAND_TYPE_LIST(V) \
REGISTER_OPERAND_TYPE_LIST(V)
// Define one debug break bytecode for each possible size of unscaled
// bytecodes. Format is V(<bytecode>, <accumulator_use>, <operands>).
#define DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
V(DebugBreak0, AccumulatorUse::kRead) \
V(DebugBreak1, AccumulatorUse::kRead, OperandType::kReg) \
V(DebugBreak2, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg) \
V(DebugBreak3, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
OperandType::kReg) \
V(DebugBreak4, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
OperandType::kReg, OperandType::kReg) \
V(DebugBreak5, AccumulatorUse::kRead, OperandType::kRuntimeId, \
OperandType::kReg, OperandType::kReg) \
V(DebugBreak6, AccumulatorUse::kRead, OperandType::kRuntimeId, \
OperandType::kReg, OperandType::kReg, OperandType::kReg)
// Define one debug break for each widening prefix.
#define DEBUG_BREAK_PREFIX_BYTECODE_LIST(V) \
V(DebugBreakWide, AccumulatorUse::kRead) \
V(DebugBreakExtraWide, AccumulatorUse::kRead)
#define DEBUG_BREAK_BYTECODE_LIST(V) \
DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
DEBUG_BREAK_PREFIX_BYTECODE_LIST(V)
// The list of bytecodes which are interpreted by the interpreter.
// Format is V(<bytecode>, <accumulator_use>, <operands>).
#define BYTECODE_LIST(V) \
/* Extended width operands */ \
V(Wide, AccumulatorUse::kNone) \
@ -306,7 +252,22 @@ namespace interpreter {
\
/* Debugger */ \
V(Debugger, AccumulatorUse::kNone) \
DEBUG_BREAK_BYTECODE_LIST(V) \
\
/* Debug Breakpoints - one for each possible size of unscaled bytecodes */ \
/* and one for each operand widening prefix bytecode */ \
V(DebugBreak0, AccumulatorUse::kRead) \
V(DebugBreak1, AccumulatorUse::kRead, OperandType::kReg) \
V(DebugBreak2, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg) \
V(DebugBreak3, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
OperandType::kReg) \
V(DebugBreak4, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
OperandType::kReg, OperandType::kReg) \
V(DebugBreak5, AccumulatorUse::kRead, OperandType::kRuntimeId, \
OperandType::kReg, OperandType::kReg) \
V(DebugBreak6, AccumulatorUse::kRead, OperandType::kRuntimeId, \
OperandType::kReg, OperandType::kReg, OperandType::kReg) \
V(DebugBreakWide, AccumulatorUse::kRead) \
V(DebugBreakExtraWide, AccumulatorUse::kRead) \
\
/* Illegal bytecode (terminates execution) */ \
V(Illegal, AccumulatorUse::kNone) \
@ -315,74 +276,23 @@ namespace interpreter {
/* eliminated bytecodes). */ \
V(Nop, AccumulatorUse::kNone)
enum class AccumulatorUse : uint8_t {
kNone = 0,
kRead = 1 << 0,
kWrite = 1 << 1,
kReadWrite = kRead | kWrite
};
// List of debug break bytecodes.
#define DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
V(DebugBreak0) \
V(DebugBreak1) \
V(DebugBreak2) \
V(DebugBreak3) \
V(DebugBreak4) \
V(DebugBreak5) \
V(DebugBreak6)
// Intersection of two accumulator-use flag sets (bitwise AND on the bits).
inline AccumulatorUse operator&(AccumulatorUse lhs, AccumulatorUse rhs) {
  return static_cast<AccumulatorUse>(static_cast<int>(lhs) &
                                     static_cast<int>(rhs));
}
#define DEBUG_BREAK_PREFIX_BYTECODE_LIST(V) \
V(DebugBreakWide) \
V(DebugBreakExtraWide)
// Union of two accumulator-use flag sets (bitwise OR on the bits).
inline AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
  return static_cast<AccumulatorUse>(static_cast<int>(lhs) |
                                     static_cast<int>(rhs));
}
// Enumeration of scaling factors applicable to scalable operands. Code
// relies on being able to cast values to integer scaling values.
#define OPERAND_SCALE_LIST(V) \
V(Single, 1) \
V(Double, 2) \
V(Quadruple, 4)
// Width multiplier for scalable operands: kSingle = 1, kDouble = 2,
// kQuadruple = 4. The enumerator value IS the scale factor, so casting to
// an integer yields the multiplier directly (code relies on this).
enum class OperandScale : uint8_t {
#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
#undef DECLARE_OPERAND_SCALE
kLast = kQuadruple
};
// Enumeration of the size classes of operand types used by
// bytecodes. Code relies on being able to cast values to integer
// types to get the size in bytes.
// Size class of an encoded operand. The enumerator value is the size in
// bytes, so casting to an integer yields the byte count directly.
enum class OperandSize : uint8_t {
kNone = 0,
kByte = 1,
kShort = 2,
kQuad = 4,
kLast = kQuad
};
// Primitive operand info used that summarize properties of operands.
// Columns are Name, IsScalable, IsUnsigned, UnscaledSize.
#define OPERAND_TYPE_INFO_LIST(V) \
V(None, false, false, OperandSize::kNone) \
V(ScalableSignedByte, true, false, OperandSize::kByte) \
V(ScalableUnsignedByte, true, true, OperandSize::kByte) \
V(FixedUnsignedByte, false, true, OperandSize::kByte) \
V(FixedUnsignedShort, false, true, OperandSize::kShort)
// Size/scalability classification for operand types, generated from
// OPERAND_TYPE_INFO_LIST (columns: Name, IsScalable, IsUnsigned, Size).
enum class OperandTypeInfo : uint8_t {
#define DECLARE_OPERAND_TYPE_INFO(Name, ...) k##Name,
OPERAND_TYPE_INFO_LIST(DECLARE_OPERAND_TYPE_INFO)
#undef DECLARE_OPERAND_TYPE_INFO
};
// Enumeration of operand types used by bytecodes.
// Enumeration of operand types used by bytecodes, generated from
// OPERAND_TYPE_LIST.
enum class OperandType : uint8_t {
#define DECLARE_OPERAND_TYPE(Name, _) k##Name,
OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE)
#undef DECLARE_OPERAND_TYPE
#define COUNT_OPERAND_TYPES(x, _) +1
// The COUNT_OPERAND macro will turn this into kLast = -1 +1 +1... which will
// evaluate to the same value as the last operand, giving kLast the value of
// the final enumerator above.
kLast = -1 OPERAND_TYPE_LIST(COUNT_OPERAND_TYPES)
#undef COUNT_OPERAND_TYPES
};
#define DEBUG_BREAK_BYTECODE_LIST(V) \
DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
DEBUG_BREAK_PREFIX_BYTECODE_LIST(V)
// Enumeration of interpreter bytecodes.
enum class Bytecode : uint8_t {
@ -396,6 +306,14 @@ enum class Bytecode : uint8_t {
#undef COUNT_BYTECODE
};
// TODO(rmcilroy): Remove once we switch to MSVC 2015 which supports constexpr.
// See crbug.com/603131.
#if V8_CC_MSVC
#define CONSTEXPR const
#else
#define CONSTEXPR constexpr
#endif
class Bytecodes final {
public:
// The maximum number of operands a bytecode may have.
@ -407,157 +325,315 @@ class Bytecodes final {
// Returns string representation of |bytecode|.
static std::string ToString(Bytecode bytecode, OperandScale operand_scale);
// Returns string representation of |accumulator_use|.
static const char* AccumulatorUseToString(AccumulatorUse accumulator_use);
// Returns string representation of |operand_type|.
static const char* OperandTypeToString(OperandType operand_type);
// Returns string representation of |operand_scale|.
static const char* OperandScaleToString(OperandScale operand_scale);
// Returns string representation of |operand_size|.
static const char* OperandSizeToString(OperandSize operand_size);
// Returns byte value of bytecode.
static uint8_t ToByte(Bytecode bytecode);
// Converts |bytecode| to its raw single-byte encoding.
static uint8_t ToByte(Bytecode bytecode) {
  DCHECK_LE(bytecode, Bytecode::kLast);
  uint8_t raw_value = static_cast<uint8_t>(bytecode);
  return raw_value;
}
// Returns bytecode for |value|.
static Bytecode FromByte(uint8_t value);
// Returns the number of operands expected by |bytecode|.
static int NumberOfOperands(Bytecode bytecode);
// Returns the number of register operands expected by |bytecode|.
static int NumberOfRegisterOperands(Bytecode bytecode);
// Decodes |value| back into its Bytecode enumerator.
static Bytecode FromByte(uint8_t value) {
  Bytecode decoded = static_cast<Bytecode>(value);
  DCHECK(decoded <= Bytecode::kLast);
  return decoded;
}
// Returns the prefix bytecode representing an operand scale to be
// applied to a a bytecode.
static Bytecode OperandScaleToPrefixBytecode(OperandScale operand_scale);
// Maps an operand scale to the Wide/ExtraWide prefix bytecode announcing
// it. Must not be called with OperandScale::kSingle (no prefix needed).
static Bytecode OperandScaleToPrefixBytecode(OperandScale operand_scale) {
  if (operand_scale == OperandScale::kQuadruple) {
    return Bytecode::kExtraWide;
  }
  if (operand_scale == OperandScale::kDouble) {
    return Bytecode::kWide;
  }
  UNREACHABLE();
  return Bytecode::kIllegal;
}
// Returns true if the operand scale requires a prefix bytecode.
static bool OperandScaleRequiresPrefixBytecode(OperandScale operand_scale);
// True when |operand_scale| needs a Wide/ExtraWide prefix; only the
// default single-byte scale is prefix-free.
static bool OperandScaleRequiresPrefixBytecode(OperandScale operand_scale) {
  bool is_unscaled = (operand_scale == OperandScale::kSingle);
  return !is_unscaled;
}
// Returns the scaling applied to scalable operands if bytecode is
// is a scaling prefix.
static OperandScale PrefixBytecodeToOperandScale(Bytecode bytecode);
// Inverse of OperandScaleToPrefixBytecode: recovers the operand scale a
// prefix bytecode announces. Must only be called with prefix bytecodes.
static OperandScale PrefixBytecodeToOperandScale(Bytecode bytecode) {
  if (bytecode == Bytecode::kExtraWide ||
      bytecode == Bytecode::kDebugBreakExtraWide) {
    return OperandScale::kQuadruple;
  }
  if (bytecode == Bytecode::kWide || bytecode == Bytecode::kDebugBreakWide) {
    return OperandScale::kDouble;
  }
  UNREACHABLE();
  return OperandScale::kSingle;
}
// Returns how accumulator is used by |bytecode|.
static AccumulatorUse GetAccumulatorUse(Bytecode bytecode);
// Looks up how |bytecode| interacts with the accumulator register.
static AccumulatorUse GetAccumulatorUse(Bytecode bytecode) {
  DCHECK(bytecode <= Bytecode::kLast);
  size_t index = static_cast<size_t>(bytecode);
  return kAccumulatorUse[index];
}
// Returns true if |bytecode| reads the accumulator.
static bool ReadsAccumulator(Bytecode bytecode);
// True if |bytecode| reads the accumulator (kRead flag set).
static bool ReadsAccumulator(Bytecode bytecode) {
  AccumulatorUse use = GetAccumulatorUse(bytecode);
  return (use & AccumulatorUse::kRead) == AccumulatorUse::kRead;
}
// Returns true if |bytecode| writes the accumulator.
static bool WritesAccumulator(Bytecode bytecode);
// True if |bytecode| writes the accumulator (kWrite flag set).
static bool WritesAccumulator(Bytecode bytecode) {
  AccumulatorUse use = GetAccumulatorUse(bytecode);
  return (use & AccumulatorUse::kWrite) == AccumulatorUse::kWrite;
}
// Return true if |bytecode| writes the accumulator with a boolean value.
static bool WritesBooleanToAccumulator(Bytecode bytecode);
// Enumerates every bytecode whose accumulator result is always a boolean.
static bool WritesBooleanToAccumulator(Bytecode bytecode) {
switch (bytecode) {
// Boolean literal loads.
case Bytecode::kLdaTrue:
case Bytecode::kLdaFalse:
// Logical negation.
case Bytecode::kToBooleanLogicalNot:
case Bytecode::kLogicalNot:
// Comparison and test operations.
case Bytecode::kTestEqual:
case Bytecode::kTestNotEqual:
case Bytecode::kTestEqualStrict:
case Bytecode::kTestLessThan:
case Bytecode::kTestLessThanOrEqual:
case Bytecode::kTestGreaterThan:
case Bytecode::kTestGreaterThanOrEqual:
case Bytecode::kTestInstanceOf:
case Bytecode::kTestIn:
// For-in loop continuation test.
case Bytecode::kForInContinue:
return true;
default:
return false;
}
}
// Return true if |bytecode| is an accumulator load without effects,
// e.g. LdaConstant, LdaTrue, Ldar.
static bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode);
static CONSTEXPR bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
// Single-expression body keeps this a valid C++11 constexpr function.
return bytecode == Bytecode::kLdar || bytecode == Bytecode::kLdaZero ||
bytecode == Bytecode::kLdaSmi || bytecode == Bytecode::kLdaNull ||
bytecode == Bytecode::kLdaTrue || bytecode == Bytecode::kLdaFalse ||
bytecode == Bytecode::kLdaUndefined ||
bytecode == Bytecode::kLdaTheHole ||
bytecode == Bytecode::kLdaConstant;
}
// Return true if |bytecode| is a register load without effects,
// e.g. Mov, Star, LdrUndefined. Note: PushContext/PopContext are also
// treated as effect-free register transfers here.
static CONSTEXPR bool IsRegisterLoadWithoutEffects(Bytecode bytecode) {
return bytecode == Bytecode::kMov || bytecode == Bytecode::kPopContext ||
bytecode == Bytecode::kPushContext || bytecode == Bytecode::kStar ||
bytecode == Bytecode::kLdrUndefined;
}
// Returns true if the bytecode is a conditional jump taking
// an immediate byte operand (OperandType::kImm). The constant-pool
// variants are covered by IsConditionalJumpConstant.
static CONSTEXPR bool IsConditionalJumpImmediate(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfTrue ||
bytecode == Bytecode::kJumpIfFalse ||
bytecode == Bytecode::kJumpIfToBooleanTrue ||
bytecode == Bytecode::kJumpIfToBooleanFalse ||
bytecode == Bytecode::kJumpIfNotHole ||
bytecode == Bytecode::kJumpIfNull ||
bytecode == Bytecode::kJumpIfUndefined;
}
// Returns true if the bytecode is a conditional jump taking
// a constant pool entry (OperandType::kIdx). The immediate variants are
// covered by IsConditionalJumpImmediate.
static CONSTEXPR bool IsConditionalJumpConstant(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfTrueConstant ||
bytecode == Bytecode::kJumpIfFalseConstant ||
bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
bytecode == Bytecode::kJumpIfToBooleanFalseConstant ||
bytecode == Bytecode::kJumpIfNotHoleConstant ||
bytecode == Bytecode::kJumpIfNullConstant ||
bytecode == Bytecode::kJumpIfUndefinedConstant;
}
// Returns true if the bytecode is a conditional jump taking
// any kind of operand (immediate byte or constant pool entry).
static CONSTEXPR bool IsConditionalJump(Bytecode bytecode) {
return IsConditionalJumpImmediate(bytecode) ||
IsConditionalJumpConstant(bytecode);
}
// Returns true if the bytecode is a jump or a conditional jump taking
// an immediate byte operand (OperandType::kImm), i.e. the unconditional
// kJump and kJumpLoop plus the immediate conditional jumps.
static CONSTEXPR bool IsJumpImmediate(Bytecode bytecode) {
return bytecode == Bytecode::kJump || bytecode == Bytecode::kJumpLoop ||
IsConditionalJumpImmediate(bytecode);
}
// Returns true if the bytecode is a jump or conditional jump taking a
// constant pool entry (OperandType::kIdx): kJumpConstant plus the
// constant-pool conditional jumps.
static CONSTEXPR bool IsJumpConstant(Bytecode bytecode) {
return bytecode == Bytecode::kJumpConstant ||
IsConditionalJumpConstant(bytecode);
}
// Returns true if the bytecode is a jump that internally coerces the
// accumulator to a boolean. Such jumps are excluded from
// IsJumpWithoutEffects because of the coercion.
static CONSTEXPR bool IsJumpIfToBoolean(Bytecode bytecode) {
return bytecode == Bytecode::kJumpIfToBooleanTrue ||
bytecode == Bytecode::kJumpIfToBooleanFalse ||
bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
bytecode == Bytecode::kJumpIfToBooleanFalseConstant;
}
// Returns true if the bytecode is a jump or conditional jump taking
// any kind of operand (immediate byte or constant pool entry).
static CONSTEXPR bool IsJump(Bytecode bytecode) {
return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
}
// Returns true if the bytecode is a conditional jump, a jump, or a return,
// i.e. every jump variant plus kReturn.
static CONSTEXPR bool IsJumpOrReturn(Bytecode bytecode) {
return bytecode == Bytecode::kReturn || IsJump(bytecode);
}
// Return true if |bytecode| is a jump without effects,
// e.g. any jump excluding those that include type coercion like
// JumpIfTrueToBoolean.
static bool IsJumpWithoutEffects(Bytecode bytecode);
static CONSTEXPR bool IsJumpWithoutEffects(Bytecode bytecode) {
// A jump is effect-free unless it performs a ToBoolean coercion.
return IsJump(bytecode) && !IsJumpIfToBoolean(bytecode);
}
// Return true if |bytecode| is a register load without effects,
// e.g. Mov, Star, LdrUndefined.
static bool IsRegisterLoadWithoutEffects(Bytecode bytecode);
// Returns true if |bytecode| has no effects. These bytecodes only manipulate
// interpreter frame state and will never throw: effect-free accumulator
// loads, effect-free register moves, Nop, and coercion-free jumps.
static CONSTEXPR bool IsWithoutExternalSideEffects(Bytecode bytecode) {
return (IsAccumulatorLoadWithoutEffects(bytecode) ||
IsRegisterLoadWithoutEffects(bytecode) ||
bytecode == Bytecode::kNop || IsJumpWithoutEffects(bytecode));
}
// Returns true if |bytecode| has no effects.
static bool IsWithoutExternalSideEffects(Bytecode bytecode);
// Returns true if the bytecode is Ldar or Star, the plain
// accumulator<->register transfer bytecodes.
static CONSTEXPR bool IsLdarOrStar(Bytecode bytecode) {
return bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar;
}
// Returns true if |bytecode| puts a name in the accumulator.
// Currently only kTypeOf qualifies.
static CONSTEXPR bool PutsNameInAccumulator(Bytecode bytecode) {
return bytecode == Bytecode::kTypeOf;
}
// Returns true if the bytecode is a call or a constructor call
// (kCall, kTailCall or kNew).
static CONSTEXPR bool IsCallOrNew(Bytecode bytecode) {
return bytecode == Bytecode::kCall || bytecode == Bytecode::kTailCall ||
bytecode == Bytecode::kNew;
}
// Returns true if the bytecode is a call to the runtime
// (kCallRuntime, kCallRuntimeForPair or kInvokeIntrinsic).
static CONSTEXPR bool IsCallRuntime(Bytecode bytecode) {
return bytecode == Bytecode::kCallRuntime ||
bytecode == Bytecode::kCallRuntimeForPair ||
bytecode == Bytecode::kInvokeIntrinsic;
}
// Returns true if the bytecode is a scaling prefix bytecode. These
// prefixes widen the operands of the bytecode that follows them.
static CONSTEXPR bool IsPrefixScalingBytecode(Bytecode bytecode) {
return bytecode == Bytecode::kExtraWide || bytecode == Bytecode::kWide ||
bytecode == Bytecode::kDebugBreakExtraWide ||
bytecode == Bytecode::kDebugBreakWide;
}
// Returns the number of values which |bytecode| returns.
// Only kReturn yields a value.
static CONSTEXPR size_t ReturnCount(Bytecode bytecode) {
return bytecode == Bytecode::kReturn ? 1 : 0;
}
// Returns the number of operands expected by |bytecode| (table lookup).
static int NumberOfOperands(Bytecode bytecode) {
  DCHECK(bytecode <= Bytecode::kLast);
  size_t index = static_cast<size_t>(bytecode);
  return kOperandCount[index];
}
// Returns the i-th operand of |bytecode|.
static OperandType GetOperandType(Bytecode bytecode, int i);
// Returns the i-th operand of |bytecode|.
static OperandType GetOperandType(Bytecode bytecode, int i) {
  DCHECK_LE(bytecode, Bytecode::kLast);
  DCHECK_LT(i, NumberOfOperands(bytecode));
  DCHECK_GE(i, 0);
  const OperandType* operand_types = GetOperandTypes(bytecode);
  return operand_types[i];
}
// Returns a pointer to an array of operand types terminated in
// OperandType::kNone.
static const OperandType* GetOperandTypes(Bytecode bytecode);
// Returns |bytecode|'s operand-type array, terminated by OperandType::kNone.
static const OperandType* GetOperandTypes(Bytecode bytecode) {
  DCHECK(bytecode <= Bytecode::kLast);
  size_t index = static_cast<size_t>(bytecode);
  return kOperandTypes[index];
}
// Returns a pointer to an array of operand type info terminated in
// OperandTypeInfo::kNone.
static const OperandTypeInfo* GetOperandTypeInfos(Bytecode bytecode);
// True if operand |operand_index| of |bytecode| is a scalable signed byte.
static bool OperandIsScalableSignedByte(Bytecode bytecode,
                                        int operand_index) {
  DCHECK(bytecode <= Bytecode::kLast);
  OperandTypeInfo info =
      kOperandTypeInfos[static_cast<size_t>(bytecode)][operand_index];
  return info == OperandTypeInfo::kScalableSignedByte;
}
// True if operand |operand_index| of |bytecode| is a scalable unsigned byte.
static bool OperandIsScalableUnsignedByte(Bytecode bytecode,
                                          int operand_index) {
  DCHECK(bytecode <= Bytecode::kLast);
  OperandTypeInfo info =
      kOperandTypeInfos[static_cast<size_t>(bytecode)][operand_index];
  return info == OperandTypeInfo::kScalableUnsignedByte;
}
// True if the operand can be widened by an operand-scale prefix.
static bool OperandIsScalable(Bytecode bytecode, int operand_index) {
  bool scalable_signed = OperandIsScalableSignedByte(bytecode, operand_index);
  bool scalable_unsigned =
      OperandIsScalableUnsignedByte(bytecode, operand_index);
  return scalable_signed || scalable_unsigned;
}
// Returns true if the bytecode has wider operand forms.
static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
// Returns the size of the i-th operand of |bytecode|.
static OperandSize GetOperandSize(Bytecode bytecode, int i,
OperandScale operand_scale);
OperandScale operand_scale) {
CHECK_LT(i, NumberOfOperands(bytecode));
return GetOperandSizes(bytecode, operand_scale)[i];
}
// Returns the operand sizes of |bytecode| with scale |operand_scale|.
static const OperandSize* GetOperandSizes(Bytecode bytecode,
OperandScale operand_scale) {
DCHECK(bytecode <= Bytecode::kLast);
DCHECK_GE(operand_scale, OperandScale::kSingle);
DCHECK_LE(operand_scale, OperandScale::kLast);
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
// Scale factors 1/2/4 map onto table columns 0/1/2 via the right shift.
int scale_index = static_cast<int>(operand_scale) >> 1;
return kOperandSizes[static_cast<size_t>(bytecode)][scale_index];
}
// Returns the offset of the i-th operand of |bytecode| relative to the start
// of the bytecode.
static int GetOperandOffset(Bytecode bytecode, int i,
OperandScale operand_scale);
// Returns a debug break bytecode to replace |bytecode|.
static Bytecode GetDebugBreak(Bytecode bytecode);
// Returns the size of the bytecode including its operands for the
// given |operand_scale|.
static int Size(Bytecode bytecode, OperandScale operand_scale);
static int Size(Bytecode bytecode, OperandScale operand_scale) {
DCHECK(bytecode <= Bytecode::kLast);
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
// Scale factors 1/2/4 map onto table columns 0/1/2 via the right shift.
int scale_index = static_cast<int>(operand_scale) >> 1;
return kBytecodeSizes[static_cast<size_t>(bytecode)][scale_index];
}
// Returns the size of |operand|.
static OperandSize SizeOfOperand(OperandType operand, OperandScale scale);
// Returns the number of values which |bytecode| returns.
static size_t ReturnCount(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump taking
// an immediate byte operand (OperandType::kImm).
static bool IsConditionalJumpImmediate(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump taking
// a constant pool entry (OperandType::kIdx).
static bool IsConditionalJumpConstant(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump taking
// any kind of operand.
static bool IsConditionalJump(Bytecode bytecode);
// Returns true if the bytecode is a jump or a conditional jump taking
// an immediate byte operand (OperandType::kImm).
static bool IsJumpImmediate(Bytecode bytecode);
// Returns true if the bytecode is a jump or conditional jump taking a
// constant pool entry (OperandType::kIdx).
static bool IsJumpConstant(Bytecode bytecode);
// Returns true if the bytecode is a jump or conditional jump taking
// any kind of operand.
static bool IsJump(Bytecode bytecode);
// Returns true if the bytecode is a jump that internally coerces the
// accumulator to a boolean.
static bool IsJumpIfToBoolean(Bytecode bytecode);
// Returns a debug break bytecode to replace |bytecode|.
static Bytecode GetDebugBreak(Bytecode bytecode);
// Returns the equivalent jump bytecode without the accumulator coercion.
static Bytecode GetJumpWithoutToBoolean(Bytecode bytecode);
// Returns true if the bytecode is a conditional jump, a jump, or a return.
static bool IsJumpOrReturn(Bytecode bytecode);
// Returns true if the bytecode is a call or a constructor call.
static bool IsCallOrNew(Bytecode bytecode);
// Returns true if the bytecode is a call to the runtime.
static bool IsCallRuntime(Bytecode bytecode);
// Returns true if the bytecode is a debug break.
static bool IsDebugBreak(Bytecode bytecode);
// Returns true if the bytecode is Ldar or Star.
static bool IsLdarOrStar(Bytecode bytecode);
// Returns true if the bytecode has wider operand forms.
static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
// Returns true if the bytecode is a scaling prefix bytecode.
static bool IsPrefixScalingBytecode(Bytecode bytecode);
// Returns true if |bytecode| puts a name in the accumulator.
static bool PutsNameInAccumulator(Bytecode bytecode);
// Returns true if |operand_type| is any type of register operand.
static bool IsRegisterOperandType(OperandType operand_type);
@ -573,11 +649,25 @@ class Bytecodes final {
// Returns the number of registers represented by a register operand. For
// instance, a RegPair represents two registers.
static int GetNumberOfRegistersRepresentedBy(OperandType operand_type);
// Returns the number of registers represented by a register operand. For
// instance, a RegPair represents two registers; non-register operands
// occupy no register slots. (The trailing `return 0;` that followed the
// switch was unreachable — every path returns from inside the switch —
// and has been removed.)
static int GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
  switch (operand_type) {
    case OperandType::kMaybeReg:
    case OperandType::kReg:
    case OperandType::kRegOut:
      return 1;
    case OperandType::kRegPair:
    case OperandType::kRegOutPair:
      return 2;
    case OperandType::kRegOutTriple:
      return 3;
    default:
      return 0;
  }
}
// Returns true if |operand_type| is a maybe register operand
// (kMaybeReg).
static bool IsMaybeRegisterOperandType(OperandType operand_type);
// Returns the size of |operand| for |operand_scale|.
static OperandSize SizeOfOperand(OperandType operand, OperandScale scale);
// Returns true if |operand_type| is a runtime-id operand (kRuntimeId).
static bool IsRuntimeIdOperandType(OperandType operand_type);
@ -591,18 +681,55 @@ class Bytecodes final {
// OperandScale values.
static bool BytecodeHasHandler(Bytecode bytecode, OperandScale operand_scale);
// Return the operand size required to hold a signed operand.
static OperandSize SizeForSignedOperand(int value);
// Return the smallest operand scale able to hold the signed |value|:
// int8 range -> kSingle, int16 range -> kDouble, otherwise kQuadruple.
static OperandScale ScaleForSignedOperand(int32_t value) {
  if (value < kMinInt8 || value > kMaxInt8) {
    if (value < kMinInt16 || value > kMaxInt16) {
      return OperandScale::kQuadruple;
    }
    return OperandScale::kDouble;
  }
  return OperandScale::kSingle;
}
// Return the operand size required to hold an unsigned operand.
static OperandSize SizeForUnsignedOperand(uint32_t value);
// Return the smallest operand scale able to hold the unsigned |value|:
// <= uint8 max -> kSingle, <= uint16 max -> kDouble, else kQuadruple.
static OperandScale ScaleForUnsignedOperand(uint32_t value) {
  return value <= kMaxUInt8
             ? OperandScale::kSingle
             : (value <= kMaxUInt16 ? OperandScale::kDouble
                                    : OperandScale::kQuadruple);
}
// Return the smallest operand size able to hold the unsigned |value|:
// <= uint8 max -> kByte, <= uint16 max -> kShort, else kQuad.
static OperandSize SizeForUnsignedOperand(uint32_t value) {
  return value <= kMaxUInt8
             ? OperandSize::kByte
             : (value <= kMaxUInt16 ? OperandSize::kShort : OperandSize::kQuad);
}
private:
static const OperandType* const kOperandTypes[];
static const OperandTypeInfo* const kOperandTypeInfos[];
static const int kOperandCount[];
static const int kNumberOfRegisterOperands[];
static const AccumulatorUse kAccumulatorUse[];
static const bool kIsScalable[];
static const int kBytecodeSizes[][3];
static const OperandSize* const kOperandSizes[][3];
};
// TODO(rmcilroy): Remove once we switch to MSVC 2015 which supports constexpr.
// See crbug.com/603131.
#undef CONSTEXPR
std::ostream& operator<<(std::ostream& os, const Bytecode& bytecode);
std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use);
std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale);
std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size);
std::ostream& operator<<(std::ostream& os, const OperandType& operand_type);
} // namespace interpreter
} // namespace internal

View File

@ -983,6 +983,8 @@
'interpreter/bytecode-generator.h',
'interpreter/bytecode-label.cc',
'interpreter/bytecode-label.h',
'interpreter/bytecode-operands.cc',
'interpreter/bytecode-operands.h',
'interpreter/bytecode-peephole-optimizer.cc',
'interpreter/bytecode-peephole-optimizer.h',
'interpreter/bytecode-peephole-table.h',
@ -2434,7 +2436,10 @@
'..',
],
'sources': [
'interpreter/bytecode-operands.h',
'interpreter/bytecode-operands.cc',
'interpreter/bytecode-peephole-table.h',
'interpreter/bytecode-traits.h',
'interpreter/bytecodes.h',
'interpreter/bytecodes.cc',
'interpreter/mkpeephole.cc'

View File

@ -28,93 +28,78 @@ class BytecodeArrayWriterUnittest : public TestWithIsolateAndZone {
SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS) {}
~BytecodeArrayWriterUnittest() override {}
void Write(BytecodeNode* node, const BytecodeSourceInfo& info);
void Write(Bytecode bytecode,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
void Write(Bytecode bytecode, BytecodeSourceInfo info = BytecodeSourceInfo());
void Write(Bytecode bytecode, uint32_t operand0,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
BytecodeSourceInfo info = BytecodeSourceInfo());
void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
BytecodeSourceInfo info = BytecodeSourceInfo());
void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
uint32_t operand2, BytecodeSourceInfo info = BytecodeSourceInfo());
void Write(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
uint32_t operand2, uint32_t operand3,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
BytecodeSourceInfo info = BytecodeSourceInfo());
void WriteJump(Bytecode bytecode, BytecodeLabel* label,
const BytecodeSourceInfo& info = BytecodeSourceInfo());
void WriteJumpLoop(Bytecode bytecode, BytecodeLabel* label, int depth);
BytecodeSourceInfo info = BytecodeSourceInfo());
void WriteJumpLoop(Bytecode bytecode, BytecodeLabel* label, int depth,
BytecodeSourceInfo info = BytecodeSourceInfo());
BytecodeArrayWriter* writer() { return &bytecode_array_writer_; }
ZoneVector<unsigned char>* bytecodes() { return writer()->bytecodes(); }
SourcePositionTableBuilder* source_position_table_builder() {
return writer()->source_position_table_builder();
}
int max_register_count() { return writer()->max_register_count(); }
private:
ConstantArrayBuilder constant_array_builder_;
BytecodeArrayWriter bytecode_array_writer_;
};
void BytecodeArrayWriterUnittest::Write(BytecodeNode* node,
const BytecodeSourceInfo& info) {
if (info.is_valid()) {
node->source_info().Clone(info);
}
writer()->Write(node);
}
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode);
Write(&node, info);
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, &info);
writer()->Write(&node);
}
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode, operand0);
Write(&node, info);
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, operand0, &info);
writer()->Write(&node);
}
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
uint32_t operand1,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode, operand0, operand1);
Write(&node, info);
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, operand0, operand1, &info);
writer()->Write(&node);
}
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode, operand0, operand1, operand2);
Write(&node, info);
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, operand0, operand1, operand2, &info);
writer()->Write(&node);
}
void BytecodeArrayWriterUnittest::Write(Bytecode bytecode, uint32_t operand0,
uint32_t operand1, uint32_t operand2,
uint32_t operand3,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode, operand0, operand1, operand2, operand3);
Write(&node, info);
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, operand0, operand1, operand2, operand3, &info);
writer()->Write(&node);
}
void BytecodeArrayWriterUnittest::WriteJump(Bytecode bytecode,
BytecodeLabel* label,
const BytecodeSourceInfo& info) {
BytecodeNode node(bytecode, 0);
if (info.is_valid()) {
node.source_info().Clone(info);
}
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, 0, &info);
writer()->WriteJump(&node, label);
}
void BytecodeArrayWriterUnittest::WriteJumpLoop(Bytecode bytecode,
BytecodeLabel* label,
int depth) {
BytecodeNode node(bytecode, 0, depth);
BytecodeLabel* label, int depth,
BytecodeSourceInfo info) {
BytecodeNode node(bytecode, 0, depth, &info);
writer()->WriteJump(&node, label);
}
@ -123,19 +108,15 @@ TEST_F(BytecodeArrayWriterUnittest, SimpleExample) {
Write(Bytecode::kStackCheck, {10, false});
CHECK_EQ(bytecodes()->size(), 1);
CHECK_EQ(max_register_count(), 0);
Write(Bytecode::kLdaSmi, 127, {55, true});
CHECK_EQ(bytecodes()->size(), 3);
CHECK_EQ(max_register_count(), 0);
Write(Bytecode::kLdar, Register(200).ToOperand());
CHECK_EQ(bytecodes()->size(), 7);
CHECK_EQ(max_register_count(), 201);
Write(Bytecode::kReturn, {70, true});
CHECK_EQ(bytecodes()->size(), 8);
CHECK_EQ(max_register_count(), 201);
static const uint8_t bytes[] = {B(StackCheck), B(LdaSmi), U8(127), B(Wide),
B(Ldar), R16(200), B(Return)};
@ -167,7 +148,7 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
// clang-format off
/* 0 30 E> */ B(StackCheck),
/* 1 42 S> */ B(LdaConstant), U8(0),
/* 3 42 E> */ B(Star), R8(1),
/* 3 42 E> */ B(Add), R8(1), U8(1),
/* 5 68 S> */ B(JumpIfUndefined), U8(39),
/* 7 */ B(JumpIfNull), U8(37),
/* 9 */ B(ToObject), R8(3),
@ -192,30 +173,23 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
};
static const PositionTableEntry expected_positions[] = {
{0, 30, false}, {1, 42, true}, {3, 42, false}, {5, 68, true},
{17, 63, true}, {31, 54, false}, {36, 85, true}, {45, 85, true}};
{0, 30, false}, {1, 42, true}, {3, 42, false}, {6, 68, true},
{18, 63, true}, {32, 54, false}, {37, 85, true}, {46, 85, true}};
BytecodeLabel back_jump, jump_for_in, jump_end_1, jump_end_2, jump_end_3;
#define R(i) static_cast<uint32_t>(Register(i).ToOperand())
Write(Bytecode::kStackCheck, {30, false});
Write(Bytecode::kLdaConstant, U8(0), {42, true});
CHECK_EQ(max_register_count(), 0);
Write(Bytecode::kStar, R(1), {42, false});
CHECK_EQ(max_register_count(), 2);
Write(Bytecode::kAdd, R(1), U8(1), {42, false});
WriteJump(Bytecode::kJumpIfUndefined, &jump_end_1, {68, true});
WriteJump(Bytecode::kJumpIfNull, &jump_end_2);
Write(Bytecode::kToObject, R(3));
CHECK_EQ(max_register_count(), 4);
Write(Bytecode::kForInPrepare, R(3), R(4));
CHECK_EQ(max_register_count(), 7);
Write(Bytecode::kLdaZero);
CHECK_EQ(max_register_count(), 7);
Write(Bytecode::kStar, R(7));
CHECK_EQ(max_register_count(), 8);
writer()->BindLabel(&back_jump);
Write(Bytecode::kForInContinue, R(7), R(6), {63, true});
CHECK_EQ(max_register_count(), 8);
WriteJump(Bytecode::kJumpIfFalse, &jump_end_3);
Write(Bytecode::kForInNext, R(3), R(7), R(4), U8(1));
WriteJump(Bytecode::kJumpIfUndefined, &jump_for_in);
@ -233,7 +207,6 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
writer()->BindLabel(&jump_end_3);
Write(Bytecode::kLdaUndefined);
Write(Bytecode::kReturn, {85, true});
CHECK_EQ(max_register_count(), 8);
#undef R
CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));

View File

@ -16,7 +16,8 @@ namespace interpreter {
class BytecodeDeadCodeOptimizerTest : public BytecodePipelineStage,
public TestWithIsolateAndZone {
public:
BytecodeDeadCodeOptimizerTest() : dead_code_optimizer_(this) {}
BytecodeDeadCodeOptimizerTest()
: dead_code_optimizer_(this), last_written_(Bytecode::kIllegal) {}
~BytecodeDeadCodeOptimizerTest() override {}
void Write(BytecodeNode* node) override {
@ -56,7 +57,7 @@ TEST_F(BytecodeDeadCodeOptimizerTest, LiveCodeKept) {
CHECK_EQ(add, last_written());
BytecodeLabel target;
BytecodeNode jump(Bytecode::kJump, 0);
BytecodeNode jump(Bytecode::kJump, 0, nullptr);
optimizer()->WriteJump(&jump, &target);
CHECK_EQ(write_count(), 2);
CHECK_EQ(jump, last_written());
@ -100,7 +101,7 @@ TEST_F(BytecodeDeadCodeOptimizerTest, DeadCodeAfterReThrowEliminated) {
TEST_F(BytecodeDeadCodeOptimizerTest, DeadCodeAfterJumpEliminated) {
BytecodeLabel target;
BytecodeNode jump(Bytecode::kJump, 0);
BytecodeNode jump(Bytecode::kJump, 0, nullptr);
optimizer()->WriteJump(&jump, &target);
CHECK_EQ(write_count(), 1);
CHECK_EQ(jump, last_written());
@ -118,7 +119,7 @@ TEST_F(BytecodeDeadCodeOptimizerTest, DeadCodeStillDeadAfterConditinalJump) {
CHECK_EQ(ret, last_written());
BytecodeLabel target;
BytecodeNode jump(Bytecode::kJumpIfTrue, 0);
BytecodeNode jump(Bytecode::kJumpIfTrue, 0, nullptr);
optimizer()->WriteJump(&jump, &target);
CHECK_EQ(write_count(), 1);
CHECK_EQ(ret, last_written());

View File

@ -18,7 +18,8 @@ namespace interpreter {
class BytecodePeepholeOptimizerTest : public BytecodePipelineStage,
public TestWithIsolateAndZone {
public:
BytecodePeepholeOptimizerTest() : peephole_optimizer_(this) {}
BytecodePeepholeOptimizerTest()
: peephole_optimizer_(this), last_written_(Bytecode::kIllegal) {}
~BytecodePeepholeOptimizerTest() override {}
void Reset() {
@ -71,7 +72,7 @@ TEST_F(BytecodePeepholeOptimizerTest, FlushOnJump) {
CHECK_EQ(write_count(), 0);
BytecodeLabel target;
BytecodeNode jump(Bytecode::kJump, 0);
BytecodeNode jump(Bytecode::kJump, 0, nullptr);
optimizer()->WriteJump(&jump, &target);
CHECK_EQ(write_count(), 2);
CHECK_EQ(jump, last_written());
@ -103,8 +104,8 @@ TEST_F(BytecodePeepholeOptimizerTest, ElideEmptyNop) {
}
TEST_F(BytecodePeepholeOptimizerTest, ElideExpressionNop) {
BytecodeNode nop(Bytecode::kNop);
nop.source_info().MakeExpressionPosition(3);
BytecodeSourceInfo source_info(3, false);
BytecodeNode nop(Bytecode::kNop, &source_info);
optimizer()->Write(&nop);
BytecodeNode add(Bytecode::kAdd, Register(0).ToOperand(), 1);
optimizer()->Write(&add);
@ -114,11 +115,11 @@ TEST_F(BytecodePeepholeOptimizerTest, ElideExpressionNop) {
}
TEST_F(BytecodePeepholeOptimizerTest, KeepStatementNop) {
BytecodeNode nop(Bytecode::kNop);
nop.source_info().MakeStatementPosition(3);
BytecodeSourceInfo source_info(3, true);
BytecodeNode nop(Bytecode::kNop, &source_info);
optimizer()->Write(&nop);
BytecodeNode add(Bytecode::kAdd, Register(0).ToOperand(), 1);
add.source_info().MakeExpressionPosition(3);
source_info.MakeExpressionPosition(3);
BytecodeNode add(Bytecode::kAdd, Register(0).ToOperand(), 1, &source_info);
optimizer()->Write(&add);
Flush();
CHECK_EQ(write_count(), 2);
@ -204,8 +205,8 @@ TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRx) {
TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRxStatement) {
BytecodeNode first(Bytecode::kStar, Register(0).ToOperand());
BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand());
second.source_info().MakeStatementPosition(0);
BytecodeSourceInfo source_info(3, true);
BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(), &source_info);
optimizer()->Write(&first);
CHECK_EQ(write_count(), 0);
optimizer()->Write(&second);
@ -220,9 +221,9 @@ TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRxStatement) {
TEST_F(BytecodePeepholeOptimizerTest, StarRxLdarRxStatementStarRy) {
BytecodeLabel label;
BytecodeNode first(Bytecode::kStar, Register(0).ToOperand());
BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand());
BytecodeSourceInfo source_info(0, true);
BytecodeNode second(Bytecode::kLdar, Register(0).ToOperand(), &source_info);
BytecodeNode third(Bytecode::kStar, Register(3).ToOperand());
second.source_info().MakeStatementPosition(0);
optimizer()->Write(&first);
CHECK_EQ(write_count(), 0);
optimizer()->Write(&second);
@ -277,8 +278,8 @@ TEST_F(BytecodePeepholeOptimizerTest, LdaTrueLdaFalse) {
}
TEST_F(BytecodePeepholeOptimizerTest, LdaTrueStatementLdaFalse) {
BytecodeNode first(Bytecode::kLdaTrue);
first.source_info().MakeExpressionPosition(3);
BytecodeSourceInfo source_info(3, true);
BytecodeNode first(Bytecode::kLdaTrue, &source_info);
BytecodeNode second(Bytecode::kLdaFalse);
optimizer()->Write(&first);
CHECK_EQ(write_count(), 0);
@ -287,13 +288,13 @@ TEST_F(BytecodePeepholeOptimizerTest, LdaTrueStatementLdaFalse) {
Flush();
CHECK_EQ(write_count(), 1);
CHECK_EQ(last_written(), second);
CHECK(second.source_info().is_expression());
CHECK(second.source_info().is_statement());
CHECK_EQ(second.source_info().source_position(), 3);
}
TEST_F(BytecodePeepholeOptimizerTest, NopStackCheck) {
BytecodeNode first(Bytecode::kNop);
BytecodeNode second(Bytecode::kStackCheck);
BytecodeNode second(Bytecode::kStackCheck, nullptr);
optimizer()->Write(&first);
CHECK_EQ(write_count(), 0);
optimizer()->Write(&second);
@ -304,8 +305,8 @@ TEST_F(BytecodePeepholeOptimizerTest, NopStackCheck) {
}
TEST_F(BytecodePeepholeOptimizerTest, NopStatementStackCheck) {
BytecodeNode first(Bytecode::kNop);
first.source_info().MakeExpressionPosition(3);
BytecodeSourceInfo source_info(3, true);
BytecodeNode first(Bytecode::kNop, &source_info);
BytecodeNode second(Bytecode::kStackCheck);
optimizer()->Write(&first);
CHECK_EQ(write_count(), 0);
@ -313,9 +314,9 @@ TEST_F(BytecodePeepholeOptimizerTest, NopStatementStackCheck) {
CHECK_EQ(write_count(), 0);
Flush();
CHECK_EQ(write_count(), 1);
second.source_info().MakeExpressionPosition(
first.source_info().source_position());
CHECK_EQ(last_written(), second);
BytecodeSourceInfo expected_source_info(3, true);
BytecodeNode expected(Bytecode::kStackCheck, &expected_source_info);
CHECK_EQ(last_written(), expected);
}
// Tests covering BytecodePeepholeOptimizer::UpdateLastAndCurrentBytecodes().
@ -352,7 +353,8 @@ TEST_F(BytecodePeepholeOptimizerTest, MergeLdaKeyedPropertyStar) {
static_cast<uint32_t>(Register(1).ToOperand())};
const int expected_operand_count = static_cast<int>(arraysize(operands));
BytecodeNode first(Bytecode::kLdaKeyedProperty, operands[0], operands[1]);
BytecodeNode first(Bytecode::kLdaKeyedProperty, operands[0], operands[1],
nullptr);
BytecodeNode second(Bytecode::kStar, operands[2]);
BytecodeNode third(Bytecode::kReturn);
optimizer()->Write(&first);
@ -457,8 +459,8 @@ TEST_F(BytecodePeepholeOptimizerTest, MergeLdaSmiWithBinaryOp) {
for (auto operator_replacement : operator_replacement_pairs) {
uint32_t imm_operand = 17;
BytecodeNode first(Bytecode::kLdaSmi, imm_operand);
first.source_info().Clone({3, true});
BytecodeSourceInfo source_info(3, true);
BytecodeNode first(Bytecode::kLdaSmi, imm_operand, &source_info);
uint32_t reg_operand = Register(0).ToOperand();
uint32_t idx_operand = 1;
BytecodeNode second(operator_replacement[0], reg_operand, idx_operand);
@ -487,11 +489,11 @@ TEST_F(BytecodePeepholeOptimizerTest, NotMergingLdaSmiWithBinaryOp) {
for (auto operator_replacement : operator_replacement_pairs) {
uint32_t imm_operand = 17;
BytecodeNode first(Bytecode::kLdaSmi, imm_operand);
first.source_info().Clone({3, true});
BytecodeSourceInfo source_info(3, true);
BytecodeNode first(Bytecode::kLdaSmi, imm_operand, &source_info);
uint32_t reg_operand = Register(0).ToOperand();
BytecodeNode second(operator_replacement[0], reg_operand, 1);
second.source_info().Clone({4, true});
source_info.MakeStatementPosition(4);
BytecodeNode second(operator_replacement[0], reg_operand, 1, &source_info);
optimizer()->Write(&first);
optimizer()->Write(&second);
CHECK_EQ(last_written(), first);

View File

@ -51,12 +51,6 @@ TEST(BytecodeSourceInfo, Operations) {
CHECK_EQ(y.is_statement(), true);
}
TEST_F(BytecodeNodeTest, Constructor0) {
BytecodeNode node;
CHECK_EQ(node.bytecode(), Bytecode::kIllegal);
CHECK(!node.source_info().is_valid());
}
TEST_F(BytecodeNodeTest, Constructor1) {
BytecodeNode node(Bytecode::kLdaZero);
CHECK_EQ(node.bytecode(), Bytecode::kLdaZero);
@ -119,21 +113,21 @@ TEST_F(BytecodeNodeTest, Equality) {
TEST_F(BytecodeNodeTest, EqualityWithSourceInfo) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeSourceInfo first_source_info(3, true);
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3]);
node.source_info().MakeStatementPosition(3);
operands[3], &first_source_info);
CHECK_EQ(node, node);
BytecodeSourceInfo second_source_info(3, true);
BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
operands[2], operands[3]);
other.source_info().MakeStatementPosition(3);
operands[2], operands[3], &second_source_info);
CHECK_EQ(node, other);
}
TEST_F(BytecodeNodeTest, NoEqualityWithDifferentSourceInfo) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeSourceInfo source_info(77, true);
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3]);
node.source_info().MakeStatementPosition(3);
operands[3], &source_info);
BytecodeNode other(Bytecode::kForInNext, operands[0], operands[1],
operands[2], operands[3]);
CHECK_NE(node, other);
@ -143,41 +137,39 @@ TEST_F(BytecodeNodeTest, Clone) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3]);
BytecodeNode clone;
BytecodeNode clone(Bytecode::kIllegal);
clone.Clone(&node);
CHECK_EQ(clone, node);
}
TEST_F(BytecodeNodeTest, SetBytecode0) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3]);
BytecodeSourceInfo source_info(77, false);
node.source_info().Clone(source_info);
CHECK_EQ(node.source_info(), source_info);
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], &source_info);
CHECK_EQ(node.source_info(), BytecodeSourceInfo(77, false));
BytecodeNode clone;
BytecodeNode clone(Bytecode::kIllegal);
clone.Clone(&node);
clone.set_bytecode(Bytecode::kNop);
CHECK_EQ(clone.bytecode(), Bytecode::kNop);
CHECK_EQ(clone.operand_count(), 0);
CHECK_EQ(clone.source_info(), source_info);
CHECK_EQ(clone.source_info(), BytecodeSourceInfo(77, false));
}
TEST_F(BytecodeNodeTest, SetBytecode1) {
uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3]);
BytecodeSourceInfo source_info(77, false);
node.source_info().Clone(source_info);
BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
operands[3], &source_info);
BytecodeNode clone;
BytecodeNode clone(Bytecode::kIllegal);
clone.Clone(&node);
clone.set_bytecode(Bytecode::kJump, 0x01aabbcc);
CHECK_EQ(clone.bytecode(), Bytecode::kJump);
CHECK_EQ(clone.operand_count(), 1);
CHECK_EQ(clone.operand(0), 0x01aabbcc);
CHECK_EQ(clone.source_info(), source_info);
CHECK_EQ(clone.source_info(), BytecodeSourceInfo(77, false));
}
} // namespace interpreter

View File

@ -74,8 +74,8 @@ TEST_F(BytecodeRegisterOptimizerTest, WriteNop) {
TEST_F(BytecodeRegisterOptimizerTest, WriteNopExpression) {
Initialize(1, 1);
BytecodeNode node(Bytecode::kNop);
node.source_info().MakeExpressionPosition(3);
BytecodeSourceInfo source_info(3, false);
BytecodeNode node(Bytecode::kNop, &source_info);
optimizer()->Write(&node);
CHECK_EQ(write_count(), 1);
CHECK_EQ(node, last_written());
@ -83,8 +83,8 @@ TEST_F(BytecodeRegisterOptimizerTest, WriteNopExpression) {
TEST_F(BytecodeRegisterOptimizerTest, WriteNopStatement) {
Initialize(1, 1);
BytecodeSourceInfo source_info(3, true);
BytecodeNode node(Bytecode::kNop);
node.source_info().MakeStatementPosition(3);
optimizer()->Write(&node);
CHECK_EQ(write_count(), 1);
CHECK_EQ(node, last_written());
@ -97,7 +97,7 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryMaterializedForJump) {
optimizer()->Write(&node);
CHECK_EQ(write_count(), 0);
BytecodeLabel label;
BytecodeNode jump(Bytecode::kJump, 0);
BytecodeNode jump(Bytecode::kJump, 0, nullptr);
optimizer()->WriteJump(&jump, &label);
CHECK_EQ(write_count(), 2);
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kStar);

View File

@ -161,18 +161,47 @@ TEST(Bytecodes, PrefixMappings) {
}
}
TEST(Bytecodes, SizesForSignedOperands) {
CHECK(Bytecodes::SizeForSignedOperand(0) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt8) == OperandSize::kByte);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt8 + 1) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt8 - 1) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt16) == OperandSize::kShort);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt16 + 1) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt16 - 1) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMaxInt) == OperandSize::kQuad);
CHECK(Bytecodes::SizeForSignedOperand(kMinInt) == OperandSize::kQuad);
TEST(Bytecodes, ScaleForSignedOperand) {
CHECK(Bytecodes::ScaleForSignedOperand(0) == OperandScale::kSingle);
CHECK(Bytecodes::ScaleForSignedOperand(kMaxInt8) == OperandScale::kSingle);
CHECK(Bytecodes::ScaleForSignedOperand(kMinInt8) == OperandScale::kSingle);
CHECK(Bytecodes::ScaleForSignedOperand(kMaxInt8 + 1) ==
OperandScale::kDouble);
CHECK(Bytecodes::ScaleForSignedOperand(kMinInt8 - 1) ==
OperandScale::kDouble);
CHECK(Bytecodes::ScaleForSignedOperand(kMaxInt16) == OperandScale::kDouble);
CHECK(Bytecodes::ScaleForSignedOperand(kMinInt16) == OperandScale::kDouble);
CHECK(Bytecodes::ScaleForSignedOperand(kMaxInt16 + 1) ==
OperandScale::kQuadruple);
CHECK(Bytecodes::ScaleForSignedOperand(kMinInt16 - 1) ==
OperandScale::kQuadruple);
CHECK(Bytecodes::ScaleForSignedOperand(kMaxInt) == OperandScale::kQuadruple);
CHECK(Bytecodes::ScaleForSignedOperand(kMinInt) == OperandScale::kQuadruple);
}
TEST(Bytecodes, ScaleForUnsignedOperands) {
// int overloads
CHECK(Bytecodes::ScaleForUnsignedOperand(0) == OperandScale::kSingle);
CHECK(Bytecodes::ScaleForUnsignedOperand(kMaxUInt8) == OperandScale::kSingle);
CHECK(Bytecodes::ScaleForUnsignedOperand(kMaxUInt8 + 1) ==
OperandScale::kDouble);
CHECK(Bytecodes::ScaleForUnsignedOperand(kMaxUInt16) ==
OperandScale::kDouble);
CHECK(Bytecodes::ScaleForUnsignedOperand(kMaxUInt16 + 1) ==
OperandScale::kQuadruple);
// size_t overloads
CHECK(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(0)) ==
OperandScale::kSingle);
CHECK(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt8)) ==
OperandScale::kSingle);
CHECK(Bytecodes::ScaleForUnsignedOperand(
static_cast<size_t>(kMaxUInt8 + 1)) == OperandScale::kDouble);
CHECK(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt16)) ==
OperandScale::kDouble);
CHECK(Bytecodes::ScaleForUnsignedOperand(
static_cast<size_t>(kMaxUInt16 + 1)) == OperandScale::kQuadruple);
CHECK(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt32)) ==
OperandScale::kQuadruple);
}
TEST(Bytecodes, SizesForUnsignedOperands) {
@ -236,14 +265,6 @@ TEST(AccumulatorUse, SampleBytecodes) {
AccumulatorUse::kReadWrite);
}
TEST(AccumulatorUse, AccumulatorUseToString) {
std::set<std::string> names;
names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kNone));
names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kRead));
names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kWrite));
names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kReadWrite));
CHECK_EQ(names.size(), 4);
}
} // namespace interpreter
} // namespace internal
} // namespace v8