2015-09-28 18:05:56 +00:00
|
|
|
// Copyright 2015 the V8 project authors. All rights reserved.
|
2015-09-02 17:48:10 +00:00
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
|
|
|
|
|
|
|
#include <vector>
|
|
|
|
|
|
|
|
#include "src/v8.h"
|
|
|
|
|
2016-07-15 12:03:04 +00:00
|
|
|
#include "src/interpreter/bytecode-register.h"
|
2015-09-02 17:48:10 +00:00
|
|
|
#include "src/interpreter/bytecodes.h"
|
|
|
|
#include "test/unittests/test-utils.h"
|
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
namespace interpreter {
|
|
|
|
|
|
|
|
TEST(OperandConversion, Registers) {
  // Spot-checks a spread of register indices for a lossless
  // Register -> operand -> Register round trip.
  //
  // The original body repeated the identical round-trip check three times per
  // index (and once more behind an `i <= kMaxInt8` guard) — copy-paste residue
  // from when differently-sized operands were exercised. One check per index
  // covers exactly the same behavior.
  const int kRegisterCount = 128;
  const int kStep = kRegisterCount / 7;
  for (int i = 0; i < kRegisterCount; i += kStep) {
    uint32_t operand = Register(i).ToOperand();
    Register reg = Register::FromOperand(operand);
    CHECK_EQ(i, reg.index());
  }
}
|
|
|
|
|
|
|
|
TEST(OperandConversion, Parameters) {
  // For several parameter counts, every parameter index must survive a
  // Register -> operand -> Register -> parameter-index round trip.
  const int kParameterCounts[] = {7, 13, 99};
  for (int parameter_count : kParameterCounts) {
    for (int index = 0; index < parameter_count; index++) {
      Register param = Register::FromParameterIndex(index, parameter_count);
      uint32_t operand_value = param.ToOperand();
      Register round_tripped = Register::FromOperand(operand_value);
      CHECK_EQ(index, round_tripped.ToParameterIndex(parameter_count));
    }
  }
}
|
|
|
|
|
|
|
|
TEST(OperandConversion, RegistersParametersNoOverlap) {
  // Registers and parameters must map onto disjoint operand values. Each
  // operand claims one slot in a count table; a slot claimed twice means the
  // two spaces overlap.
  const int kRegisterCount = 128;
  const int kParameterCount = 100;
  int32_t register_space_size = base::bits::RoundUpToPowerOfTwo32(
      static_cast<uint32_t>(kRegisterCount + kParameterCount));
  uint32_t range = static_cast<uint32_t>(register_space_size);
  std::vector<uint8_t> operand_count(range);

  // Records |r|'s operand in the table and verifies the slot was unclaimed.
  auto claim_operand_slot = [&operand_count](Register r) {
    uint8_t slot = static_cast<uint8_t>(r.ToOperand());
    CHECK_LT(slot, operand_count.size());
    operand_count[slot] += 1;
    CHECK_EQ(operand_count[slot], 1);
  };

  for (int i = 0; i < kRegisterCount; i += 1) {
    claim_operand_slot(Register(i));
  }
  for (int i = 0; i < kParameterCount; i += 1) {
    claim_operand_slot(Register::FromParameterIndex(i, kParameterCount));
  }
}
|
|
|
|
|
|
|
|
TEST(OperandScaling, ScalableAndNonScalable) {
  // Checks hand-computed bytecode sizes at every operand scale: 1 byte for
  // the opcode plus the width of each operand, where scalable operands grow
  // with the scale factor and fixed-width operands do not.
  // NOTE(review): the per-bytecode operand layouts (which operands are
  // scalable vs fixed) are inferred from these expected sizes — confirm
  // against the operand lists in bytecodes.h.
  const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
      OPERAND_SCALE_LIST(VALUE)
#undef VALUE
  };

  for (OperandScale operand_scale : kOperandScales) {
    int scale = static_cast<int>(operand_scale);
    CHECK_EQ(Bytecodes::Size(Bytecode::kCallRuntime, operand_scale),
             1 + 2 + 2 * scale);
    CHECK_EQ(Bytecodes::Size(Bytecode::kCreateObjectLiteral, operand_scale),
             1 + 2 * scale + 1);
    CHECK_EQ(Bytecodes::Size(Bytecode::kTestIn, operand_scale), 1 + 2 * scale);
  }
}
|
|
|
|
|
2016-01-27 11:15:25 +00:00
|
|
|
TEST(Bytecodes, RegisterOperands) {
  // Single and pair register operand types are register operands.
  CHECK(Bytecodes::IsRegisterOperandType(OperandType::kReg));
  CHECK(Bytecodes::IsRegisterOperandType(OperandType::kRegPair));

  // kReg / kRegPair / kRegList are inputs; the kRegOut variant is not.
  CHECK(Bytecodes::IsRegisterInputOperandType(OperandType::kReg));
  CHECK(Bytecodes::IsRegisterInputOperandType(OperandType::kRegPair));
  CHECK(Bytecodes::IsRegisterInputOperandType(OperandType::kRegList));
  CHECK(!Bytecodes::IsRegisterInputOperandType(OperandType::kRegOut));

  // Conversely, only the kRegOut variants are outputs.
  CHECK(!Bytecodes::IsRegisterOutputOperandType(OperandType::kReg));
  CHECK(Bytecodes::IsRegisterOutputOperandType(OperandType::kRegOut));
  CHECK(Bytecodes::IsRegisterOutputOperandType(OperandType::kRegOutPair));
}
|
|
|
|
|
2016-03-21 17:08:21 +00:00
|
|
|
TEST(Bytecodes, DebugBreakExistForEachBytecode) {
  // For every bytecode (excluding the debug-break bytecodes themselves and
  // the Wide/ExtraWide scaling prefixes), its debug-break counterpart must
  // report the same size at single scale — presumably so the debugger can
  // substitute it in place; confirm against the debugger implementation.
  static const OperandScale kOperandScale = OperandScale::kSingle;
#define CHECK_DEBUG_BREAK_SIZE(Name, ...)                                  \
  if (!Bytecodes::IsDebugBreak(Bytecode::k##Name) &&                       \
      !Bytecodes::IsPrefixScalingBytecode(Bytecode::k##Name)) {            \
    Bytecode debug_bytecode = Bytecodes::GetDebugBreak(Bytecode::k##Name); \
    CHECK_EQ(Bytecodes::Size(Bytecode::k##Name, kOperandScale),            \
             Bytecodes::Size(debug_bytecode, kOperandScale));              \
  }
  BYTECODE_LIST(CHECK_DEBUG_BREAK_SIZE)
#undef CHECK_DEBUG_BREAK_SIZE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, DebugBreakForPrefixBytecodes) {
  // Each scaling-prefix bytecode has a dedicated debug-break counterpart.
  struct {
    Bytecode prefix;
    Bytecode debug_break;
  } kCases[] = {{Bytecode::kWide, Bytecode::kDebugBreakWide},
                {Bytecode::kExtraWide, Bytecode::kDebugBreakExtraWide}};
  for (const auto& c : kCases) {
    CHECK_EQ(c.debug_break, Bytecodes::GetDebugBreak(c.prefix));
  }
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, PrefixMappings) {
  // OperandScaleToPrefixBytecode and PrefixBytecodeToOperandScale must be
  // inverses for both scaling-prefix bytecodes.
  Bytecode prefixes[] = {Bytecode::kWide, Bytecode::kExtraWide};
  TRACED_FOREACH(Bytecode, prefix, prefixes) {
    CHECK_EQ(prefix, Bytecodes::OperandScaleToPrefixBytecode(
                         Bytecodes::PrefixBytecodeToOperandScale(prefix)));
  }
}
|
|
|
|
|
2016-09-22 16:34:16 +00:00
|
|
|
TEST(Bytecodes, ScaleForSignedOperand) {
  // A signed operand must use the narrowest scale whose width can represent
  // the value; each boundary value is paired with its expected scale.
  struct {
    int32_t value;
    OperandScale expected;
  } kCases[] = {
      {0, OperandScale::kSingle},
      {kMaxInt8, OperandScale::kSingle},
      {kMinInt8, OperandScale::kSingle},
      {kMaxInt8 + 1, OperandScale::kDouble},
      {kMinInt8 - 1, OperandScale::kDouble},
      {kMaxInt16, OperandScale::kDouble},
      {kMinInt16, OperandScale::kDouble},
      {kMaxInt16 + 1, OperandScale::kQuadruple},
      {kMinInt16 - 1, OperandScale::kQuadruple},
      {kMaxInt, OperandScale::kQuadruple},
      {kMinInt, OperandScale::kQuadruple},
  };
  for (const auto& c : kCases) {
    CHECK_EQ(Bytecodes::ScaleForSignedOperand(c.value), c.expected);
  }
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, ScaleForUnsignedOperands) {
  // An unsigned operand must use the narrowest scale whose width can
  // represent the value; both the int and size_t overloads are exercised.
  //
  // Fix: two size_t checks used `CHECK(a == b)` while the rest used
  // `CHECK_EQ(a, b)`; unified on CHECK_EQ, which prints both operands on
  // failure.
  // int overloads
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(0), OperandScale::kSingle);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(kMaxUInt8),
           OperandScale::kSingle);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(kMaxUInt8 + 1),
           OperandScale::kDouble);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(kMaxUInt16),
           OperandScale::kDouble);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(kMaxUInt16 + 1),
           OperandScale::kQuadruple);
  // size_t overloads
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(0)),
           OperandScale::kSingle);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt8)),
           OperandScale::kSingle);
  CHECK_EQ(
      Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt8 + 1)),
      OperandScale::kDouble);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt16)),
           OperandScale::kDouble);
  CHECK_EQ(
      Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt16 + 1)),
      OperandScale::kQuadruple);
  CHECK_EQ(Bytecodes::ScaleForUnsignedOperand(static_cast<size_t>(kMaxUInt32)),
           OperandScale::kQuadruple);
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, SizesForUnsignedOperands) {
  // An unsigned operand must be assigned the narrowest OperandSize that can
  // represent the value; both the int and size_t overloads are exercised.
  //
  // Fix: one size_t check used `CHECK(a == b)` while the rest used
  // `CHECK_EQ(a, b)`; unified on CHECK_EQ, which prints both operands on
  // failure.
  // int overloads
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(0), OperandSize::kByte);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(kMaxUInt8), OperandSize::kByte);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(kMaxUInt8 + 1),
           OperandSize::kShort);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(kMaxUInt16), OperandSize::kShort);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(kMaxUInt16 + 1),
           OperandSize::kQuad);
  // size_t overloads
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(0)),
           OperandSize::kByte);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8)),
           OperandSize::kByte);
  CHECK_EQ(
      Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt8 + 1)),
      OperandSize::kShort);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt16)),
           OperandSize::kShort);
  CHECK_EQ(
      Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt16 + 1)),
      OperandSize::kQuad);
  CHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<size_t>(kMaxUInt32)),
           OperandSize::kQuad);
}
|
|
|
|
|
2016-12-05 18:25:55 +00:00
|
|
|
// Helper macros to generate a check for if a bytecode is in a macro list of
// bytecodes. We can use these to exhaustively test a check over all bytecodes,
// both those that should pass and those that should fail the check.
//
// IN_BYTECODE_LIST expands to an immediately-invoked lambda: it starts from
// `false` and ORs in one equality comparison per bytecode named in LIST, so
// the expression is true iff BYTECODE appears in LIST.
#define OR_IS_BYTECODE(Name, ...) || bytecode == Bytecode::k##Name
#define IN_BYTECODE_LIST(BYTECODE, LIST) \
  ([](Bytecode bytecode) { return false LIST(OR_IS_BYTECODE); }(BYTECODE))
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsJump) {
  // Exhaustively checks Bytecodes::IsJump over every bytecode: it must agree
  // with membership in JUMP_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                 \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsJump(Bytecode::k##Name));           \
  } else {                                                       \
    EXPECT_FALSE(Bytecodes::IsJump(Bytecode::k##Name));          \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsForwardJump) {
  // Exhaustively checks Bytecodes::IsForwardJump over every bytecode: it must
  // agree with membership in JUMP_FORWARD_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                         \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_FORWARD_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsForwardJump(Bytecode::k##Name));            \
  } else {                                                               \
    EXPECT_FALSE(Bytecodes::IsForwardJump(Bytecode::k##Name));           \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsConditionalJump) {
  // Exhaustively checks Bytecodes::IsConditionalJump over every bytecode: it
  // must agree with membership in JUMP_CONDITIONAL_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                             \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsConditionalJump(Bytecode::k##Name));            \
  } else {                                                                   \
    EXPECT_FALSE(Bytecodes::IsConditionalJump(Bytecode::k##Name));           \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsUnconditionalJump) {
  // Exhaustively checks Bytecodes::IsUnconditionalJump over every bytecode:
  // it must agree with membership in JUMP_UNCONDITIONAL_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                               \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_UNCONDITIONAL_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name));            \
  } else {                                                                     \
    EXPECT_FALSE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name));           \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsJumpImmediate) {
  // Exhaustively checks Bytecodes::IsJumpImmediate over every bytecode: it
  // must agree with membership in JUMP_IMMEDIATE_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                           \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsJumpImmediate(Bytecode::k##Name));            \
  } else {                                                                 \
    EXPECT_FALSE(Bytecodes::IsJumpImmediate(Bytecode::k##Name));           \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsJumpConstant) {
  // Exhaustively checks Bytecodes::IsJumpConstant over every bytecode: it
  // must agree with membership in JUMP_CONSTANT_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                          \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsJumpConstant(Bytecode::k##Name));            \
  } else {                                                                \
    EXPECT_FALSE(Bytecodes::IsJumpConstant(Bytecode::k##Name));           \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsConditionalJumpImmediate) {
  // Exhaustively checks Bytecodes::IsConditionalJumpImmediate over every
  // bytecode: it must be true exactly for bytecodes in the intersection of
  // JUMP_CONDITIONAL_BYTECODE_LIST and JUMP_IMMEDIATE_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                               \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) &&   \
      IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) {     \
    EXPECT_TRUE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name));     \
  } else {                                                                     \
    EXPECT_FALSE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name));    \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsConditionalJumpConstant) {
  // Exhaustively checks Bytecodes::IsConditionalJumpConstant over every
  // bytecode: it must be true exactly for bytecodes in the intersection of
  // JUMP_CONDITIONAL_BYTECODE_LIST and JUMP_CONSTANT_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                             \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) && \
      IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) {    \
    EXPECT_TRUE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name));    \
  } else {                                                                   \
    EXPECT_FALSE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name));   \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
TEST(Bytecodes, IsJumpIfToBoolean) {
  // Exhaustively checks Bytecodes::IsJumpIfToBoolean over every bytecode: it
  // must agree with membership in JUMP_TO_BOOLEAN_BYTECODE_LIST.
#define TEST_BYTECODE(Name, ...)                                            \
  if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_TO_BOOLEAN_BYTECODE_LIST)) { \
    EXPECT_TRUE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name));           \
  } else {                                                                  \
    EXPECT_FALSE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name));          \
  }

  BYTECODE_LIST(TEST_BYTECODE)
#undef TEST_BYTECODE
}
|
|
|
|
|
|
|
|
// Clean up the bytecode-list membership helpers defined above.
#undef OR_IS_BYTECODE
#undef IN_BYTECODE_LIST
|
|
|
|
|
2016-03-21 17:08:21 +00:00
|
|
|
TEST(OperandScale, PrefixesRequired) {
  // Single scale needs no prefix bytecode; each wider scale requires one and
  // maps to its dedicated prefix.
  CHECK(!Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kSingle));

  CHECK(Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kDouble));
  CHECK_EQ(Bytecodes::OperandScaleToPrefixBytecode(OperandScale::kDouble),
           Bytecode::kWide);

  CHECK(
      Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kQuadruple));
  CHECK_EQ(Bytecodes::OperandScaleToPrefixBytecode(OperandScale::kQuadruple),
           Bytecode::kExtraWide);
}
|
|
|
|
|
2016-04-06 07:57:35 +00:00
|
|
|
TEST(AccumulatorUse, LogicalOperators) {
  // `|` combines accumulator uses; `&` intersects them.
  AccumulatorUse none_or_read = AccumulatorUse::kNone | AccumulatorUse::kRead;
  CHECK_EQ(none_or_read, AccumulatorUse::kRead);

  AccumulatorUse read_or_write = AccumulatorUse::kRead | AccumulatorUse::kWrite;
  CHECK_EQ(read_or_write, AccumulatorUse::kReadWrite);

  AccumulatorUse read_and_readwrite =
      AccumulatorUse::kRead & AccumulatorUse::kReadWrite;
  CHECK_EQ(read_and_readwrite, AccumulatorUse::kRead);

  AccumulatorUse read_and_write =
      AccumulatorUse::kRead & AccumulatorUse::kWrite;
  CHECK_EQ(read_and_write, AccumulatorUse::kNone);
}
|
|
|
|
|
|
|
|
TEST(AccumulatorUse, SampleBytecodes) {
  // Verifies that the Reads/Writes predicates agree with GetAccumulatorUse
  // for one sample bytecode of each accumulator-use flavour.
  auto check_use = [](Bytecode bytecode, bool reads, bool writes,
                      AccumulatorUse use) {
    CHECK_EQ(Bytecodes::ReadsAccumulator(bytecode), reads);
    CHECK_EQ(Bytecodes::WritesAccumulator(bytecode), writes);
    CHECK_EQ(Bytecodes::GetAccumulatorUse(bytecode), use);
  };
  check_use(Bytecode::kStar, true, false, AccumulatorUse::kRead);
  check_use(Bytecode::kLdar, false, true, AccumulatorUse::kWrite);
  check_use(Bytecode::kAdd, true, true, AccumulatorUse::kReadWrite);
}
|
|
|
|
|
2015-09-02 17:48:10 +00:00
|
|
|
} // namespace interpreter
|
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|