[maglev] Add BranchIfTypeOf

Add a shortcutting branch for TestTypeOf, similar to the compare
branches.

To do this, move the TestTypeOf implementation into MaglevAssembler. We
want to support label distances and fallthroughs correctly, so
additionally implement a generic Branch for labels with distances and
support for fallthroughs.

Bug: v8:7700
Change-Id: Ib8c6b0eeeec0a7f3429d3692081853d25278fba4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4181034
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85420}
This commit is contained in:
Leszek Swirski 2023-01-20 14:05:25 +01:00 committed by V8 LUCI CQ
parent da27bc1719
commit d2ff82cbc9
14 changed files with 410 additions and 261 deletions

View File

@ -74,6 +74,19 @@ TestTypeOfFlags::LiteralFlag TestTypeOfFlags::Decode(uint8_t raw_flag) {
return static_cast<LiteralFlag>(raw_flag);
}
// static
const char* TestTypeOfFlags::ToString(LiteralFlag literal_flag) {
  // Map each typeof literal back to its lowercase source-level spelling
  // (e.g. kNumber -> "number") via the literal list macro.
  switch (literal_flag) {
#define TYPEOF_NAME_CASE(Name, name) \
  case LiteralFlag::k##Name:         \
    return #name;
    TYPEOF_LITERAL_LIST(TYPEOF_NAME_CASE)
#undef TYPEOF_NAME_CASE
    default:
      // Raw flags outside the list decode to an invalid marker.
      return "<invalid>";
  }
}
// static
uint8_t StoreLookupSlotFlags::Encode(LanguageMode language_mode,
LookupHoistingMode lookup_hoisting_mode) {

View File

@ -75,6 +75,8 @@ class TestTypeOfFlags {
static uint8_t Encode(LiteralFlag literal_flag);
static LiteralFlag Decode(uint8_t raw_flag);
static const char* ToString(LiteralFlag literal_flag);
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(TestTypeOfFlags);
};

View File

@ -320,23 +320,6 @@ inline Condition MaglevAssembler::IsRootConstant(Input input,
return eq;
}
void MaglevAssembler::Branch(Condition condition, BasicBlock* if_true,
                             BasicBlock* if_false, BasicBlock* next_block) {
  // No branch-probability data is available, so arrange the jumps to fall
  // through into whichever target block is emitted next.
  if (if_false != next_block) {
    // The false block is out of line: reach it on the inverted condition,
    // then jump to the true block unless it is the fallthrough.
    JumpIf(NegateCondition(condition), if_false->label());
    if (if_true != next_block) {
      Jump(if_true->label());
    }
    return;
  }
  // The false block follows this one: jump over it when the condition holds,
  // otherwise fall through into it.
  JumpIf(condition, if_true->label());
}
inline MemOperand MaglevAssembler::StackSlotOperand(StackSlot slot) {
return MemOperand(fp, slot.index);
}
@ -521,14 +504,15 @@ inline void MaglevAssembler::CompareInt32(Register src1, Register src2) {
Cmp(src1.W(), src2.W());
}
inline void MaglevAssembler::Jump(Label* target) { B(target); }
inline void MaglevAssembler::Jump(Label* target, Label::Distance) { B(target); }
inline void MaglevAssembler::JumpIf(Condition cond, Label* target) {
inline void MaglevAssembler::JumpIf(Condition cond, Label* target,
Label::Distance) {
b(target, cond);
}
inline void MaglevAssembler::JumpIfTaggedEqual(Register r1, Register r2,
Label* target) {
Label* target, Label::Distance) {
CmpTagged(r1, r2);
b(target, eq);
}

View File

@ -169,6 +169,129 @@ void MaglevAssembler::ToBoolean(Register value, ZoneLabelRef is_true,
}
}
// Emits code testing whether typeof(object) matches `literal`, branching to
// `is_true`/`is_false`. Either target may be marked as a fallthrough, in
// which case no jump is emitted for it (see MaglevAssembler::Branch).
void MaglevAssembler::TestTypeOf(
Register object, interpreter::TestTypeOfFlags::LiteralFlag literal,
Label* is_true, Label::Distance true_distance, bool fallthrough_when_true,
Label* is_false, Label::Distance false_distance,
bool fallthrough_when_false) {
// If both true and false are fallthroughs, we don't have to do anything.
if (fallthrough_when_true && fallthrough_when_false) return;
// IMPORTANT: Note that `object` could be a register that aliases registers in
// the ScratchRegisterScope. Make sure that all reads of `object` are before
// any writes to scratch registers.
using LiteralFlag = interpreter::TestTypeOfFlags::LiteralFlag;
switch (literal) {
case LiteralFlag::kNumber: {
// Smis and objects with a HeapNumber map are both typeof "number".
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_true);
Ldr(scratch.W(), FieldMemOperand(object, HeapObject::kMapOffset));
CompareRoot(scratch.W(), RootIndex::kHeapNumberMap);
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kString: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false);
LoadMap(scratch, object);
CompareInstanceTypeRange(scratch, scratch, FIRST_STRING_TYPE,
LAST_STRING_TYPE);
// NOTE(review): signed `le` here vs the unsigned `hi` used by the old
// node implementation -- equivalent only if string instance types sit at
// the bottom of the range; confirm.
Branch(le, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kSymbol: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false);
LoadMap(scratch, object);
CompareInstanceType(scratch, scratch, SYMBOL_TYPE);
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kBoolean:
// Only the two boolean oddballs are typeof "boolean".
CompareRoot(object, RootIndex::kTrueValue);
B(eq, is_true);
CompareRoot(object, RootIndex::kFalseValue);
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
case LiteralFlag::kBigInt: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false);
LoadMap(scratch, object);
CompareInstanceType(scratch, scratch, BIGINT_TYPE);
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kUndefined: {
MaglevAssembler::ScratchRegisterScope temps(this);
// Make sure `object` isn't a valid temp here, since we re-use it.
temps.SetAvailable(temps.Available() - object);
Register map = temps.Acquire();
JumpIfSmi(object, is_false);
// Check it has the undetectable bit set and it is not null.
LoadMap(map, object);
Ldr(map.W(), FieldMemOperand(map, Map::kBitFieldOffset));
TestAndBranchIfAllClear(map.W(), Map::Bits1::IsUndetectableBit::kMask,
is_false);
CompareRoot(object, RootIndex::kNullValue);
Branch(ne, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kFunction: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false);
// Check if callable bit is set and not undetectable.
LoadMap(scratch, object);
Ldr(scratch.W(), FieldMemOperand(scratch, Map::kBitFieldOffset));
And(scratch.W(), scratch.W(),
Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask);
Cmp(scratch.W(), Map::Bits1::IsCallableBit::kMask);
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kObject: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false);
// If the object is null then return true.
CompareRoot(object, RootIndex::kNullValue);
B(eq, is_true);
// Check if the object is a receiver type,
LoadMap(scratch, object);
{
MaglevAssembler::ScratchRegisterScope temps(this);
CompareInstanceType(scratch, temps.Acquire(), FIRST_JS_RECEIVER_TYPE);
}
B(lt, is_false);
// ... and is not undefined (undetectable) nor callable.
Ldr(scratch.W(), FieldMemOperand(scratch, Map::kBitFieldOffset));
Tst(scratch.W(), Immediate(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
Branch(eq, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kOther:
// No runtime value has typeof matching kOther, so this is always false.
if (!fallthrough_when_false) {
Jump(is_false, false_distance);
}
return;
}
UNREACHABLE();
}
void MaglevAssembler::Prologue(Graph* graph) {
if (v8_flags.maglev_ool_prologue) {
// TODO(v8:7700): Implement!

View File

@ -2528,106 +2528,6 @@ void TestUndetectable::GenerateCode(MaglevAssembler* masm,
__ bind(&done);
}
// The tested value lives in a register; the boolean result is produced in a
// register (which GenerateCode also reuses as a temporary).
void TestTypeOf::SetValueLocationConstraints() {
UseRegister(value());
DefineAsRegister(this);
}
// Materializes typeof(value()) == literal_ as a boolean in the result
// register. Every case falls through to is_true when the check passes and
// branches to is_false otherwise.
void TestTypeOf::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
using LiteralFlag = interpreter::TestTypeOfFlags::LiteralFlag;
Register object = ToRegister(value());
// Use return register as temporary if needed. Be careful: {object} and
// {scratch} could alias (which means that {object} should be considered dead
// once {scratch} has been written to).
Register scratch = ToRegister(result());
Label is_true, is_false, done;
switch (literal_) {
case LiteralFlag::kNumber:
// Smis and heap numbers are both typeof "number".
__ JumpIfSmi(object, &is_true);
__ Ldr(scratch.W(), FieldMemOperand(object, HeapObject::kMapOffset));
__ CompareRoot(scratch.W(), RootIndex::kHeapNumberMap);
__ B(ne, &is_false);
break;
case LiteralFlag::kString:
__ JumpIfSmi(object, &is_false);
__ LoadMap(scratch, object);
__ CompareInstanceTypeRange(scratch, scratch, FIRST_STRING_TYPE,
LAST_STRING_TYPE);
__ B(hi, &is_false);
break;
case LiteralFlag::kSymbol:
__ JumpIfSmi(object, &is_false);
__ LoadMap(scratch, object);
__ CompareInstanceType(scratch, scratch, SYMBOL_TYPE);
__ B(ne, &is_false);
break;
case LiteralFlag::kBoolean:
// Only the two boolean oddballs are typeof "boolean".
__ CompareRoot(object, RootIndex::kTrueValue);
__ B(eq, &is_true);
__ CompareRoot(object, RootIndex::kFalseValue);
__ B(ne, &is_false);
break;
case LiteralFlag::kBigInt:
__ JumpIfSmi(object, &is_false);
__ LoadMap(scratch, object);
__ CompareInstanceType(scratch, scratch, BIGINT_TYPE);
__ B(ne, &is_false);
break;
case LiteralFlag::kUndefined: {
MaglevAssembler::ScratchRegisterScope temps(masm);
Register map = temps.Acquire();
__ JumpIfSmi(object, &is_false);
// Check it has the undetectable bit set and it is not null.
__ LoadMap(map, object);
__ Ldr(map.W(), FieldMemOperand(map, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(map.W(), Map::Bits1::IsUndetectableBit::kMask,
&is_false);
__ CompareRoot(object, RootIndex::kNullValue);
__ B(eq, &is_false);
break;
}
case LiteralFlag::kFunction:
__ JumpIfSmi(object, &is_false);
// Check if callable bit is set and not undetectable.
__ LoadMap(scratch, object);
__ Ldr(scratch.W(), FieldMemOperand(scratch, Map::kBitFieldOffset));
__ And(scratch.W(), scratch.W(),
Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask);
__ Cmp(scratch.W(), Map::Bits1::IsCallableBit::kMask);
__ B(ne, &is_false);
break;
case LiteralFlag::kObject:
__ JumpIfSmi(object, &is_false);
// If the object is null then return true.
__ CompareRoot(object, RootIndex::kNullValue);
__ B(eq, &is_true);
// Check if the object is a receiver type,
__ LoadMap(scratch, object);
{
MaglevAssembler::ScratchRegisterScope temps(masm);
__ CompareInstanceType(scratch, temps.Acquire(),
FIRST_JS_RECEIVER_TYPE);
}
__ B(lt, &is_false);
// ... and is not undefined (undetectable) nor callable.
__ Ldr(scratch.W(), FieldMemOperand(scratch, Map::kBitFieldOffset));
__ TestAndBranchIfAnySet(scratch.W(),
Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask,
&is_false);
break;
case LiteralFlag::kOther:
// kOther is never emitted for this node -- presumably filtered out by
// the graph builder; confirm.
UNREACHABLE();
}
__ bind(&is_true);
__ LoadRoot(ToRegister(result()), RootIndex::kTrueValue);
__ B(&done);
__ bind(&is_false);
__ LoadRoot(ToRegister(result()), RootIndex::kFalseValue);
__ bind(&done);
}
int ThrowIfNotSuperConstructor::MaxCallStackArgs() const { return 2; }
void ThrowIfNotSuperConstructor::SetValueLocationConstraints() {
UseRegister(constructor());

View File

@ -206,6 +206,36 @@ inline void MaglevAssembler::JumpToDeferredIf(Condition cond,
JumpIf(cond, &deferred_code->deferred_code_label);
}
// Branches between two basic blocks, treating whichever block equals
// `next_block` as a fallthrough. Block labels are always far, since final
// block layout is not known at this point.
inline void MaglevAssembler::Branch(Condition condition, BasicBlock* if_true,
BasicBlock* if_false,
BasicBlock* next_block) {
Branch(condition, if_true->label(), Label::kFar, if_true == next_block,
if_false->label(), Label::kFar, if_false == next_block);
}
// Emits a conditional branch to `if_true`/`if_false`; a target flagged as a
// fallthrough gets no jump emitted for it at all.
inline void MaglevAssembler::Branch(Condition condition, Label* if_true,
                                    Label::Distance true_distance,
                                    bool fallthrough_when_true, Label* if_false,
                                    Label::Distance false_distance,
                                    bool fallthrough_when_false) {
  if (!fallthrough_when_false) {
    // The false target is out of line: reach it on the inverted condition,
    // then jump to the true target unless it is the fallthrough.
    JumpIf(NegateCondition(condition), if_false, false_distance);
    if (!fallthrough_when_true) {
      Jump(if_true, true_distance);
    }
    return;
  }
  if (fallthrough_when_true) {
    // Both targets fall through, so there is nothing to emit; they must
    // therefore be the same label.
    DCHECK_EQ(if_true, if_false);
    return;
  }
  // Only the false target falls through: branch over it when the condition
  // holds.
  JumpIf(condition, if_true, true_distance);
}
} // namespace maglev
} // namespace internal
} // namespace v8

View File

@ -8,6 +8,7 @@
#include "src/codegen/machine-type.h"
#include "src/codegen/macro-assembler.h"
#include "src/flags/flags.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/maglev/maglev-code-gen-state.h"
#include "src/maglev/maglev-ir.h"
@ -90,6 +91,11 @@ class MaglevAssembler : public MacroAssembler {
inline void Branch(Condition condition, BasicBlock* if_true,
BasicBlock* if_false, BasicBlock* next_block);
inline void Branch(Condition condition, Label* if_true,
Label::Distance true_distance, bool fallthrough_when_true,
Label* if_false, Label::Distance false_distance,
bool fallthrough_when_false);
Register FromAnyToRegister(const Input& input, Register scratch);
inline void LoadBoundedSizeFromObject(Register result, Register object,
@ -130,6 +136,11 @@ class MaglevAssembler : public MacroAssembler {
void ToBoolean(Register value, ZoneLabelRef is_true, ZoneLabelRef is_false,
bool fallthrough_when_true);
void TestTypeOf(Register object,
interpreter::TestTypeOfFlags::LiteralFlag literal,
Label* if_true, Label::Distance true_distance,
bool fallthrough_when_true, Label* if_false,
Label::Distance false_distance, bool fallthrough_when_false);
inline void DoubleToInt64Repr(Register dst, DoubleRegister src);
void TruncateDoubleToInt32(Register dst, DoubleRegister src);
@ -186,9 +197,11 @@ class MaglevAssembler : public MacroAssembler {
inline void CompareInt32(Register reg, int32_t imm);
inline void CompareInt32(Register src1, Register src2);
inline void Jump(Label* target);
inline void JumpIf(Condition cond, Label* target);
inline void JumpIfTaggedEqual(Register r1, Register r2, Label* target);
inline void Jump(Label* target, Label::Distance distance = Label::kFar);
inline void JumpIf(Condition cond, Label* target,
Label::Distance distance = Label::kFar);
inline void JumpIfTaggedEqual(Register r1, Register r2, Label* target,
Label::Distance distance = Label::kFar);
// TODO(victorgomes): Import baseline Pop(T...) methods.
inline void Pop(Register dst);

View File

@ -792,10 +792,9 @@ void MaglevGraphBuilder::VisitBinarySmiOperation() {
BuildGenericBinarySmiOperationNode<kOperation>();
}
template <typename CompareControlNode>
bool MaglevGraphBuilder::TryBuildCompareOperation(Operation operation,
ValueNode* left,
ValueNode* right) {
template <typename BranchControlNodeT, typename... Args>
bool MaglevGraphBuilder::TryBuildBranchFor(
std::initializer_list<ValueNode*> control_inputs, Args&&... args) {
// Don't emit the shortcut branch if the next bytecode is a merge target.
if (IsOffsetAMergePoint(next_offset())) return false;
@ -833,8 +832,8 @@ bool MaglevGraphBuilder::TryBuildCompareOperation(Operation operation,
return false;
}
BasicBlock* block = FinishBlock<CompareControlNode>(
{left, right}, operation, &jump_targets_[true_offset],
BasicBlock* block = FinishBlock<BranchControlNodeT>(
control_inputs, std::forward<Args>(args)..., &jump_targets_[true_offset],
&jump_targets_[false_offset]);
if (true_offset == iterator_.GetJumpTargetOffset()) {
block->control_node()
@ -863,8 +862,7 @@ void MaglevGraphBuilder::VisitCompareOperation() {
case CompareOperationHint::kSignedSmall: {
ValueNode* left = LoadRegisterInt32(0);
ValueNode* right = GetAccumulatorInt32();
if (TryBuildCompareOperation<BranchIfInt32Compare>(kOperation, left,
right)) {
if (TryBuildBranchFor<BranchIfInt32Compare>({left, right}, kOperation)) {
return;
}
SetAccumulator(AddNewNode<Int32NodeFor<kOperation>>({left, right}));
@ -873,8 +871,8 @@ void MaglevGraphBuilder::VisitCompareOperation() {
case CompareOperationHint::kNumber: {
ValueNode* left = LoadRegisterFloat64(0);
ValueNode* right = GetAccumulatorFloat64();
if (TryBuildCompareOperation<BranchIfFloat64Compare>(kOperation, left,
right)) {
if (TryBuildBranchFor<BranchIfFloat64Compare>({left, right},
kOperation)) {
return;
}
SetAccumulator(AddNewNode<Float64NodeFor<kOperation>>({left, right}));
@ -893,8 +891,8 @@ void MaglevGraphBuilder::VisitCompareOperation() {
right =
GetInternalizedString(interpreter::Register::virtual_accumulator());
}
if (TryBuildCompareOperation<BranchIfReferenceCompare>(kOperation, left,
right)) {
if (TryBuildBranchFor<BranchIfReferenceCompare>({left, right},
kOperation)) {
return;
}
SetAccumulator(AddNewNode<TaggedEqual>({left, right}));
@ -907,8 +905,8 @@ void MaglevGraphBuilder::VisitCompareOperation() {
ValueNode* right = GetAccumulatorTagged();
BuildCheckSymbol(left);
BuildCheckSymbol(right);
if (TryBuildCompareOperation<BranchIfReferenceCompare>(kOperation, left,
right)) {
if (TryBuildBranchFor<BranchIfReferenceCompare>({left, right},
kOperation)) {
return;
}
SetAccumulator(AddNewNode<TaggedEqual>({left, right}));
@ -1137,8 +1135,8 @@ void MaglevGraphBuilder::VisitTestReferenceEqual() {
SetAccumulator(GetRootConstant(RootIndex::kTrueValue));
return;
}
if (TryBuildCompareOperation<BranchIfReferenceCompare>(
Operation::kStrictEqual, lhs, rhs)) {
if (TryBuildBranchFor<BranchIfReferenceCompare>({lhs, rhs},
Operation::kStrictEqual)) {
return;
}
SetAccumulator(AddNewNode<TaggedEqual>({lhs, rhs}));
@ -1180,7 +1178,9 @@ void MaglevGraphBuilder::VisitTestTypeOf() {
return;
}
ValueNode* value = GetAccumulatorTagged();
// TODO(victorgomes): Add fast path for constants.
if (TryBuildBranchFor<BranchIfTypeOf>({value}, literal)) {
return;
}
SetAccumulator(AddNewNode<TestTypeOf>({value}, literal));
}

View File

@ -1392,9 +1392,10 @@ class MaglevGraphBuilder {
template <Operation kOperation>
void VisitBinarySmiOperation();
template <typename CompareControlNode>
bool TryBuildCompareOperation(Operation operation, ValueNode* left,
ValueNode* right);
template <typename BranchControlNodeT, typename... Args>
bool TryBuildBranchFor(std::initializer_list<ValueNode*> control_inputs,
Args&&... args);
template <Operation kOperation>
void VisitCompareOperation();

View File

@ -10,6 +10,7 @@
#include "src/execution/isolate-inl.h"
#include "src/heap/local-heap.h"
#include "src/heap/parked-scope.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/maglev/maglev-assembler-inl.h"
#include "src/maglev/maglev-graph-labeller.h"
#include "src/maglev/maglev-graph-processor.h"
@ -1673,6 +1674,31 @@ void TestInstanceOf::GenerateCode(MaglevAssembler* masm,
masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
// The tested value lives in a register; the boolean result is produced in a
// register (GenerateCode also offers it to the scratch scope as a temp).
void TestTypeOf::SetValueLocationConstraints() {
UseRegister(value());
DefineAsRegister(this);
}
// Materializes typeof(value()) == literal_ as a true/false root constant in
// the result register, delegating the test to MaglevAssembler::TestTypeOf.
void TestTypeOf::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Register object = ToRegister(value());
// Use return register as temporary if needed. Be careful: {object} and
// {scratch} could alias (which means that {object} should be considered dead
// once {scratch} has been written to).
MaglevAssembler::ScratchRegisterScope temps(masm);
temps.Include(ToRegister(result()));
Label is_true, is_false, done;
// The true side is the fallthrough; the false side is jumped to.
__ TestTypeOf(object, literal_, &is_true, Label::kNear, true, &is_false,
Label::kNear, false);
// Fallthrough into true.
__ bind(&is_true);
__ LoadRoot(ToRegister(result()), RootIndex::kTrueValue);
__ Jump(&done, Label::kNear);
__ bind(&is_false);
__ LoadRoot(ToRegister(result()), RootIndex::kFalseValue);
__ bind(&done);
}
void ToBoolean::SetValueLocationConstraints() {
UseRegister(value());
DefineAsRegister(this);
@ -2487,6 +2513,19 @@ void BranchIfUndefinedOrNull::GenerateCode(MaglevAssembler* masm,
}
}
// The tested value stays in a register for the duration of the node.
void BranchIfTypeOf::SetValueLocationConstraints() {
UseRegister(value_input());
// One temporary for TestTypeOf.
set_temporaries_needed(1);
}
// Branches directly on the typeof test; either side may fall through into
// the next emitted block, in which case no jump is emitted for it.
void BranchIfTypeOf::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Register value = ToRegister(value_input());
__ TestTypeOf(value, literal_, if_true()->label(), Label::kFar,
if_true() == state.next_block(), if_false()->label(),
Label::kFar, if_false() == state.next_block());
}
void Switch::SetValueLocationConstraints() {
UseAndClobberRegister(value());
// TODO(victorgomes): Create a arch-agnostic scratch register scope.
@ -2749,6 +2788,11 @@ void CallRuntime::PrintParams(std::ostream& os,
os << "(" << Runtime::FunctionForId(function_id())->name << ")";
}
// Prints the typeof literal (e.g. "(number)") in graph dumps.
void TestTypeOf::PrintParams(std::ostream& os,
MaglevGraphLabeller* graph_labeller) const {
os << "(" << interpreter::TestTypeOfFlags::ToString(literal_) << ")";
}
void IncreaseInterruptBudget::PrintParams(
std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
os << "(" << amount() << ")";
@ -2789,6 +2833,11 @@ void BranchIfReferenceCompare::PrintParams(
os << "(" << operation_ << ")";
}
// Prints the typeof literal (e.g. "(number)") in graph dumps.
void BranchIfTypeOf::PrintParams(std::ostream& os,
MaglevGraphLabeller* graph_labeller) const {
os << "(" << interpreter::TestTypeOfFlags::ToString(literal_) << ")";
}
} // namespace maglev
} // namespace internal
} // namespace v8

View File

@ -277,7 +277,8 @@ class CompactInterpreterFrameState;
V(BranchIfInt32Compare) \
V(BranchIfFloat64Compare) \
V(BranchIfUndefinedOrNull) \
V(BranchIfJSReceiver)
V(BranchIfJSReceiver) \
V(BranchIfTypeOf)
#define CONDITIONAL_CONTROL_NODE_LIST(V) \
V(Switch) \
@ -2712,7 +2713,7 @@ class TestTypeOf : public FixedInputValueNodeT<1, TestTypeOf> {
void SetValueLocationConstraints();
void GenerateCode(MaglevAssembler*, const ProcessingState&);
void PrintParams(std::ostream&, MaglevGraphLabeller*) const {}
void PrintParams(std::ostream&, MaglevGraphLabeller*) const;
private:
interpreter::TestTypeOfFlags::LiteralFlag literal_;
@ -5997,6 +5998,30 @@ class BranchIfReferenceCompare
Operation operation_;
};
// Control node for the shortcut typeof branch: tests the typeof of its
// single tagged input against `literal` and branches straight to the
// true/false basic blocks, without materializing a boolean result.
class BranchIfTypeOf : public BranchControlNodeT<1, BranchIfTypeOf> {
using Base = BranchControlNodeT<1, BranchIfTypeOf>;
public:
static constexpr int kValueIndex = 0;
// The value whose typeof is being tested.
Input& value_input() { return NodeBase::input(kValueIndex); }
explicit BranchIfTypeOf(uint64_t bitfield,
interpreter::TestTypeOfFlags::LiteralFlag literal,
BasicBlockRef* if_true_refs,
BasicBlockRef* if_false_refs)
: Base(bitfield, if_true_refs, if_false_refs), literal_(literal) {}
// The single input must be a tagged value.
static constexpr
typename Base::InputTypes kInputTypes{ValueRepresentation::kTagged};
void SetValueLocationConstraints();
void GenerateCode(MaglevAssembler*, const ProcessingState&);
void PrintParams(std::ostream&, MaglevGraphLabeller*) const;
private:
// The typeof literal compared against (e.g. kNumber, kString).
interpreter::TestTypeOfFlags::LiteralFlag literal_;
};
} // namespace maglev
} // namespace internal
} // namespace v8

View File

@ -197,23 +197,6 @@ inline Condition MaglevAssembler::IsRootConstant(Input input,
return equal;
}
void MaglevAssembler::Branch(Condition condition, BasicBlock* if_true,
                             BasicBlock* if_false, BasicBlock* next_block) {
  // No branch-probability data is available, so arrange the jumps to fall
  // through into whichever target block is emitted next.
  if (if_false != next_block) {
    // The false block is out of line: reach it on the inverted condition,
    // then jump to the true block unless it is the fallthrough.
    j(NegateCondition(condition), if_false->label());
    if (if_true != next_block) {
      jmp(if_true->label());
    }
    return;
  }
  // The false block follows this one: jump over it when the condition holds,
  // otherwise fall through into it.
  j(condition, if_true->label());
}
inline MemOperand MaglevAssembler::GetStackSlot(
const compiler::AllocatedOperand& operand) {
return MemOperand(rbp, GetFramePointerOffsetForStackSlot(operand));
@ -410,16 +393,20 @@ inline void MaglevAssembler::CompareInt32(Register src1, Register src2) {
cmpl(src1, src2);
}
inline void MaglevAssembler::Jump(Label* target) { jmp(target); }
inline void MaglevAssembler::Jump(Label* target, Label::Distance distance) {
jmp(target, distance);
}
inline void MaglevAssembler::JumpIf(Condition cond, Label* target) {
j(cond, target);
inline void MaglevAssembler::JumpIf(Condition cond, Label* target,
Label::Distance distance) {
j(cond, target, distance);
}
inline void MaglevAssembler::JumpIfTaggedEqual(Register r1, Register r2,
Label* target) {
Label* target,
Label::Distance distance) {
cmp_tagged(r1, r2);
j(equal, target);
j(equal, target, distance);
}
inline void MaglevAssembler::Pop(Register dst) { MacroAssembler::Pop(dst); }

View File

@ -4,6 +4,7 @@
#include "src/codegen/interface-descriptors-inl.h"
#include "src/common/globals.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/maglev/maglev-assembler-inl.h"
#include "src/maglev/maglev-graph.h"
#include "src/objects/heap-number.h"
@ -344,6 +345,121 @@ void MaglevAssembler::ToBoolean(Register value, ZoneLabelRef is_true,
}
}
// Emits code testing whether typeof(object) matches `literal`, branching to
// `is_true`/`is_false`. Either target may be marked as a fallthrough, in
// which case no jump is emitted for it (see MaglevAssembler::Branch).
void MaglevAssembler::TestTypeOf(
Register object, interpreter::TestTypeOfFlags::LiteralFlag literal,
Label* is_true, Label::Distance true_distance, bool fallthrough_when_true,
Label* is_false, Label::Distance false_distance,
bool fallthrough_when_false) {
// If both true and false are fallthroughs, we don't have to do anything.
if (fallthrough_when_true && fallthrough_when_false) return;
// IMPORTANT: Note that `object` could be a register that aliases registers in
// the ScratchRegisterScope. Make sure that all reads of `object` are before
// any writes to scratch registers.
using LiteralFlag = interpreter::TestTypeOfFlags::LiteralFlag;
switch (literal) {
case LiteralFlag::kNumber:
// Smis and objects with a HeapNumber map are both typeof "number".
JumpIfSmi(object, is_true, true_distance);
CompareRoot(FieldOperand(object, HeapObject::kMapOffset),
RootIndex::kHeapNumberMap);
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
case LiteralFlag::kString: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false, false_distance);
LoadMap(scratch, object);
cmpw(FieldOperand(scratch, Map::kInstanceTypeOffset),
Immediate(LAST_STRING_TYPE));
Branch(less_equal, is_true, true_distance, fallthrough_when_true,
is_false, false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kSymbol: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false, false_distance);
LoadMap(scratch, object);
cmpw(FieldOperand(scratch, Map::kInstanceTypeOffset),
Immediate(SYMBOL_TYPE));
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kBoolean:
// Only the two boolean oddballs are typeof "boolean".
CompareRoot(object, RootIndex::kTrueValue);
JumpIf(equal, is_true, true_distance);
CompareRoot(object, RootIndex::kFalseValue);
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
case LiteralFlag::kBigInt: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false, false_distance);
LoadMap(scratch, object);
cmpw(FieldOperand(scratch, Map::kInstanceTypeOffset),
Immediate(BIGINT_TYPE));
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kUndefined: {
JumpIfSmi(object, is_false, false_distance);
// Check it has the undetectable bit set and it is not null.
LoadMap(kScratchRegister, object);
testl(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
Immediate(Map::Bits1::IsUndetectableBit::kMask));
JumpIf(zero, is_false, false_distance);
CompareRoot(object, RootIndex::kNullValue);
Branch(not_equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kFunction: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false, false_distance);
// Check if callable bit is set and not undetectable.
LoadMap(scratch, object);
movl(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
andl(scratch, Immediate(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
cmpl(scratch, Immediate(Map::Bits1::IsCallableBit::kMask));
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kObject: {
MaglevAssembler::ScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
JumpIfSmi(object, is_false, false_distance);
// If the object is null then return true.
CompareRoot(object, RootIndex::kNullValue);
JumpIf(equal, is_true, true_distance);
// Check if the object is a receiver type,
LoadMap(scratch, object);
cmpw(FieldOperand(scratch, Map::kInstanceTypeOffset),
Immediate(FIRST_JS_RECEIVER_TYPE));
JumpIf(less, is_false, false_distance);
// ... and is not undefined (undetectable) nor callable.
testl(FieldOperand(scratch, Map::kBitFieldOffset),
Immediate(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
Branch(equal, is_true, true_distance, fallthrough_when_true, is_false,
false_distance, fallthrough_when_false);
return;
}
case LiteralFlag::kOther:
// No runtime value has typeof matching kOther, so this is always false.
if (!fallthrough_when_false) {
Jump(is_false, false_distance);
}
return;
}
UNREACHABLE();
}
void MaglevAssembler::TruncateDoubleToInt32(Register dst, DoubleRegister src) {
ZoneLabelRef done(this);

View File

@ -2220,100 +2220,6 @@ void TestUndetectable::GenerateCode(MaglevAssembler* masm,
__ bind(&done);
}
// The tested value lives in a register; the boolean result is produced in a
// register (which GenerateCode also reuses as a temporary).
void TestTypeOf::SetValueLocationConstraints() {
UseRegister(value());
DefineAsRegister(this);
}
// Materializes typeof(value()) == literal_ as a boolean in the result
// register. Every case falls through to is_true when the check passes and
// branches to is_false otherwise.
void TestTypeOf::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
using LiteralFlag = interpreter::TestTypeOfFlags::LiteralFlag;
Register object = ToRegister(value());
// Use return register as temporary if needed. Be careful: {object} and {tmp}
// could alias (which means that {object} should be considered dead once {tmp}
// has been written to).
Register tmp = ToRegister(result());
Label is_true, is_false, done;
switch (literal_) {
case LiteralFlag::kNumber:
// Smis and heap numbers are both typeof "number".
__ JumpIfSmi(object, &is_true, Label::kNear);
__ CompareRoot(FieldOperand(object, HeapObject::kMapOffset),
RootIndex::kHeapNumberMap);
__ j(not_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kString:
__ JumpIfSmi(object, &is_false, Label::kNear);
__ LoadMap(tmp, object);
__ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(FIRST_NONSTRING_TYPE));
__ j(greater_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kSymbol:
__ JumpIfSmi(object, &is_false, Label::kNear);
__ LoadMap(tmp, object);
__ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(SYMBOL_TYPE));
__ j(not_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kBoolean:
// Only the two boolean oddballs are typeof "boolean".
__ CompareRoot(object, RootIndex::kTrueValue);
__ j(equal, &is_true, Label::kNear);
__ CompareRoot(object, RootIndex::kFalseValue);
__ j(not_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kBigInt:
__ JumpIfSmi(object, &is_false, Label::kNear);
__ LoadMap(tmp, object);
__ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(BIGINT_TYPE));
__ j(not_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kUndefined:
__ JumpIfSmi(object, &is_false, Label::kNear);
// Check it has the undetectable bit set and it is not null.
__ LoadMap(kScratchRegister, object);
__ testl(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
Immediate(Map::Bits1::IsUndetectableBit::kMask));
__ j(zero, &is_false, Label::kNear);
__ CompareRoot(object, RootIndex::kNullValue);
__ j(equal, &is_false, Label::kNear);
break;
case LiteralFlag::kFunction:
__ JumpIfSmi(object, &is_false, Label::kNear);
// Check if callable bit is set and not undetectable.
__ LoadMap(tmp, object);
__ movl(tmp, FieldOperand(tmp, Map::kBitFieldOffset));
__ andl(tmp, Immediate(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
__ cmpl(tmp, Immediate(Map::Bits1::IsCallableBit::kMask));
__ j(not_equal, &is_false, Label::kNear);
break;
case LiteralFlag::kObject:
__ JumpIfSmi(object, &is_false, Label::kNear);
// If the object is null then return true.
__ CompareRoot(object, RootIndex::kNullValue);
__ j(equal, &is_true, Label::kNear);
// Check if the object is a receiver type,
__ LoadMap(tmp, object);
__ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(FIRST_JS_RECEIVER_TYPE));
__ j(less, &is_false, Label::kNear);
// ... and is not undefined (undetectable) nor callable.
__ testl(FieldOperand(tmp, Map::kBitFieldOffset),
Immediate(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
__ j(not_zero, &is_false, Label::kNear);
break;
case LiteralFlag::kOther:
// kOther is never emitted for this node -- presumably filtered out by
// the graph builder; confirm.
UNREACHABLE();
}
__ bind(&is_true);
__ LoadRoot(ToRegister(result()), RootIndex::kTrueValue);
__ jmp(&done, Label::kNear);
__ bind(&is_false);
__ LoadRoot(ToRegister(result()), RootIndex::kFalseValue);
__ bind(&done);
}
int ToObject::MaxCallStackArgs() const {
using D = CallInterfaceDescriptorFor<Builtin::kToObject>::type;
return D::GetStackParameterCount();