[turbofan] Introduce DeoptimizeIf and DeoptimizeUnless common operators.

These macro operators represent a conditional eager deoptimization exit
without explicit branching, which greatly reduces the overhead of both
scheduling and register allocation, and thereby greatly reduces overall
compilation time, especially when there are many eager deoptimization exits.

R=jarin@chromium.org

Review URL: https://codereview.chromium.org/1721103003
Cr-Commit-Position: refs/heads/master@{#34239}
commit c129aa4d39
parent 9146bc5e20
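For orientation: each of the new operators takes a condition and a frame state
as value inputs, plus an effect and a control input, and produces a single
control output (see the CACHED_OP_LIST change further below). A minimal sketch
of how a lowering might plant such a check; the jsgraph, condition,
frame_state, effect and control names are assumed surrounding context, not
part of this CL:

  // Deoptimize eagerly unless {condition} holds; execution continues on the
  // node's control output, with no IfTrue/IfFalse projections and no
  // separate Deoptimize block needed.
  Node* check = jsgraph->graph()->NewNode(
      jsgraph->common()->DeoptimizeUnless(),  // DeoptimizeIf() for the inverse
      condition, frame_state, effect, control);
  control = check;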
src/compiler/arm/code-generator-arm.cc

@@ -54,6 +54,7 @@ class ArmOperandConverter final : public InstructionOperandConverter {
   SBit OutputSBit() const {
     switch (instr_->flags_mode()) {
       case kFlags_branch:
+      case kFlags_deoptimize:
       case kFlags_set:
         return SetCC;
       case kFlags_none:
@@ -409,7 +410,7 @@ void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   ArmOperandConverter i(this, instr);

-  masm()->MaybeCheckConstPool();
+  __ MaybeCheckConstPool();

   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
@@ -1155,7 +1156,11 @@ void CodeGenerator::AssembleDeoptimizerCall(
     int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
+  // TODO(turbofan): We should be able to generate better code by sharing the
+  // actual final call site and just bl'ing to it here, similar to what we do
+  // in the lithium backend.
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
+  __ CheckConstPool(false, false);
 }
src/compiler/arm/instruction-selector-arm.cc

@@ -237,8 +237,13 @@ void VisitBinop(InstructionSelector* selector, Node* node,
   DCHECK_GE(arraysize(outputs), output_count);
   DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

-  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
-                 inputs);
+  opcode = cont->Encode(opcode);
+  if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
+                             cont->frame_state());
+  } else {
+    selector->Emit(opcode, output_count, outputs, input_count, inputs);
+  }
 }

@@ -691,8 +696,13 @@ void VisitShift(InstructionSelector* selector, Node* node,
   DCHECK_GE(arraysize(outputs), output_count);
   DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

-  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
-                 inputs);
+  opcode = cont->Encode(opcode);
+  if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
+                             cont->frame_state());
+  } else {
+    selector->Emit(opcode, output_count, outputs, input_count, inputs);
+  }
 }

@@ -1284,6 +1294,9 @@ void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
   if (cont->IsBranch()) {
     selector->Emit(opcode, g.NoOutput(), left, right,
                    g.Label(cont->true_block()), g.Label(cont->false_block()));
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
     selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
@@ -1357,8 +1370,7 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
   if (cont->IsBranch()) {
     inputs[input_count++] = g.Label(cont->true_block());
     inputs[input_count++] = g.Label(cont->false_block());
-  } else {
-    DCHECK(cont->IsSet());
+  } else if (cont->IsSet()) {
     outputs[output_count++] = g.DefineAsRegister(cont->result());
   }

@@ -1366,8 +1378,13 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
   DCHECK_GE(arraysize(inputs), input_count);
   DCHECK_GE(arraysize(outputs), output_count);

-  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
-                 inputs);
+  opcode = cont->Encode(opcode);
+  if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
+                             cont->frame_state());
+  } else {
+    selector->Emit(opcode, output_count, outputs, input_count, inputs);
+  }
 }

@@ -1482,7 +1499,11 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
   if (cont->IsBranch()) {
     selector->Emit(opcode, g.NoOutput(), value_operand, value_operand,
                    g.Label(cont->true_block()), g.Label(cont->false_block()));
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, g.NoOutput(), value_operand, value_operand,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
     selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                    value_operand);
   }
@@ -1490,13 +1511,23 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,

 }  // namespace


 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                       BasicBlock* fbranch) {
   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
   VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
 }

+void InstructionSelector::VisitDeoptimizeIf(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
+void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
   ArmOperandGenerator g(this);
@@ -1527,7 +1558,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {

 void InstructionSelector::VisitWord32Equal(Node* const node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   Int32BinopMatcher m(node);
   if (m.right().Is(0)) {
     return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
@@ -1537,32 +1568,34 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {


 void InstructionSelector::VisitInt32LessThan(Node* node) {
-  FlagsContinuation cont(kSignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kSignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop(this, node, kArmAdd, kArmAdd, &cont);
   }
   FlagsContinuation cont;
@@ -1572,7 +1605,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {

 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop(this, node, kArmSub, kArmRsb, &cont);
   }
   FlagsContinuation cont;
@@ -1581,37 +1614,39 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {


 void InstructionSelector::VisitFloat32Equal(Node* node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThan(Node* node) {
-  FlagsContinuation cont(kFloatLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kFloatLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64Equal(Node* node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThan(Node* node) {
-  FlagsContinuation cont(kFloatLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kFloatLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }
src/compiler/arm64/instruction-selector-arm64.cc

@@ -289,8 +289,13 @@ void VisitBinop(InstructionSelector* selector, Node* node,
   DCHECK_GE(arraysize(inputs), input_count);
   DCHECK_GE(arraysize(outputs), output_count);

-  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
-                 inputs);
+  opcode = cont->Encode(opcode);
+  if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
+                             cont->frame_state());
+  } else {
+    selector->Emit(opcode, output_count, outputs, input_count, inputs);
+  }
 }

@@ -1674,6 +1679,9 @@ void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
   if (cont->IsBranch()) {
     selector->Emit(opcode, g.NoOutput(), left, right,
                    g.Label(cont->true_block()), g.Label(cont->false_block()));
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
     selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
@@ -1789,85 +1797,72 @@ void VisitFloat64Compare(InstructionSelector* selector, Node* node,
   }
 }

-}  // namespace
-
-
-void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
-                                      BasicBlock* fbranch) {
-  OperandGenerator g(this);
-  Node* user = branch;
-  Node* value = branch->InputAt(0);
-
-  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
-
-  // Try to combine with comparisons against 0 by simply inverting the branch.
-  while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
-    Int32BinopMatcher m(value);
-    if (m.right().Is(0)) {
-      user = value;
-      value = m.left().node();
-      cont.Negate();
-    } else {
-      break;
-    }
-  }
-
-  // Try to combine the branch with a comparison.
-  if (CanCover(user, value)) {
+void VisitWordCompareZero(InstructionSelector* selector, Node* user,
+                          Node* value, FlagsContinuation* cont) {
+  Arm64OperandGenerator g(selector);
+  while (selector->CanCover(user, value)) {
     switch (value->opcode()) {
-      case IrOpcode::kWord32Equal:
-        cont.OverwriteAndNegateIfEqual(kEqual);
-        return VisitWord32Compare(this, value, &cont);
+      case IrOpcode::kWord32Equal: {
+        Int32BinopMatcher m(value);
+        if (m.right().Is(0)) {
+          user = value;
+          value = m.left().node();
+          cont->Negate();
+          continue;
+        }
+        cont->OverwriteAndNegateIfEqual(kEqual);
+        return VisitWord32Compare(selector, value, cont);
+      }
       case IrOpcode::kInt32LessThan:
-        cont.OverwriteAndNegateIfEqual(kSignedLessThan);
-        return VisitWord32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
+        return VisitWord32Compare(selector, value, cont);
       case IrOpcode::kInt32LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
-        return VisitWord32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
+        return VisitWord32Compare(selector, value, cont);
       case IrOpcode::kUint32LessThan:
-        cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
-        return VisitWord32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
+        return VisitWord32Compare(selector, value, cont);
       case IrOpcode::kUint32LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
-        return VisitWord32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
+        return VisitWord32Compare(selector, value, cont);
       case IrOpcode::kWord64Equal:
-        cont.OverwriteAndNegateIfEqual(kEqual);
-        return VisitWordCompare(this, value, kArm64Cmp, &cont, false,
+        cont->OverwriteAndNegateIfEqual(kEqual);
+        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                 kArithmeticImm);
       case IrOpcode::kInt64LessThan:
-        cont.OverwriteAndNegateIfEqual(kSignedLessThan);
-        return VisitWordCompare(this, value, kArm64Cmp, &cont, false,
+        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
+        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                 kArithmeticImm);
       case IrOpcode::kInt64LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
-        return VisitWordCompare(this, value, kArm64Cmp, &cont, false,
+        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
+        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                 kArithmeticImm);
       case IrOpcode::kUint64LessThan:
-        cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
-        return VisitWordCompare(this, value, kArm64Cmp, &cont, false,
+        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
+        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                 kArithmeticImm);
       case IrOpcode::kUint64LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
-        return VisitWordCompare(this, value, kArm64Cmp, &cont, false,
+        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
+        return VisitWordCompare(selector, value, kArm64Cmp, cont, false,
                                 kArithmeticImm);
       case IrOpcode::kFloat32Equal:
-        cont.OverwriteAndNegateIfEqual(kEqual);
-        return VisitFloat32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kEqual);
+        return VisitFloat32Compare(selector, value, cont);
       case IrOpcode::kFloat32LessThan:
-        cont.OverwriteAndNegateIfEqual(kFloatLessThan);
-        return VisitFloat32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
+        return VisitFloat32Compare(selector, value, cont);
       case IrOpcode::kFloat32LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
-        return VisitFloat32Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
+        return VisitFloat32Compare(selector, value, cont);
       case IrOpcode::kFloat64Equal:
-        cont.OverwriteAndNegateIfEqual(kEqual);
-        return VisitFloat64Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kEqual);
+        return VisitFloat64Compare(selector, value, cont);
       case IrOpcode::kFloat64LessThan:
-        cont.OverwriteAndNegateIfEqual(kFloatLessThan);
-        return VisitFloat64Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kFloatLessThan);
+        return VisitFloat64Compare(selector, value, cont);
       case IrOpcode::kFloat64LessThanOrEqual:
-        cont.OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
-        return VisitFloat64Compare(this, value, &cont);
+        cont->OverwriteAndNegateIfEqual(kFloatLessThanOrEqual);
+        return VisitFloat64Compare(selector, value, cont);
       case IrOpcode::kProjection:
         // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
@@ -1879,24 +1874,24 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
        // *AFTER* this branch).
        Node* const node = value->InputAt(0);
        Node* const result = NodeProperties::FindProjection(node, 0);
-        if (result == nullptr || IsDefined(result)) {
+        if (result == nullptr || selector->IsDefined(result)) {
          switch (node->opcode()) {
            case IrOpcode::kInt32AddWithOverflow:
-              cont.OverwriteAndNegateIfEqual(kOverflow);
-              return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
-                                                   kArithmeticImm, &cont);
+              cont->OverwriteAndNegateIfEqual(kOverflow);
+              return VisitBinop<Int32BinopMatcher>(
+                  selector, node, kArm64Add32, kArithmeticImm, cont);
            case IrOpcode::kInt32SubWithOverflow:
-              cont.OverwriteAndNegateIfEqual(kOverflow);
-              return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
-                                                   kArithmeticImm, &cont);
+              cont->OverwriteAndNegateIfEqual(kOverflow);
+              return VisitBinop<Int32BinopMatcher>(
+                  selector, node, kArm64Sub32, kArithmeticImm, cont);
            case IrOpcode::kInt64AddWithOverflow:
-              cont.OverwriteAndNegateIfEqual(kOverflow);
-              return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add,
-                                                   kArithmeticImm, &cont);
+              cont->OverwriteAndNegateIfEqual(kOverflow);
+              return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Add,
+                                                   kArithmeticImm, cont);
            case IrOpcode::kInt64SubWithOverflow:
-              cont.OverwriteAndNegateIfEqual(kOverflow);
-              return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub,
-                                                   kArithmeticImm, &cont);
+              cont->OverwriteAndNegateIfEqual(kOverflow);
+              return VisitBinop<Int64BinopMatcher>(selector, node, kArm64Sub,
+                                                   kArithmeticImm, cont);
            default:
              break;
          }
@@ -1904,55 +1899,84 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
        }
        break;
      case IrOpcode::kInt32Add:
-        return VisitWordCompare(this, value, kArm64Cmn32, &cont, true,
+        return VisitWordCompare(selector, value, kArm64Cmn32, cont, true,
                                 kArithmeticImm);
      case IrOpcode::kInt32Sub:
-        return VisitWord32Compare(this, value, &cont);
+        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And: {
        Int32BinopMatcher m(value);
-        if (m.right().HasValue() &&
+        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation32(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
-          DCHECK((cont.condition() == kEqual) ||
-                 (cont.condition() == kNotEqual));
-          Emit(cont.Encode(kArm64TestAndBranch32), g.NoOutput(),
-               g.UseRegister(m.left().node()),
-               g.TempImmediate(
-                   base::bits::CountTrailingZeros32(m.right().Value())),
-               g.Label(cont.true_block()), g.Label(cont.false_block()));
+          DCHECK((cont->condition() == kEqual) ||
+                 (cont->condition() == kNotEqual));
+          selector->Emit(
+              cont->Encode(kArm64TestAndBranch32), g.NoOutput(),
+              g.UseRegister(m.left().node()),
+              g.TempImmediate(
+                  base::bits::CountTrailingZeros32(m.right().Value())),
+              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
-        return VisitWordCompare(this, value, kArm64Tst32, &cont, true,
+        return VisitWordCompare(selector, value, kArm64Tst32, cont, true,
                                 kLogical32Imm);
      }
      case IrOpcode::kWord64And: {
        Int64BinopMatcher m(value);
-        if (m.right().HasValue() &&
+        if (cont->IsBranch() && m.right().HasValue() &&
            (base::bits::CountPopulation64(m.right().Value()) == 1)) {
          // If the mask has only one bit set, we can use tbz/tbnz.
-          DCHECK((cont.condition() == kEqual) ||
-                 (cont.condition() == kNotEqual));
-          Emit(cont.Encode(kArm64TestAndBranch), g.NoOutput(),
-               g.UseRegister(m.left().node()),
-               g.TempImmediate(
-                   base::bits::CountTrailingZeros64(m.right().Value())),
-               g.Label(cont.true_block()), g.Label(cont.false_block()));
+          DCHECK((cont->condition() == kEqual) ||
+                 (cont->condition() == kNotEqual));
+          selector->Emit(
+              cont->Encode(kArm64TestAndBranch), g.NoOutput(),
+              g.UseRegister(m.left().node()),
+              g.TempImmediate(
+                  base::bits::CountTrailingZeros64(m.right().Value())),
+              g.Label(cont->true_block()), g.Label(cont->false_block()));
          return;
        }
-        return VisitWordCompare(this, value, kArm64Tst, &cont, true,
+        return VisitWordCompare(selector, value, kArm64Tst, cont, true,
                                 kLogical64Imm);
      }
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, compare against 0 and branch.
-  Emit(cont.Encode(kArm64CompareAndBranch32), g.NoOutput(),
-       g.UseRegister(value), g.Label(cont.true_block()),
-       g.Label(cont.false_block()));
+  if (cont->IsBranch()) {
+    selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
+                   g.UseRegister(value), g.Label(cont->true_block()),
+                   g.Label(cont->false_block()));
+  } else {
+    DCHECK(cont->IsDeoptimize());
+    selector->EmitDeoptimize(cont->Encode(kArm64Tst32), g.NoOutput(),
+                             g.UseRegister(value), g.UseRegister(value),
+                             cont->frame_state());
+  }
 }

+}  // namespace
+
+void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
+                                      BasicBlock* fbranch) {
+  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
+  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
+}
+
+void InstructionSelector::VisitDeoptimizeIf(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
+void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
   Arm64OperandGenerator g(this);
@@ -1984,7 +2008,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {

 void InstructionSelector::VisitWord32Equal(Node* const node) {
   Node* const user = node;
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   Int32BinopMatcher m(user);
   if (m.right().Is(0)) {
     Node* const value = m.left().node();
@@ -2018,32 +2042,34 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {


 void InstructionSelector::VisitInt32LessThan(Node* node) {
-  FlagsContinuation cont(kSignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   VisitWord32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kSignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   VisitWord32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   VisitWord32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   VisitWord32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitWord64Equal(Node* const node) {
   Node* const user = node;
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   Int64BinopMatcher m(user);
   if (m.right().Is(0)) {
     Node* const value = m.left().node();
@@ -2064,7 +2090,7 @@ void InstructionSelector::VisitWord64Equal(Node* const node) {

 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop<Int32BinopMatcher>(this, node, kArm64Add32,
                                          kArithmeticImm, &cont);
   }
@@ -2075,7 +2101,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {

 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop<Int32BinopMatcher>(this, node, kArm64Sub32,
                                          kArithmeticImm, &cont);
   }
@@ -2086,7 +2112,7 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {

 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop<Int64BinopMatcher>(this, node, kArm64Add, kArithmeticImm,
                                          &cont);
   }
@@ -2097,7 +2123,7 @@ void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {

 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm,
                                          &cont);
   }
@@ -2107,61 +2133,65 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {


 void InstructionSelector::VisitInt64LessThan(Node* node) {
-  FlagsContinuation cont(kSignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
 }


 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kSignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
 }


 void InstructionSelector::VisitUint64LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
 }


 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   VisitWordCompare(this, node, kArm64Cmp, &cont, false, kArithmeticImm);
 }


 void InstructionSelector::VisitFloat32Equal(Node* node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThan(Node* node) {
-  FlagsContinuation cont(kFloatLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kFloatLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64Equal(Node* node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThan(Node* node) {
-  FlagsContinuation cont(kFloatLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kFloatLessThan, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kFloatLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kFloatLessThanOrEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }
src/compiler/branch-elimination.cc

@@ -15,11 +15,11 @@ namespace compiler {
 BranchElimination::BranchElimination(Editor* editor, JSGraph* js_graph,
                                      Zone* zone)
     : AdvancedReducer(editor),
+      jsgraph_(js_graph),
       node_conditions_(zone, js_graph->graph()->NodeCount()),
       zone_(zone),
       dead_(js_graph->graph()->NewNode(js_graph->common()->Dead())) {}


 BranchElimination::~BranchElimination() {}


@@ -27,6 +27,9 @@ Reduction BranchElimination::Reduce(Node* node) {
   switch (node->opcode()) {
     case IrOpcode::kDead:
       return NoChange();
+    case IrOpcode::kDeoptimizeIf:
+    case IrOpcode::kDeoptimizeUnless:
+      return ReduceDeoptimizeConditional(node);
     case IrOpcode::kMerge:
       return ReduceMerge(node);
     case IrOpcode::kLoop:
@@ -76,6 +79,41 @@ Reduction BranchElimination::ReduceBranch(Node* node) {
   return TakeConditionsFromFirstControl(node);
 }

+Reduction BranchElimination::ReduceDeoptimizeConditional(Node* node) {
+  DCHECK(node->opcode() == IrOpcode::kDeoptimizeIf ||
+         node->opcode() == IrOpcode::kDeoptimizeUnless);
+  bool condition_is_true = node->opcode() == IrOpcode::kDeoptimizeUnless;
+  Node* condition = NodeProperties::GetValueInput(node, 0);
+  Node* frame_state = NodeProperties::GetValueInput(node, 1);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+  ControlPathConditions const* conditions = node_conditions_.Get(control);
+  // If we do not know anything about the predecessor, do not propagate just
+  // yet because we will have to recompute anyway once we compute the
+  // predecessor.
+  if (conditions == nullptr) {
+    DCHECK_NULL(node_conditions_.Get(node));
+    return NoChange();
+  }
+  Maybe<bool> condition_value = conditions->LookupCondition(condition);
+  if (condition_value.IsJust()) {
+    // If we know the condition we can discard the branch.
+    if (condition_is_true == condition_value.FromJust()) {
+      // We don't need to update the conditions here, because we're replacing
+      // {node} with the {control} node that already contains the right
+      // information.
+      return Replace(control);
+    } else {
+      control = graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
+                                 frame_state, effect, control);
+      // TODO(bmeurer): This should be on the AdvancedReducer somehow.
+      NodeProperties::MergeControlToEnd(graph(), common(), control);
+      Revisit(graph()->end());
+      return Replace(dead());
+    }
+  }
+  return UpdateConditions(
+      node, conditions->AddCondition(zone_, condition, condition_is_true));
+}
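What this reduction buys, sketched on a concrete graph; the node names below
are illustrative only, not taken from the CL:

  Node* check1 = graph->NewNode(common->DeoptimizeUnless(), cond, frame_state,
                                effect, control);
  Node* check2 = graph->NewNode(common->DeoptimizeUnless(), cond, frame_state,
                                effect, check1);
  // check1 records "cond is true" on its control output, so reducing check2
  // hits the LookupCondition() fast path above and returns Replace(check1):
  // the duplicate check disappears. Had the recorded value contradicted the
  // check instead, an unconditional eager Deoptimize would be merged into the
  // graph's end and the rest of the path replaced by dead().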

 Reduction BranchElimination::ReduceIf(Node* node, bool is_true_branch) {
   // Add the condition to the list arriving from the input branch.
@@ -264,6 +302,12 @@ bool BranchElimination::ControlPathConditions::operator==(
   return false;
 }

+Graph* BranchElimination::graph() const { return jsgraph()->graph(); }
+
+CommonOperatorBuilder* BranchElimination::common() const {
+  return jsgraph()->common();
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
src/compiler/branch-elimination.h

@@ -11,6 +11,8 @@ namespace v8 {
 namespace internal {
 namespace compiler {

+// Forward declarations.
+class CommonOperatorBuilder;
 class JSGraph;


@@ -73,6 +75,7 @@ class BranchElimination final : public AdvancedReducer {
   };

   Reduction ReduceBranch(Node* node);
+  Reduction ReduceDeoptimizeConditional(Node* node);
   Reduction ReduceIf(Node* node, bool is_true_branch);
   Reduction ReduceLoop(Node* node);
   Reduction ReduceMerge(Node* node);
@@ -84,7 +87,11 @@ class BranchElimination final : public AdvancedReducer {
                          const ControlPathConditions* conditions);

   Node* dead() const { return dead_; }
+  Graph* graph() const;
+  JSGraph* jsgraph() const { return jsgraph_; }
+  CommonOperatorBuilder* common() const;

+  JSGraph* const jsgraph_;
   PathConditionsForControlNodes node_conditions_;
   Zone* zone_;
   Node* dead_;
src/compiler/code-generator-impl.h

@@ -139,6 +139,19 @@ class InstructionOperandConverter {
   Instruction* instr_;
 };

+// Eager deoptimization exit.
+class DeoptimizationExit : public ZoneObject {
+ public:
+  explicit DeoptimizationExit(int deoptimization_id)
+      : deoptimization_id_(deoptimization_id) {}
+
+  int deoptimization_id() const { return deoptimization_id_; }
+  Label* label() { return &label_; }
+
+ private:
+  int const deoptimization_id_;
+  Label label_;
+};
+
 // Generator for out-of-line code that is emitted after the main code is done.
 class OutOfLineCode : public ZoneObject {
src/compiler/code-generator.cc

@@ -31,7 +31,6 @@ class CodeGenerator::JumpTable final : public ZoneObject {
   size_t const target_count_;
 };

-
 CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                              InstructionSequence* code, CompilationInfo* info)
     : frame_access_state_(new (code->zone()) FrameAccessState(frame)),
@@ -45,6 +44,7 @@ CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
       resolver_(this),
       safepoints_(code->zone()),
       handlers_(code->zone()),
+      deoptimization_exits_(code->zone()),
       deoptimization_states_(code->zone()),
       deoptimization_literals_(code->zone()),
       inlined_function_count_(0),
@@ -158,6 +158,12 @@ Handle<Code> CodeGenerator::GenerateCode() {
     }
   }

+  // Assemble all eager deoptimization exits.
+  for (DeoptimizationExit* exit : deoptimization_exits_) {
+    masm()->bind(exit->label());
+    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
+  }
+
   // Ensure there is space for lazy deoptimization in the code.
   if (info->ShouldEnsureSpaceForLazyDeopt()) {
     int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
@@ -291,34 +297,59 @@ void CodeGenerator::AssembleInstruction(Instruction* instr) {

   FlagsMode mode = FlagsModeField::decode(instr->opcode());
   FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
-  if (mode == kFlags_branch) {
-    // Assemble a branch after this instruction.
-    InstructionOperandConverter i(this, instr);
-    RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
-    RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
+  switch (mode) {
+    case kFlags_branch: {
+      // Assemble a branch after this instruction.
+      InstructionOperandConverter i(this, instr);
+      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
+      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

-    if (true_rpo == false_rpo) {
-      // redundant branch.
-      if (!IsNextInAssemblyOrder(true_rpo)) {
-        AssembleArchJump(true_rpo);
+      if (true_rpo == false_rpo) {
+        // redundant branch.
+        if (!IsNextInAssemblyOrder(true_rpo)) {
+          AssembleArchJump(true_rpo);
+        }
+        return;
       }
-      return;
+      if (IsNextInAssemblyOrder(true_rpo)) {
+        // true block is next, can fall through if condition negated.
+        std::swap(true_rpo, false_rpo);
+        condition = NegateFlagsCondition(condition);
+      }
+      BranchInfo branch;
+      branch.condition = condition;
+      branch.true_label = GetLabel(true_rpo);
+      branch.false_label = GetLabel(false_rpo);
+      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
+      // Assemble architecture-specific branch.
+      AssembleArchBranch(instr, &branch);
+      break;
     }
-    if (IsNextInAssemblyOrder(true_rpo)) {
-      // true block is next, can fall through if condition negated.
-      std::swap(true_rpo, false_rpo);
-      condition = NegateFlagsCondition(condition);
+    case kFlags_deoptimize: {
+      // Assemble a conditional eager deoptimization after this instruction.
+      InstructionOperandConverter i(this, instr);
+      size_t frame_state_offset = MiscField::decode(instr->opcode());
+      DeoptimizationExit* const exit =
+          AddDeoptimizationExit(instr, frame_state_offset);
+      Label continue_label;
+      BranchInfo branch;
+      branch.condition = condition;
+      branch.true_label = exit->label();
+      branch.false_label = &continue_label;
+      branch.fallthru = true;
+      // Assemble architecture-specific branch.
+      AssembleArchBranch(instr, &branch);
+      masm()->bind(&continue_label);
+      break;
+    }
+    case kFlags_set: {
+      // Assemble a boolean materialization after this instruction.
+      AssembleArchBoolean(instr, condition);
+      break;
+    }
+    case kFlags_none: {
+      break;
+    }
-    BranchInfo branch;
-    branch.condition = condition;
-    branch.true_label = GetLabel(true_rpo);
-    branch.false_label = GetLabel(false_rpo);
-    branch.fallthru = IsNextInAssemblyOrder(false_rpo);
-    // Assemble architecture-specific branch.
-    AssembleArchBranch(instr, &branch);
-  } else if (mode == kFlags_set) {
-    // Assemble a boolean materialization after this instruction.
-    AssembleArchBoolean(instr, condition);
   }
 }

@@ -714,6 +745,15 @@ void CodeGenerator::MarkLazyDeoptSite() {
   last_lazy_deopt_pc_ = masm()->pc_offset();
 }

+DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
+    Instruction* instr, size_t frame_state_offset) {
+  int const deoptimization_id = BuildTranslation(
+      instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
+  DeoptimizationExit* const exit =
+      new (zone()) DeoptimizationExit(deoptimization_id);
+  deoptimization_exits_.push_back(exit);
+  return exit;
+}
+
 int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
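Taken together: a kFlags_deoptimize instruction costs one conditional branch
inline, and the deoptimizer call is emitted out of line, once per exit, after
the main code, instead of requiring scheduled basic blocks and an explicit
Deoptimize subgraph per check. An illustrative picture of the resulting
layout; registers, condition and id are made up:

  //   cmp   r1, r2             ; the flags-setting instruction itself
  //   b.ne  deopt_exit_3       ; emitted by the kFlags_deoptimize case above
  // continue_label:            ; bound immediately after (fallthru == true)
  //   ...                      ; straight-line fast path continues
  //
  //   ; appended by GenerateCode() for each recorded DeoptimizationExit:
  // deopt_exit_3:
  //   AssembleDeoptimizerCall(3, Deoptimizer::EAGER)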
src/compiler/code-generator.h

@@ -16,6 +16,7 @@ namespace internal {
 namespace compiler {

 // Forward declarations.
+class DeoptimizationExit;
 class FrameAccessState;
 class Linkage;
 class OutOfLineCode;
@@ -144,10 +145,10 @@ class CodeGenerator final : public GapResolver::Assembler {
   void RecordCallPosition(Instruction* instr);
   void PopulateDeoptimizationData(Handle<Code> code);
   int DefineDeoptimizationLiteral(Handle<Object> literal);
-  FrameStateDescriptor* GetFrameStateDescriptor(
-      Instruction* instr, size_t frame_access_state_offset);
+  FrameStateDescriptor* GetFrameStateDescriptor(Instruction* instr,
+                                                size_t frame_state_offset);
   int BuildTranslation(Instruction* instr, int pc_offset,
-                       size_t frame_access_state_offset,
+                       size_t frame_state_offset,
                        OutputFrameStateCombine state_combine);
   void BuildTranslationForFrameStateDescriptor(
       FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
@@ -165,6 +166,9 @@ class CodeGenerator final : public GapResolver::Assembler {
   void EnsureSpaceForLazyDeopt();
   void MarkLazyDeoptSite();

+  DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
+                                            size_t frame_state_offset);
+
   // Converts the delta in the number of stack parameters passed from a tail
   // caller to the callee into the distance (in pointers) the SP must be
   // adjusted, taking frame elision and other relevant factors into
@@ -210,6 +214,7 @@ class CodeGenerator final : public GapResolver::Assembler {
   GapResolver resolver_;
   SafepointTableBuilder safepoints_;
   ZoneVector<HandlerInfo> handlers_;
+  ZoneDeque<DeoptimizationExit*> deoptimization_exits_;
   ZoneDeque<DeoptimizationState*> deoptimization_states_;
   ZoneDeque<Handle<Object>> deoptimization_literals_;
   size_t inlined_function_count_;
src/compiler/common-operator-reducer.cc

@@ -57,6 +57,9 @@ Reduction CommonOperatorReducer::Reduce(Node* node) {
   switch (node->opcode()) {
     case IrOpcode::kBranch:
       return ReduceBranch(node);
+    case IrOpcode::kDeoptimizeIf:
+    case IrOpcode::kDeoptimizeUnless:
+      return ReduceDeoptimizeConditional(node);
     case IrOpcode::kMerge:
       return ReduceMerge(node);
     case IrOpcode::kEffectPhi:
@@ -123,6 +126,37 @@ Reduction CommonOperatorReducer::ReduceBranch(Node* node) {
   return Replace(dead());
 }

+Reduction CommonOperatorReducer::ReduceDeoptimizeConditional(Node* node) {
+  DCHECK(node->opcode() == IrOpcode::kDeoptimizeIf ||
+         node->opcode() == IrOpcode::kDeoptimizeUnless);
+  bool condition_is_true = node->opcode() == IrOpcode::kDeoptimizeUnless;
+  Node* condition = NodeProperties::GetValueInput(node, 0);
+  Node* frame_state = NodeProperties::GetValueInput(node, 1);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+  // Swap DeoptimizeIf/DeoptimizeUnless on {node} if {cond} is a BooleanNot
+  // and use the input to BooleanNot as new condition for {node}.  Note we
+  // assume that {cond} was already properly optimized before we get here
+  // (as guaranteed by the graph reduction logic).
+  if (condition->opcode() == IrOpcode::kBooleanNot) {
+    NodeProperties::ReplaceValueInput(node, condition->InputAt(0), 0);
+    NodeProperties::ChangeOp(node, condition_is_true
+                                       ? common()->DeoptimizeIf()
+                                       : common()->DeoptimizeUnless());
+    return Changed(node);
+  }
+  Decision const decision = DecideCondition(condition);
+  if (decision == Decision::kUnknown) return NoChange();
+  if (condition_is_true == (decision == Decision::kTrue)) {
+    return Replace(control);
+  }
+  control = graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
+                             frame_state, effect, control);
+  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
+  NodeProperties::MergeControlToEnd(graph(), common(), control);
+  Revisit(graph()->end());
+  return Replace(dead());
+}
+
 Reduction CommonOperatorReducer::ReduceMerge(Node* node) {
   DCHECK_EQ(IrOpcode::kMerge, node->opcode());
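The BooleanNot rewrite above, spelled out as a unit-test style sketch; the
graph and reducer fixtures here are assumed test scaffolding, not part of
this CL:

  // Reducing DeoptimizeIf(BooleanNot(x), ...) swaps the operator and strips
  // the negation, so the BooleanNot node can die if it has no other uses.
  Node* cond = graph()->NewNode(simplified()->BooleanNot(), x);
  Node* node = graph()->NewNode(common()->DeoptimizeIf(), cond, frame_state,
                                effect, control);
  Reduction const r = reducer.Reduce(node);
  CHECK(r.Changed());
  CHECK_EQ(IrOpcode::kDeoptimizeUnless, r.replacement()->opcode());
  CHECK_EQ(x, r.replacement()->InputAt(0));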
src/compiler/common-operator-reducer.h

@@ -30,6 +30,7 @@ class CommonOperatorReducer final : public AdvancedReducer {

 private:
   Reduction ReduceBranch(Node* node);
+  Reduction ReduceDeoptimizeConditional(Node* node);
   Reduction ReduceMerge(Node* node);
   Reduction ReduceEffectPhi(Node* node);
   Reduction ReducePhi(Node* node);
src/compiler/common-operator.cc

@@ -142,21 +142,21 @@ std::ostream& operator<<(std::ostream& os, ParameterInfo const& i) {
   return os;
 }

-#define CACHED_OP_LIST(V)                                  \
-  V(Dead, Operator::kFoldable, 0, 0, 0, 1, 1, 1)           \
-  V(IfTrue, Operator::kKontrol, 0, 0, 1, 0, 0, 1)          \
-  V(IfFalse, Operator::kKontrol, 0, 0, 1, 0, 0, 1)         \
-  V(IfSuccess, Operator::kKontrol, 0, 0, 1, 0, 0, 1)       \
-  V(IfDefault, Operator::kKontrol, 0, 0, 1, 0, 0, 1)       \
-  V(Throw, Operator::kKontrol, 1, 1, 1, 0, 0, 1)           \
-  V(Terminate, Operator::kKontrol, 0, 1, 1, 0, 0, 1)       \
-  V(OsrNormalEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1) \
-  V(OsrLoopEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1)   \
-  V(BeginRegion, Operator::kNoThrow, 0, 1, 0, 0, 1, 0)     \
+#define CACHED_OP_LIST(V)                                    \
+  V(Dead, Operator::kFoldable, 0, 0, 0, 1, 1, 1)             \
+  V(DeoptimizeIf, Operator::kFoldable, 2, 1, 1, 0, 0, 1)     \
+  V(DeoptimizeUnless, Operator::kFoldable, 2, 1, 1, 0, 0, 1) \
+  V(IfTrue, Operator::kKontrol, 0, 0, 1, 0, 0, 1)            \
+  V(IfFalse, Operator::kKontrol, 0, 0, 1, 0, 0, 1)           \
+  V(IfSuccess, Operator::kKontrol, 0, 0, 1, 0, 0, 1)         \
+  V(IfDefault, Operator::kKontrol, 0, 0, 1, 0, 0, 1)         \
+  V(Throw, Operator::kKontrol, 1, 1, 1, 0, 0, 1)             \
+  V(Terminate, Operator::kKontrol, 0, 1, 1, 0, 0, 1)         \
+  V(OsrNormalEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1)   \
+  V(OsrLoopEntry, Operator::kFoldable, 0, 1, 1, 0, 1, 1)     \
+  V(BeginRegion, Operator::kNoThrow, 0, 1, 0, 0, 1, 0)       \
   V(FinishRegion, Operator::kNoThrow, 1, 1, 0, 1, 1, 0)


 #define CACHED_RETURN_LIST(V) \
   V(1)                        \
   V(2)                        \
src/compiler/common-operator.h

@@ -133,6 +133,8 @@ class CommonOperatorBuilder final : public ZoneObject {
   const Operator* IfDefault();
   const Operator* Throw();
   const Operator* Deoptimize(DeoptimizeKind kind);
+  const Operator* DeoptimizeIf();
+  const Operator* DeoptimizeUnless();
   const Operator* Return(int value_input_count = 1);
   const Operator* Terminate();
src/compiler/ia32/instruction-selector-ia32.cc

@@ -404,10 +404,11 @@ void InstructionSelector::VisitCheckedStore(Node* node) {
   }
 }

+namespace {
+
 // Shared routine for multiple binary operations.
-static void VisitBinop(InstructionSelector* selector, Node* node,
-                       InstructionCode opcode, FlagsContinuation* cont) {
+void VisitBinop(InstructionSelector* selector, Node* node,
+                InstructionCode opcode, FlagsContinuation* cont) {
   IA32OperandGenerator g(selector);
   Int32BinopMatcher m(node);
   Node* left = m.left().node();
@@ -456,18 +457,24 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
   DCHECK_GE(arraysize(inputs), input_count);
   DCHECK_GE(arraysize(outputs), output_count);

-  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
-                 inputs);
+  opcode = cont->Encode(opcode);
+  if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
+                             cont->frame_state());
+  } else {
+    selector->Emit(opcode, output_count, outputs, input_count, inputs);
+  }
 }


 // Shared routine for multiple binary operations.
-static void VisitBinop(InstructionSelector* selector, Node* node,
-                       InstructionCode opcode) {
+void VisitBinop(InstructionSelector* selector, Node* node,
+                InstructionCode opcode) {
   FlagsContinuation cont;
   VisitBinop(selector, node, opcode, &cont);
 }

+}  // namespace
+
 void InstructionSelector::VisitWord32And(Node* node) {
   VisitBinop(this, node, kIA32And);
@@ -1007,6 +1014,9 @@ void VisitCompareWithMemoryOperand(InstructionSelector* selector,
     inputs[input_count++] = g.Label(cont->true_block());
     inputs[input_count++] = g.Label(cont->false_block());
     selector->Emit(opcode, 0, nullptr, input_count, inputs);
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
     InstructionOperand output = g.DefineAsRegister(cont->result());
@@ -1034,13 +1044,16 @@ void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                   InstructionOperand left, InstructionOperand right,
                   FlagsContinuation* cont) {
   IA32OperandGenerator g(selector);
+  opcode = cont->Encode(opcode);
   if (cont->IsBranch()) {
-    selector->Emit(cont->Encode(opcode), g.NoOutput(), left, right,
+    selector->Emit(opcode, g.NoOutput(), left, right,
                    g.Label(cont->true_block()), g.Label(cont->false_block()));
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
-    selector->Emit(cont->Encode(opcode), g.DefineAsByteRegister(cont->result()),
-                   left, right);
+    selector->Emit(opcode, g.DefineAsByteRegister(cont->result()), left, right);
   }
 }

@@ -1125,6 +1138,9 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
   if (cont->IsBranch()) {
     selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                    g.Label(cont->false_block()));
+  } else if (cont->IsDeoptimize()) {
+    selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr,
+                             cont->frame_state());
   } else {
     DCHECK(cont->IsSet());
     selector->Emit(opcode, g.DefineAsRegister(cont->result()));
@@ -1227,13 +1243,23 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,

 }  // namespace


 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                       BasicBlock* fbranch) {
   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
   VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
 }

+void InstructionSelector::VisitDeoptimizeIf(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
+void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
+  FlagsContinuation cont =
+      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
+  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
+}
+
 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
   IA32OperandGenerator g(this);
@@ -1264,7 +1290,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {

 void InstructionSelector::VisitWord32Equal(Node* const node) {
-  FlagsContinuation cont(kEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   Int32BinopMatcher m(node);
   if (m.right().Is(0)) {
     return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
@@ -1274,32 +1300,34 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {


 void InstructionSelector::VisitInt32LessThan(Node* node) {
-  FlagsContinuation cont(kSignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kSignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThan, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   VisitWordCompare(this, node, &cont);
 }


 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop(this, node, kIA32Add, &cont);
   }
   FlagsContinuation cont;
@@ -1309,7 +1337,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {

 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
-    FlagsContinuation cont(kOverflow, ovf);
+    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
     return VisitBinop(this, node, kIA32Sub, &cont);
   }
   FlagsContinuation cont;
@@ -1318,37 +1346,41 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {


 void InstructionSelector::VisitFloat32Equal(Node* node) {
-  FlagsContinuation cont(kUnorderedEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedGreaterThan, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
   VisitFloat32Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64Equal(Node* node) {
-  FlagsContinuation cont(kUnorderedEqual, node);
+  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThan(Node* node) {
-  FlagsContinuation cont(kUnsignedGreaterThan, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
   VisitFloat64Compare(this, node, &cont);
 }


 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
-  FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
+  FlagsContinuation cont =
+      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
   VisitFloat64Compare(this, node, &cont);
 }
@ -110,7 +110,12 @@ enum AddressingMode {
|
||||
std::ostream& operator<<(std::ostream& os, const AddressingMode& am);
|
||||
|
||||
// The mode of the flags continuation (see below).
|
||||
enum FlagsMode { kFlags_none = 0, kFlags_branch = 1, kFlags_set = 2 };
|
||||
enum FlagsMode {
|
||||
kFlags_none = 0,
|
||||
kFlags_branch = 1,
|
||||
kFlags_deoptimize = 2,
|
||||
kFlags_set = 3
|
||||
};
|
||||
|
||||
std::ostream& operator<<(std::ostream& os, const FlagsMode& fm);
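For orientation: the selector folds this mode into the instruction code via FlagsContinuation::Encode. Encode itself is not shown in this diff, so the following is only a plausible sketch, and the field names are assumptions:

    // Hypothetical sketch of FlagsContinuation::Encode (not part of this CL):
    // pack the continuation mode, and the condition when there is one, into
    // the InstructionCode so the code generator can tell a deoptimizing
    // compare apart from a branching or materializing one.
    InstructionCode Encode(InstructionCode opcode) {
      opcode |= FlagsModeField::encode(mode_);  // e.g. kFlags_deoptimize
      if (mode_ != kFlags_none) {
        opcode |= FlagsConditionField::encode(condition_);
      }
      return opcode;
    }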
@@ -303,22 +303,32 @@ class FlagsContinuation final {
    DCHECK_NOT_NULL(false_block);
  }

  // Creates a new flags continuation from the given condition and result node.
  FlagsContinuation(FlagsCondition condition, Node* result)
      : mode_(kFlags_set), condition_(condition), result_(result) {
    DCHECK_NOT_NULL(result);
  // Creates a new flags continuation for an eager deoptimization exit.
  static FlagsContinuation ForDeoptimize(FlagsCondition condition,
                                         Node* frame_state) {
    return FlagsContinuation(kFlags_deoptimize, condition, frame_state);
  }

  // Creates a new flags continuation for a boolean value.
  static FlagsContinuation ForSet(FlagsCondition condition, Node* result) {
    return FlagsContinuation(kFlags_set, condition, result);
  }

  bool IsNone() const { return mode_ == kFlags_none; }
  bool IsBranch() const { return mode_ == kFlags_branch; }
  bool IsDeoptimize() const { return mode_ == kFlags_deoptimize; }
  bool IsSet() const { return mode_ == kFlags_set; }
  FlagsCondition condition() const {
    DCHECK(!IsNone());
    return condition_;
  }
  Node* frame_state() const {
    DCHECK(IsDeoptimize());
    return frame_state_or_result_;
  }
  Node* result() const {
    DCHECK(IsSet());
    return result_;
    return frame_state_or_result_;
  }
  BasicBlock* true_block() const {
    DCHECK(IsBranch());
@@ -355,11 +365,20 @@ class FlagsContinuation final {
  }

 private:
  FlagsMode mode_;
  FlagsContinuation(FlagsMode mode, FlagsCondition condition,
                    Node* frame_state_or_result)
      : mode_(mode),
        condition_(condition),
        frame_state_or_result_(frame_state_or_result) {
    DCHECK_NOT_NULL(frame_state_or_result);
  }

  FlagsMode const mode_;
  FlagsCondition condition_;
  Node* result_;             // Only valid if mode_ == kFlags_set.
  BasicBlock* true_block_;   // Only valid if mode_ == kFlags_branch.
  BasicBlock* false_block_;  // Only valid if mode_ == kFlags_branch.
  Node* frame_state_or_result_;  // Only valid if mode_ == kFlags_deoptimize
                                 // or mode_ == kFlags_set.
  BasicBlock* true_block_;       // Only valid if mode_ == kFlags_branch.
  BasicBlock* false_block_;      // Only valid if mode_ == kFlags_branch.
};

}  // namespace compiler
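Taken together, the class now distinguishes three consumers of the flags; a condensed usage sketch (the calls mirror the selector code later in this diff):

    // kFlags_set: materialize the condition as a boolean result node.
    FlagsContinuation set = FlagsContinuation::ForSet(kOverflow, ovf);
    // kFlags_deoptimize: take an eager deoptimization exit, described by the
    // frame state, when the condition holds.
    FlagsContinuation deopt =
        FlagsContinuation::ForDeoptimize(kEqual, frame_state);
    // kFlags_branch continuations still go through the public
    // (condition, true_block, false_block) constructor.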
@@ -873,6 +873,10 @@ void InstructionSelector::VisitNode(Node* node) {
    }
    case IrOpcode::kCall:
      return VisitCall(node);
    case IrOpcode::kDeoptimizeIf:
      return VisitDeoptimizeIf(node);
    case IrOpcode::kDeoptimizeUnless:
      return VisitDeoptimizeUnless(node);
    case IrOpcode::kFrameState:
    case IrOpcode::kStateValues:
    case IrOpcode::kObjectState:
@@ -1627,25 +1631,41 @@ void InstructionSelector::VisitReturn(Node* ret) {
  }
}

Instruction* InstructionSelector::EmitDeoptimize(InstructionCode opcode,
                                                 InstructionOperand output,
                                                 InstructionOperand a,
                                                 InstructionOperand b,
                                                 Node* frame_state) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b};
  size_t input_count = arraysize(inputs);
  return EmitDeoptimize(opcode, output_count, &output, input_count, inputs,
                        frame_state);
}

Instruction* InstructionSelector::EmitDeoptimize(
    InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
    size_t input_count, InstructionOperand* inputs, Node* frame_state) {
  OperandGenerator g(this);
  FrameStateDescriptor* const descriptor = GetFrameStateDescriptor(frame_state);
  InstructionOperandVector args(instruction_zone());
  args.reserve(input_count + 1 + descriptor->GetTotalSize());
  for (size_t i = 0; i < input_count; ++i) {
    args.push_back(inputs[i]);
  }
  opcode |= MiscField::encode(static_cast<int>(input_count));
  InstructionSequence::StateId const state_id =
      sequence()->AddFrameStateDescriptor(descriptor);
  args.push_back(g.TempImmediate(state_id.ToInt()));
  StateObjectDeduplicator deduplicator(instruction_zone());
  AddInputsToFrameStateDescriptor(descriptor, frame_state, &g, &deduplicator,
                                  &args, FrameStateInputKind::kAny,
                                  instruction_zone());
  return Emit(opcode, output_count, outputs, args.size(), &args.front(), 0,
              nullptr);
}
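Note the operand layout this produces: the real inputs come first, then the frame state id immediate, then the flattened frame state values, with MiscField recording the real input count. A hedged sketch of how a consumer could locate the deoptimization state (the helper is hypothetical, not part of this CL):

    // Hypothetical: index of the state id immediate in an instruction
    // emitted by EmitDeoptimize above.
    size_t FirstDeoptInputIndex(const Instruction* instr) {
      return MiscField::decode(instr->opcode());  // == number of real inputs
    }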

void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind, Node* value) {
  OperandGenerator g(this);

  FrameStateDescriptor* desc = GetFrameStateDescriptor(value);

  InstructionOperandVector args(instruction_zone());
  args.reserve(desc->GetTotalSize() + 1);  // Include deopt id.

  InstructionSequence::StateId state_id =
      sequence()->AddFrameStateDescriptor(desc);
  args.push_back(g.TempImmediate(state_id.ToInt()));

  StateObjectDeduplicator deduplicator(instruction_zone());

  AddInputsToFrameStateDescriptor(desc, value, &g, &deduplicator, &args,
                                  FrameStateInputKind::kAny,
                                  instruction_zone());

  InstructionCode opcode = kArchDeoptimize;
  switch (kind) {
    case DeoptimizeKind::kEager:
@@ -1655,7 +1675,7 @@ void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind, Node* value) {
      opcode |= MiscField::encode(Deoptimizer::SOFT);
      break;
  }
  Emit(opcode, 0, nullptr, args.size(), &args.front(), 0, nullptr);
  EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, value);
}
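So the unconditional Deoptimize operator becomes the zero-input case of the shared helper, while conditional exits route the compare operands through the two-operand overload; both calls appear verbatim elsewhere in this diff:

    EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, value);   // Deoptimize
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());           // DeoptimizeIf/Unless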
@@ -99,6 +99,17 @@ class InstructionSelector final {
                    InstructionOperand* temps = nullptr);
  Instruction* Emit(Instruction* instr);

  // ===========================================================================
  // ===== Architecture-independent deoptimization exit emission methods. =====
  // ===========================================================================

  Instruction* EmitDeoptimize(InstructionCode opcode, InstructionOperand output,
                              InstructionOperand a, InstructionOperand b,
                              Node* frame_state);
  Instruction* EmitDeoptimize(InstructionCode opcode, size_t output_count,
                              InstructionOperand* outputs, size_t input_count,
                              InstructionOperand* inputs, Node* frame_state);

  // ===========================================================================
  // ============== Architecture-independent CPU feature methods. ==============
  // ===========================================================================
@@ -243,6 +254,8 @@ class InstructionSelector final {
  void VisitProjection(Node* node);
  void VisitConstant(Node* node);
  void VisitCall(Node* call, BasicBlock* handler = nullptr);
  void VisitDeoptimizeIf(Node* node);
  void VisitDeoptimizeUnless(Node* node);
  void VisitTailCall(Node* call);
  void VisitGoto(BasicBlock* target);
  void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
@@ -393,6 +393,8 @@ std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_set:
      return os << "set";
  }
@@ -329,16 +329,8 @@ Reduction JSCallReducer::ReduceJSCallFunction(Node* node) {
    Node* check = effect =
        graph()->NewNode(javascript()->StrictEqual(), target, array_function,
                         context, effect, control);
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    Node* deoptimize =
        graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                         frame_state, effect, if_false);
    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
    NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
    Revisit(graph()->end());
    control = graph()->NewNode(common()->IfTrue(), branch);
    control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               effect, control);

    // Turn the {node} into a {JSCreateArray} call.
    NodeProperties::ReplaceValueInput(node, array_function, 0);
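The rewritten reduction replaces four nodes (Branch, IfTrue, IfFalse, Deoptimize) plus the merge-to-end plumbing with a single node. The operator shape, as used throughout this diff, is:

    // DeoptimizeUnless(condition, frame_state; effect, control): the output
    // control continues when {check} holds; otherwise execution takes an
    // eager deoptimization exit described by {frame_state}.
    control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               effect, control);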
@@ -355,16 +347,8 @@ Reduction JSCallReducer::ReduceJSCallFunction(Node* node) {
    Node* check = effect =
        graph()->NewNode(javascript()->StrictEqual(), target, target_function,
                         context, effect, control);
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    Node* deoptimize =
        graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                         frame_state, effect, if_false);
    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
    NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
    Revisit(graph()->end());
    control = graph()->NewNode(common()->IfTrue(), branch);
    control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                               frame_state, effect, control);

    // Specialize the JSCallFunction node to the {target_function}.
    NodeProperties::ReplaceValueInput(node, target_function, 0);
@@ -473,16 +457,8 @@ Reduction JSCallReducer::ReduceJSCallConstruct(Node* node) {
    Node* check = effect =
        graph()->NewNode(javascript()->StrictEqual(), target, array_function,
                         context, effect, control);
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    Node* deoptimize =
        graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                         frame_state, effect, if_false);
    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
    NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
    Revisit(graph()->end());
    control = graph()->NewNode(common()->IfTrue(), branch);
    control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               effect, control);

    // Turn the {node} into a {JSCreateArray} call.
    NodeProperties::ReplaceEffectInput(node, effect);
@@ -505,16 +481,8 @@ Reduction JSCallReducer::ReduceJSCallConstruct(Node* node) {
    Node* check = effect =
        graph()->NewNode(javascript()->StrictEqual(), target, target_function,
                         context, effect, control);
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    Node* deoptimize =
        graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                         frame_state, effect, if_false);
    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
    NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
    Revisit(graph()->end());
    control = graph()->NewNode(common()->IfTrue(), branch);
    control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                               frame_state, effect, control);

    // Specialize the JSCallConstruct node to the {target_function}.
    NodeProperties::ReplaceValueInput(node, target_function, 0);

@@ -20,7 +20,7 @@ class JSOperatorBuilder;

// Performs strength reduction on {JSCallConstruct} and {JSCallFunction} nodes,
// which might allow inlining or other optimizations to be performed afterwards.
class JSCallReducer final : public AdvancedReducer {
class JSCallReducer final : public Reducer {
 public:
  // Flags that control the mode of operation.
  enum Flag {
@@ -29,12 +29,9 @@ class JSCallReducer final : public AdvancedReducer {
  };
  typedef base::Flags<Flag> Flags;

  JSCallReducer(Editor* editor, JSGraph* jsgraph, Flags flags,
  JSCallReducer(JSGraph* jsgraph, Flags flags,
                MaybeHandle<Context> native_context)
      : AdvancedReducer(editor),
        jsgraph_(jsgraph),
        flags_(flags),
        native_context_(native_context) {}
      : jsgraph_(jsgraph), flags_(flags), native_context_(native_context) {}

  Reduction Reduce(Node* node) final;

@@ -45,6 +45,8 @@ Reduction JSGenericLowering::Reduce(Node* node) {
      JS_OP_LIST(DECLARE_CASE)
#undef DECLARE_CASE
    case IrOpcode::kBranch:
    case IrOpcode::kDeoptimizeIf:
    case IrOpcode::kDeoptimizeUnless:
      // TODO(mstarzinger): If typing is enabled then simplified lowering will
      // have inserted the correct ChangeBoolToBit, otherwise we need to perform
      // poor-man's representation inference here and insert manual change.
@@ -171,16 +171,8 @@ Reduction JSGlobalObjectSpecialization::ReduceJSStoreGlobal(Node* node) {
      Node* check =
          graph()->NewNode(simplified()->ReferenceEqual(Type::Tagged()), value,
                           jsgraph()->Constant(property_cell_value));
      Node* branch =
          graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
      Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
      Node* deoptimize =
          graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                           frame_state, effect, if_false);
      // TODO(bmeurer): This should be on the AdvancedReducer somehow.
      NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
      Revisit(graph()->end());
      control = graph()->NewNode(common()->IfTrue(), branch);
      control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                 frame_state, effect, control);
      break;
    }
    case PropertyCellType::kConstantType: {
@@ -191,16 +183,8 @@ Reduction JSGlobalObjectSpecialization::ReduceJSStoreGlobal(Node* node) {
      Type* property_cell_value_type = Type::TaggedSigned();
      if (property_cell_value->IsHeapObject()) {
        // Deoptimize if the {value} is a Smi.
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                        check, control);
        Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
        Node* deoptimize =
            graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                             frame_state, effect, if_true);
        // TODO(bmeurer): This should be on the AdvancedReducer somehow.
        NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
        Revisit(graph()->end());
        control = graph()->NewNode(common()->IfFalse(), branch);
        control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                                   effect, control);

        // Load the {value} map to check against the {property_cell} map.
        Node* value_map = effect =
@@ -213,16 +197,8 @@ Reduction JSGlobalObjectSpecialization::ReduceJSStoreGlobal(Node* node) {
                             jsgraph()->HeapConstant(property_cell_value_map));
        property_cell_value_type = Type::TaggedPointer();
      }
      Node* branch =
          graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
      Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
      Node* deoptimize =
          graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                           frame_state, effect, if_false);
      // TODO(bmeurer): This should be on the AdvancedReducer somehow.
      NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
      Revisit(graph()->end());
      control = graph()->NewNode(common()->IfTrue(), branch);
      control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                 frame_state, effect, control);
      effect = graph()->NewNode(
          simplified()->StoreField(
              AccessBuilder::ForPropertyCellValue(property_cell_value_type)),
@@ -108,27 +108,36 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
  ZoneVector<Node*> effects(zone());
  ZoneVector<Node*> controls(zone());

  // The list of "exiting" controls, which currently go to a single deoptimize.
  // TODO(bmeurer): Consider using an IC as fallback.
  Node* const exit_effect = effect;
  ZoneVector<Node*> exit_controls(zone());

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    Node* check = graph()->NewNode(simplified()->ReferenceEqual(Type::Name()),
                                   index, jsgraph()->HeapConstant(name));
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    control = graph()->NewNode(common()->IfTrue(), branch);
    control = graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               effect, control);
  }

  // Check if {receiver} may be a number.
  bool receiverissmi_possible = false;
  for (PropertyAccessInfo const& access_info : access_infos) {
    if (access_info.receiver_type()->Is(Type::Number())) {
      receiverissmi_possible = true;
      break;
    }
  }

  // Ensure that {receiver} is a heap object.
  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
  Node* branch = graph()->NewNode(common()->Branch(), check, control);
  control = graph()->NewNode(common()->IfFalse(), branch);
  Node* receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
  Node* receiverissmi_control = nullptr;
  Node* receiverissmi_effect = effect;
  if (receiverissmi_possible) {
    Node* branch = graph()->NewNode(common()->Branch(), check, control);
    control = graph()->NewNode(common()->IfFalse(), branch);
    receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
    receiverissmi_effect = effect;
  } else {
    control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               effect, control);
  }

  // Load the {receiver} map. The resulting effect is the dominating effect for
  // all (polymorphic) branches.
@@ -138,7 +147,8 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(

  // Generate code for the various different property access patterns.
  Node* fallthrough_control = control;
  for (PropertyAccessInfo const& access_info : access_infos) {
  for (size_t j = 0; j < access_infos.size(); ++j) {
    PropertyAccessInfo const& access_info = access_infos[j];
    Node* this_value = value;
    Node* this_receiver = receiver;
    Node* this_effect = effect;
@@ -154,37 +164,52 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
      Node* check =
          graph()->NewNode(machine()->Uint32LessThan(), receiver_instance_type,
                           jsgraph()->Uint32Constant(FIRST_NONSTRING_TYPE));
      Node* branch =
          graph()->NewNode(common()->Branch(), check, fallthrough_control);
      fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
      this_control = graph()->NewNode(common()->IfTrue(), branch);
      if (j == access_infos.size() - 1) {
        this_control =
            graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                             this_effect, fallthrough_control);
        fallthrough_control = nullptr;
      } else {
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        this_control = graph()->NewNode(common()->IfTrue(), branch);
      }
    } else {
      // Emit a (sequence of) map checks for other {receiver}s.
      ZoneVector<Node*> this_controls(zone());
      ZoneVector<Node*> this_effects(zone());
      int num_classes = access_info.receiver_type()->NumClasses();
      for (auto i = access_info.receiver_type()->Classes(); !i.Done();
           i.Advance()) {
        DCHECK_LT(0, num_classes);
        Handle<Map> map = i.Current();
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
                             receiver_map, jsgraph()->Constant(map));
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
        this_effects.push_back(this_effect);
        if (--num_classes == 0 && j == access_infos.size() - 1) {
          this_controls.push_back(
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, fallthrough_control));
          this_effects.push_back(this_effect);
          fallthrough_control = nullptr;
        } else {
          Node* branch =
              graph()->NewNode(common()->Branch(), check, fallthrough_control);
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
          this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_effects.push_back(this_effect);
        }
      }

      // The Number case requires special treatment to also deal with Smis.
      if (receiver_type->Is(Type::Number())) {
        // Join this check with the "receiver is smi" check above, and mark the
        // "receiver is smi" check as "consumed" so that we don't deoptimize if
        // the {receiver} is actually a Smi.
        if (receiverissmi_control != nullptr) {
          this_controls.push_back(receiverissmi_control);
          this_effects.push_back(receiverissmi_effect);
          receiverissmi_control = receiverissmi_effect = nullptr;
        }
        // Join this check with the "receiver is smi" check above.
        DCHECK_NOT_NULL(receiverissmi_effect);
        DCHECK_NOT_NULL(receiverissmi_control);
        this_effects.push_back(receiverissmi_effect);
        this_controls.push_back(receiverissmi_control);
        receiverissmi_effect = receiverissmi_control = nullptr;
      }

      // Create dominating Merge+EffectPhi for this {receiver} type.
@@ -212,23 +237,14 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    // Generate the actual property access.
    if (access_info.IsNotFound()) {
      DCHECK_EQ(AccessMode::kLoad, access_mode);
      if (is_strong(language_mode)) {
        // TODO(bmeurer/mstarzinger): Add support for lowering inside try
        // blocks rewiring the IfException edge to a runtime call/throw.
        exit_controls.push_back(this_control);
        continue;
      } else {
        this_value = jsgraph()->UndefinedConstant();
      }
      this_value = jsgraph()->UndefinedConstant();
    } else if (access_info.IsDataConstant()) {
      this_value = jsgraph()->Constant(access_info.constant());
      if (access_mode == AccessMode::kStore) {
        Node* check = graph()->NewNode(
            simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
        exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
        this_control = graph()->NewNode(common()->IfTrue(), branch);
        this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                        frame_state, this_effect, this_control);
      }
    } else {
      DCHECK(access_info.IsDataField());
@@ -253,10 +269,9 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
                  jsgraph()->Int32Constant(
                      1 << JSArrayBuffer::WasNeutered::kShift)),
              jsgraph()->Int32Constant(0));
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
          break;
        }
      }
@@ -292,11 +307,9 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
        if (field_type->Is(Type::UntaggedFloat64())) {
          Node* check =
              graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_control =
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, this_control);
          this_value = graph()->NewNode(common()->Guard(Type::Number()),
                                        this_value, this_control);

@@ -335,46 +348,30 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
        } else if (field_type->Is(Type::TaggedSigned())) {
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_control =
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               this_effect, this_control);
          this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
                                        this_value, this_control);
        } else if (field_type->Is(Type::TaggedPointer())) {
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          if (field_type->NumClasses() > 0) {
            // Emit a (sequence of) map checks for the value.
            ZoneVector<Node*> this_controls(zone());
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
          if (field_type->NumClasses() == 1) {
            // Emit a map check for the value.
            Node* this_value_map = this_effect = graph()->NewNode(
                simplified()->LoadField(AccessBuilder::ForMap()), this_value,
                this_effect, this_control);
            for (auto i = field_type->Classes(); !i.Done(); i.Advance()) {
              Handle<Map> field_map(i.Current());
              check = graph()->NewNode(
                  simplified()->ReferenceEqual(Type::Internal()),
                  this_value_map, jsgraph()->Constant(field_map));
              branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
              this_control = graph()->NewNode(common()->IfFalse(), branch);
              this_controls.push_back(
                  graph()->NewNode(common()->IfTrue(), branch));
            }
            exit_controls.push_back(this_control);
            int const this_control_count =
                static_cast<int>(this_controls.size());
            Node* check = graph()->NewNode(
                simplified()->ReferenceEqual(Type::Internal()), this_value_map,
                jsgraph()->Constant(field_type->Classes().Current()));
            this_control =
                (this_control_count == 1)
                    ? this_controls.front()
                    : graph()->NewNode(common()->Merge(this_control_count),
                                       this_control_count,
                                       &this_controls.front());
                graph()->NewNode(common()->DeoptimizeUnless(), check,
                                 frame_state, this_effect, this_control);
          } else {
            DCHECK_EQ(0, field_type->NumClasses());
          }
        } else {
          DCHECK(field_type->Is(Type::Tagged()));
@@ -403,39 +400,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    controls.push_back(this_control);
  }

  // Collect the fallthrough control as final "exit" control.
  if (fallthrough_control != control) {
    // Mark the last fallthrough branch as deferred.
    MarkAsDeferred(fallthrough_control);
  }
  exit_controls.push_back(fallthrough_control);

  // Also collect the "receiver is smi" control if we didn't handle the case of
  // Number primitives in the polymorphic branches above.
  if (receiverissmi_control != nullptr) {
    // Mark the "receiver is smi" case as deferred.
    MarkAsDeferred(receiverissmi_control);
    DCHECK_EQ(exit_effect, receiverissmi_effect);
    exit_controls.push_back(receiverissmi_control);
  }

  // Generate the single "exit" point, where we get if either all map/instance
  // type checks failed, or one of the assumptions inside one of the cases
  // failes (i.e. failing prototype chain check).
  // TODO(bmeurer): Consider falling back to IC here if deoptimization is
  // disabled.
  int const exit_control_count = static_cast<int>(exit_controls.size());
  Node* exit_control =
      (exit_control_count == 1)
          ? exit_controls.front()
          : graph()->NewNode(common()->Merge(exit_control_count),
                             exit_control_count, &exit_controls.front());
  Node* deoptimize =
      graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                       frame_state, exit_effect, exit_control);
  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
  Revisit(graph()->end());
  DCHECK_NULL(fallthrough_control);

  // Generate the final merge point for all (polymorphic) branches.
  int const control_count = static_cast<int>(controls.size());
@@ -562,17 +527,10 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
  ZoneVector<Node*> effects(zone());
  ZoneVector<Node*> controls(zone());

  // The list of "exiting" controls, which currently go to a single deoptimize.
  // TODO(bmeurer): Consider using an IC as fallback.
  Node* const exit_effect = effect;
  ZoneVector<Node*> exit_controls(zone());

  // Ensure that {receiver} is a heap object.
  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);
  exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
  control = graph()->NewNode(common()->IfFalse(), branch);
  control = graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                             effect, control);

  // Load the {receiver} map. The resulting effect is the dominating effect for
  // all (polymorphic) branches.
@@ -582,7 +540,8 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(

  // Generate code for the various different element access patterns.
  Node* fallthrough_control = control;
  for (ElementAccessInfo const& access_info : access_infos) {
  for (size_t j = 0; j < access_infos.size(); ++j) {
    ElementAccessInfo const& access_info = access_infos[j];
    Node* this_receiver = receiver;
    Node* this_value = value;
    Node* this_index = index;
@@ -595,35 +554,61 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
    {
      ZoneVector<Node*> this_controls(zone());
      ZoneVector<Node*> this_effects(zone());
      size_t num_transitions = access_info.transitions().size();
      int num_classes = access_info.receiver_type()->NumClasses();
      for (auto i = access_info.receiver_type()->Classes(); !i.Done();
           i.Advance()) {
        DCHECK_LT(0, num_classes);
        Handle<Map> map = i.Current();
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
                             receiver_map, jsgraph()->Constant(map));
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
        if (--num_classes == 0 && num_transitions == 0 &&
            j == access_infos.size() - 1) {
          // Last map check on the fallthrough control path, do a conditional
          // eager deoptimization exit here.
          // TODO(turbofan): This is ugly as hell! We should probably introduce
          // macro-ish operators for property access that encapsulate this whole
          // mess.
          this_controls.push_back(graph()->NewNode(common()->DeoptimizeUnless(),
                                                   check, frame_state, effect,
                                                   fallthrough_control));
          fallthrough_control = nullptr;
        } else {
          Node* branch =
              graph()->NewNode(common()->Branch(), check, fallthrough_control);
          this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        }
        this_effects.push_back(effect);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        if (!map->IsJSArrayMap()) receiver_is_jsarray = false;
      }

      // Generate possible elements kind transitions.
      for (auto transition : access_info.transitions()) {
        DCHECK_LT(0u, num_transitions);
        Handle<Map> transition_source = transition.first;
        Handle<Map> transition_target = transition.second;
        Node* transition_control;
        Node* transition_effect = effect;

        // Check if {receiver} has the specified {transition_source} map.
        Node* check = graph()->NewNode(
            simplified()->ReferenceEqual(Type::Any()), receiver_map,
            jsgraph()->HeapConstant(transition_source));
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        if (--num_transitions == 0 && j == access_infos.size() - 1) {
          transition_control =
              graph()->NewNode(common()->DeoptimizeUnless(), check, frame_state,
                               transition_effect, fallthrough_control);
          fallthrough_control = nullptr;
        } else {
          Node* branch =
              graph()->NewNode(common()->Branch(), check, fallthrough_control);
          fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
          transition_control = graph()->NewNode(common()->IfTrue(), branch);
        }

        // Migrate {receiver} from {transition_source} to {transition_target}.
        Node* transition_control = graph()->NewNode(common()->IfTrue(), branch);
        Node* transition_effect = effect;
        if (IsSimpleMapChangeTransition(transition_source->elements_kind(),
                                        transition_target->elements_kind())) {
          // In-place migration, just store the {transition_target} map.
@@ -647,8 +632,6 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
        }
        this_controls.push_back(transition_control);
        this_effects.push_back(transition_effect);

        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
      }

      // Create single chokepoint for the control.
@@ -679,10 +662,8 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
    if (!NumberMatcher(this_index).HasValue()) {
      Node* check =
          graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
      Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                      check, this_control);
      exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
      this_control = graph()->NewNode(common()->IfTrue(), branch);
      this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                      frame_state, this_effect, this_control);
      this_index = graph()->NewNode(common()->Guard(Type::Number()), this_index,
                                    this_control);
    }
@@ -694,10 +675,8 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
          graph()->NewNode(simplified()->NumberToUint32(), this_index);
      Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
                                     this_index);
      Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                      check, this_control);
      exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
      this_control = graph()->NewNode(common()->IfTrue(), branch);
      this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                      frame_state, this_effect, this_control);
      this_index = this_index32;
    }

@@ -716,13 +695,11 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
      Node* this_elements_map = this_effect =
          graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
                           this_elements, this_effect, this_control);
      check = graph()->NewNode(
      Node* check = graph()->NewNode(
          simplified()->ReferenceEqual(Type::Any()), this_elements_map,
          jsgraph()->HeapConstant(factory()->fixed_array_map()));
      branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
                                this_control);
      exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
      this_control = graph()->NewNode(common()->IfTrue(), branch);
      this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                      frame_state, this_effect, this_control);
    }

    // Load the length of the {receiver}.
@@ -739,10 +716,8 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
    // Check that the {index} is in the valid range for the {receiver}.
    Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
                                   this_length);
    Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
                                    this_control);
    exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    this_control = graph()->NewNode(common()->IfTrue(), branch);
    this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                    frame_state, this_effect, this_control);

    // Compute the element access.
    Type* element_type = Type::Any();
@@ -781,16 +756,16 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
                             this_value, jsgraph()->TheHoleConstant());
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                        check, this_control);
        Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
        Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
        // Check if we are allowed to turn the hole into undefined.
        Type* initial_holey_array_type = Type::Class(
            handle(isolate()->get_initial_js_array_map(elements_kind)),
            graph()->zone());
        if (receiver_type->NowIs(initial_holey_array_type) &&
            isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
          Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
          // Add a code dependency on the array protector cell.
          AssumePrototypesStable(receiver_type, native_context,
                                 isolate()->initial_object_prototype());
@@ -805,8 +780,9 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
              Type::Union(element_type, Type::Undefined(), graph()->zone());
        } else {
          // Deoptimize in case of the hole.
          exit_controls.push_back(if_true);
          this_control = if_false;
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
        }
        // Rename the result to represent the actual type (not polluted by the
        // hole).
@@ -833,29 +809,24 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
                             check, jsgraph()->UndefinedConstant(), this_value);
        } else {
          // Deoptimize in case of the hole.
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control =
              graph()->NewNode(common()->DeoptimizeIf(), check, frame_state,
                               this_effect, this_control);
        }
      }
    } else {
      DCHECK_EQ(AccessMode::kStore, access_mode);
      if (IsFastSmiElementsKind(elements_kind)) {
        Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
        exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
        this_control = graph()->NewNode(common()->IfTrue(), branch);
        this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                        frame_state, this_effect, this_control);
        this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
                                      this_value, this_control);
      } else if (IsFastDoubleElementsKind(elements_kind)) {
        Node* check =
            graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
        exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
        this_control = graph()->NewNode(common()->IfTrue(), branch);
        this_control = graph()->NewNode(common()->DeoptimizeUnless(), check,
                                        frame_state, this_effect, this_control);
        this_value = graph()->NewNode(common()->Guard(Type::Number()),
                                      this_value, this_control);
      }
@@ -870,30 +841,7 @@ Reduction JSNativeContextSpecialization::ReduceElementAccess(
    controls.push_back(this_control);
  }

  // Collect the fallthrough control as final "exit" control.
  if (fallthrough_control != control) {
    // Mark the last fallthrough branch as deferred.
    MarkAsDeferred(fallthrough_control);
  }
  exit_controls.push_back(fallthrough_control);

  // Generate the single "exit" point, where we get if either all map/instance
  // type checks failed, or one of the assumptions inside one of the cases
  // failes (i.e. failing prototype chain check).
  // TODO(bmeurer): Consider falling back to IC here if deoptimization is
  // disabled.
  int const exit_control_count = static_cast<int>(exit_controls.size());
  Node* exit_control =
      (exit_control_count == 1)
          ? exit_controls.front()
          : graph()->NewNode(common()->Merge(exit_control_count),
                             exit_control_count, &exit_controls.front());
  Node* deoptimize =
      graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                       frame_state, exit_effect, exit_control);
  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
  Revisit(graph()->end());
  DCHECK_NULL(fallthrough_control);

  // Generate the final merge point for all (polymorphic) branches.
  int const control_count = static_cast<int>(controls.size());
@@ -1048,18 +996,6 @@ void JSNativeContextSpecialization::AssumePrototypesStable(
}


void JSNativeContextSpecialization::MarkAsDeferred(Node* if_projection) {
  Node* branch = NodeProperties::GetControlInput(if_projection);
  DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
  if (if_projection->opcode() == IrOpcode::kIfTrue) {
    NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kFalse));
  } else {
    DCHECK_EQ(IrOpcode::kIfFalse, if_projection->opcode());
    NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kTrue));
  }
}


MaybeHandle<Context> JSNativeContextSpecialization::GetNativeContext(
    Node* node) {
  Node* const context = NodeProperties::GetContextInput(node);
@@ -85,10 +85,6 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
                         Handle<Context> native_context,
                         Handle<JSObject> holder);

  // Assuming that {if_projection} is either IfTrue or IfFalse, adds a hint on
  // the dominating Branch that {if_projection} is the unlikely (deferred) case.
  void MarkAsDeferred(Node* if_projection);

  // Retrieve the native context from the given {node} if known.
  MaybeHandle<Context> GetNativeContext(Node* node);

@@ -114,8 +114,13 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}

@@ -979,6 +984,9 @@ static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
@@ -1084,9 +1092,6 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
  VisitWordCompare(selector, node, kMipsCmp, cont, false);
}

}  // namespace


// Shared routine for word comparisons against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
@@ -1175,12 +1180,17 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), value_operand, g.TempImmediate(0),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), value_operand,
                             g.TempImmediate(0), cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   g.TempImmediate(0));
  }
}

}  // namespace

void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
@@ -1188,6 +1198,17 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}
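The condition choice encodes each operator's polarity against the zero compare that VisitWordCompareZero emits: DeoptimizeIf must exit when its input is true (non-zero), DeoptimizeUnless when it is false (zero). In sketch form:

    // VisitWordCompareZero compares (input, 0) under the given condition, and
    // a kFlags_deoptimize continuation fires the deopt exit when it holds:
    //   DeoptimizeIf:     deopt iff input != 0  ->  kNotEqual
    //   DeoptimizeUnless: deopt iff input == 0  ->  kEqual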
|
||||
|
||||
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
|
||||
MipsOperandGenerator g(this);
|
||||
@ -1218,7 +1239,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
|
||||
|
||||
|
||||
void InstructionSelector::VisitWord32Equal(Node* const node) {
|
||||
FlagsContinuation cont(kEqual, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
|
||||
Int32BinopMatcher m(node);
|
||||
if (m.right().Is(0)) {
|
||||
return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
|
||||
@ -1228,32 +1249,34 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {
|
||||
|
||||
|
||||
void InstructionSelector::VisitInt32LessThan(Node* node) {
|
||||
FlagsContinuation cont(kSignedLessThan, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
|
||||
VisitWordCompare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
|
||||
FlagsContinuation cont(kSignedLessThanOrEqual, node);
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
|
||||
VisitWordCompare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitUint32LessThan(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThan, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
|
||||
VisitWordCompare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
|
||||
VisitWordCompare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
|
||||
if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
|
||||
FlagsContinuation cont(kOverflow, ovf);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
|
||||
return VisitBinop(this, node, kMipsAddOvf, &cont);
|
||||
}
|
||||
FlagsContinuation cont;
|
||||
@ -1263,7 +1286,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
|
||||
|
||||
void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
|
||||
if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
|
||||
FlagsContinuation cont(kOverflow, ovf);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
|
||||
return VisitBinop(this, node, kMipsSubOvf, &cont);
|
||||
}
|
||||
FlagsContinuation cont;
|
||||
@ -1272,37 +1295,39 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat32Equal(Node* node) {
|
||||
FlagsContinuation cont(kEqual, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
|
||||
VisitFloat32Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat32LessThan(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThan, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
|
||||
VisitFloat32Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
|
||||
VisitFloat32Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat64Equal(Node* node) {
|
||||
FlagsContinuation cont(kEqual, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
|
||||
VisitFloat64Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat64LessThan(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThan, node);
|
||||
FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
|
||||
VisitFloat64Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
||||
void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
|
||||
FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
|
||||
VisitFloat64Compare(this, node, &cont);
|
||||
}
|
||||
|
||||
|
@ -119,8 +119,13 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
|
||||
DCHECK_GE(arraysize(inputs), input_count);
|
||||
DCHECK_GE(arraysize(outputs), output_count);
|
||||
|
||||
selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
|
||||
inputs);
|
||||
opcode = cont->Encode(opcode);
|
||||
if (cont->IsDeoptimize()) {
|
||||
selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
|
||||
cont->frame_state());
|
||||
} else {
|
||||
selector->Emit(opcode, output_count, outputs, input_count, inputs);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1431,6 +1436,9 @@ static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
|
||||
if (cont->IsBranch()) {
|
||||
selector->Emit(opcode, g.NoOutput(), left, right,
|
||||
g.Label(cont->true_block()), g.Label(cont->false_block()));
|
||||
} else if (cont->IsDeoptimize()) {
|
||||
selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
|
||||
cont->frame_state());
|
||||
} else {
|
||||
DCHECK(cont->IsSet());
|
||||
selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
|
||||
@ -1542,7 +1550,6 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
|
||||
VisitWordCompare(selector, node, kMips64Cmp, cont, false);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
|
||||
void EmitWordCompareZero(InstructionSelector* selector, Node* value,
|
||||
@ -1677,6 +1684,7 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
|
||||
EmitWordCompareZero(selector, value, cont);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
|
||||
BasicBlock* fbranch) {
|
||||
@ -1684,6 +1692,17 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
|
||||
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
|
||||
}
|
||||
|
||||
void InstructionSelector::VisitDeoptimizeIf(Node* node) {
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
|
||||
VisitWordCompareZero(this, node, node->InputAt(0), &cont);
|
||||
}
|
||||
|
||||
void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
|
||||
FlagsContinuation cont =
|
||||
FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
|
||||
VisitWordCompareZero(this, node, node->InputAt(0), &cont);
|
||||
}
|
||||
|
||||
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
|
||||
Mips64OperandGenerator g(this);
|
||||
@ -1714,7 +1733,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
@ -1725,32 +1744,34 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kMips64Dadd, &cont);
  }
  FlagsContinuation cont;
@ -1760,7 +1781,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {

void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kMips64Dsub, &cont);
  }
  FlagsContinuation cont;
@ -1770,7 +1791,7 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {

void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kMips64DaddOvf, &cont);
  }
  FlagsContinuation cont;
@ -1780,7 +1801,7 @@ void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {

void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kMips64DsubOvf, &cont);
  }
  FlagsContinuation cont;
@ -1789,7 +1810,7 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {


void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
@ -1800,61 +1821,65 @@ void InstructionSelector::VisitWord64Equal(Node* const node) {


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

@ -21,6 +21,8 @@
  V(IfDefault)        \
  V(Merge)            \
  V(Deoptimize)       \
  V(DeoptimizeIf)     \
  V(DeoptimizeUnless) \
  V(Return)           \
  V(TailCall)         \
  V(Terminate)        \
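The V(...) lines above extend an X-macro list: each entry names a common operator exactly once, and every client file expands the list with its own definition of V, which is how the Typer's DECLARE_CASE blocks further down pick up the two new opcodes without any hand-written enumeration. A trimmed-down sketch of the pattern; the list name and entries here are illustrative, not V8's full list:

#include <iostream>

// Hypothetical, heavily trimmed opcode list in the style of the macros above.
#define CONTROL_OP_LIST(V) \
  V(Deoptimize)            \
  V(DeoptimizeIf)          \
  V(DeoptimizeUnless)      \
  V(Return)

// One expansion generates an enum...
enum Opcode {
#define DECLARE_ENUM(Name) k##Name,
  CONTROL_OP_LIST(DECLARE_ENUM)
#undef DECLARE_ENUM
};

// ...and a second expansion of the same list generates the name table.
const char* OpcodeName(Opcode op) {
  switch (op) {
#define DECLARE_CASE(Name) \
  case k##Name:            \
    return #Name;
    CONTROL_OP_LIST(DECLARE_CASE)
#undef DECLARE_CASE
  }
  return "unknown";
}

int main() {
  std::cout << OpcodeName(kDeoptimizeIf) << "\n";  // prints "DeoptimizeIf"
  return 0;
}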
@ -539,7 +539,7 @@ struct InliningPhase {
                                              data->common());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(),
    JSCallReducer call_reducer(data->jsgraph(),
                               data->info()->is_deoptimization_enabled()
                                   ? JSCallReducer::kDeoptimizationEnabled
                                   : JSCallReducer::kNoFlags,

@ -862,6 +862,12 @@ class RepresentationSelector {
      case IrOpcode::kHeapConstant:
        return VisitLeaf(node, NodeOutputInfo::AnyTagged());

      case IrOpcode::kDeoptimizeIf:
      case IrOpcode::kDeoptimizeUnless:
        ProcessInput(node, 0, UseInfo::Bool());
        ProcessInput(node, 1, UseInfo::AnyTagged());
        ProcessRemainingInputs(node, 2);
        break;
      case IrOpcode::kBranch:
        ProcessInput(node, 0, UseInfo::Bool());
        EnqueueInput(node, NodeProperties::FirstControlIndex(node));
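The lowering above pins down the input discipline for the two new nodes: input 0 is the condition and is consumed as a boolean, input 1 is the frame state and stays tagged, and ProcessRemainingInputs leaves everything from index 2 on (effect and control) alone. A toy shape check of that contract, with stand-in types that are not V8's:

#include <cassert>
#include <string>
#include <vector>

// Stand-in for an IR node: an opcode plus the kinds of its inputs.
struct ToyNode {
  std::string opcode;
  std::vector<std::string> input_kinds;  // e.g. {"bool", "tagged", "effect", "control"}
};

// Mirrors the two ProcessInput calls above: the condition must be usable
// as a bool, the frame state as any tagged value; later inputs are not
// constrained here.
bool HasDeoptInputShape(const ToyNode& n) {
  if (n.opcode != "DeoptimizeIf" && n.opcode != "DeoptimizeUnless") return false;
  return n.input_kinds.size() >= 2 && n.input_kinds[0] == "bool" &&
         n.input_kinds[1] == "tagged";
}

int main() {
  ToyNode n{"DeoptimizeIf", {"bool", "tagged", "effect", "control"}};
  assert(HasDeoptInputShape(n));
  return 0;
}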
@ -114,6 +114,8 @@ class Typer::Visitor : public Reducer {
      DECLARE_CASE(IfDefault)
      DECLARE_CASE(Merge)
      DECLARE_CASE(Deoptimize)
      DECLARE_CASE(DeoptimizeIf)
      DECLARE_CASE(DeoptimizeUnless)
      DECLARE_CASE(Return)
      DECLARE_CASE(TailCall)
      DECLARE_CASE(Terminate)
@ -158,6 +160,8 @@ class Typer::Visitor : public Reducer {
      DECLARE_CASE(IfDefault)
      DECLARE_CASE(Merge)
      DECLARE_CASE(Deoptimize)
      DECLARE_CASE(DeoptimizeIf)
      DECLARE_CASE(DeoptimizeUnless)
      DECLARE_CASE(Return)
      DECLARE_CASE(TailCall)
      DECLARE_CASE(Terminate)

@ -274,6 +274,11 @@ void Verifier::Visitor::Check(Node* node) {
      // Type is empty.
      CheckNotTyped(node);
      break;
    case IrOpcode::kDeoptimizeIf:
    case IrOpcode::kDeoptimizeUnless:
      // Type is empty.
      CheckNotTyped(node);
      break;
    case IrOpcode::kDeoptimize:
    case IrOpcode::kReturn:
    case IrOpcode::kThrow:

@ -399,8 +399,13 @@ static void VisitBinop(InstructionSelector* selector, Node* node,
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}

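The new else-branch above is the load-bearing part of the change: when the continuation is a deoptimization, the instruction is routed through EmitDeoptimize so the frame state travels with the compare itself, rather than emitting a branch to a separately scheduled deoptimization block; per the commit message, that is where the scheduling and register-allocation savings come from. A self-contained sketch of the dispatch, using toy types instead of V8's signatures:

#include <cstdio>
#include <vector>

struct Operand { int id; };      // toy stand-in for InstructionOperand
struct FrameState { int id; };   // toy stand-in for the frame state node

// Hypothetical selector with the two emit paths used above: the
// deoptimizing path carries the frame state along with the instruction,
// the plain path does not.
struct ToySelector {
  void Emit(int opcode, const std::vector<Operand>& outputs,
            const std::vector<Operand>& inputs) {
    std::printf("emit %d (%zu outputs, %zu inputs)\n", opcode, outputs.size(),
                inputs.size());
  }
  void EmitDeoptimize(int opcode, const std::vector<Operand>& outputs,
                      const std::vector<Operand>& inputs, FrameState fs) {
    std::printf("emit %d with eager deopt exit (frame state %d)\n", opcode,
                fs.id);
  }
};

// Mirrors the if (cont->IsDeoptimize()) dispatch in VisitBinop above.
void EmitWithContinuation(ToySelector& sel, int opcode, bool is_deoptimize,
                          const std::vector<Operand>& outputs,
                          const std::vector<Operand>& inputs, FrameState fs) {
  if (is_deoptimize) {
    sel.EmitDeoptimize(opcode, outputs, inputs, fs);
  } else {
    sel.Emit(opcode, outputs, inputs);
  }
}

int main() {
  ToySelector sel;
  EmitWithContinuation(sel, 42, /*is_deoptimize=*/true, {}, {{1}, {2}}, {7});
  return 0;
}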
@ -668,7 +673,7 @@ void InstructionSelector::VisitInt64Add(Node* node) {

void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    VisitBinop(this, node, kX64Add, &cont);
  }
  FlagsContinuation cont;
@ -708,7 +713,7 @@ void InstructionSelector::VisitInt64Sub(Node* node) {

void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub, &cont);
  }
  FlagsContinuation cont;
@ -1356,6 +1361,9 @@ void VisitCompareWithMemoryOperand(InstructionSelector* selector,
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    InstructionOperand output = g.DefineAsRegister(cont->result());
@ -1389,6 +1397,9 @@ void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
@ -1459,6 +1470,9 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                   g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()));
@ -1499,98 +1513,87 @@ void VisitFloat64Compare(InstructionSelector* selector, Node* node,
  VisitCompare(selector, opcode, right, left, cont, false);
}

} // namespace


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  X64OperandGenerator g(this);
  Node* user = branch;
  Node* value = branch->InputAt(0);

  FlagsContinuation cont(kNotEqual, tbranch, fbranch);

  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
    Int32BinopMatcher m(value);
    if (m.right().Is(0)) {
      user = value;
      value = m.left().node();
      cont.Negate();
    } else {
      break;
    }
  }

  // Try to combine the branch with a comparison.
  if (CanCover(user, value)) {
// Shared routine for word comparison against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont.OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont.OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThan:
        cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord64Equal: {
        cont.OverwriteAndNegateIfEqual(kEqual);
        cont->OverwriteAndNegateIfEqual(kEqual);
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          // Try to combine the branch with a comparison.
          Node* const user = m.node();
          Node* const value = m.left().node();
          if (CanCover(user, value)) {
          if (selector->CanCover(user, value)) {
            switch (value->opcode()) {
              case IrOpcode::kInt64Sub:
                return VisitWord64Compare(this, value, &cont);
                return VisitWord64Compare(selector, value, cont);
              case IrOpcode::kWord64And:
                return VisitWordCompare(this, value, kX64Test, &cont);
                return VisitWordCompare(selector, value, kX64Test, cont);
              default:
                break;
            }
          }
          return VisitCompareZero(this, value, kX64Cmp, &cont);
          return VisitCompareZero(selector, value, kX64Cmp, cont);
        }
        return VisitWord64Compare(this, value, &cont);
        return VisitWord64Compare(selector, value, cont);
      }
      case IrOpcode::kInt64LessThan:
        cont.OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(this, value, &cont);
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
@ -1602,20 +1605,20 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
        // *AFTER* this branch).
        Node* const node = value->InputAt(0);
        Node* const result = NodeProperties::FindProjection(node, 0);
        if (result == nullptr || IsDefined(result)) {
        if (result == nullptr || selector->IsDefined(result)) {
          switch (node->opcode()) {
            case IrOpcode::kInt32AddWithOverflow:
              cont.OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(this, node, kX64Add32, &cont);
              cont->OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(selector, node, kX64Add32, cont);
            case IrOpcode::kInt32SubWithOverflow:
              cont.OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(this, node, kX64Sub32, &cont);
              cont->OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(selector, node, kX64Sub32, cont);
            case IrOpcode::kInt64AddWithOverflow:
              cont.OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(this, node, kX64Add, &cont);
              cont->OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(selector, node, kX64Add, cont);
            case IrOpcode::kInt64SubWithOverflow:
              cont.OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(this, node, kX64Sub, &cont);
              cont->OverwriteAndNegateIfEqual(kOverflow);
              return VisitBinop(selector, node, kX64Sub, cont);
            default:
              break;
          }
@ -1623,22 +1626,42 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(this, value, kX64Cmp32, &cont);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(this, value, &cont);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(this, value, kX64Test32, &cont);
        return VisitWordCompare(selector, value, kX64Test32, cont);
      case IrOpcode::kWord64And:
        return VisitWordCompare(this, value, kX64Test, &cont);
        return VisitWordCompare(selector, value, kX64Test, cont);
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, emit compare against 0.
  VisitCompareZero(this, value, kX64Cmp32, &cont);
  VisitCompareZero(selector, value, kX64Cmp32, cont);
}

} // namespace

void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
@ -1673,7 +1696,7 @@ void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {

void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* user = node;
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* value = m.left().node();
@ -1708,31 +1731,33 @@ void InstructionSelector::VisitWord32Equal(Node* const node) {


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    // Try to combine the equality check with a comparison.
@ -1755,7 +1780,7 @@ void InstructionSelector::VisitWord64Equal(Node* const node) {

void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    VisitBinop(this, node, kX64Add32, &cont);
  }
  FlagsContinuation cont;
@ -1765,7 +1790,7 @@ void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {

void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub32, &cont);
  }
  FlagsContinuation cont;
@ -1774,61 +1799,67 @@ void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont(kUnorderedEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedGreaterThan, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kUnorderedEqual, node);
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedGreaterThan, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

@ -177,6 +177,9 @@
  # issue 4078:
  'allocation-site-info': [PASS, NO_VARIANTS],

  # TODO(turbofan): The escape analysis needs some investigation.
  'compiler/escape-analysis-deopt-5': [PASS, NO_VARIANTS],

  ##############################################################################
  # Too slow in debug mode with --stress-opt mode.
  'compiler/regress-stacktrace-methods': [PASS, ['mode == debug', SKIP]],