[turbofan] Support variable size argument removal in TF-generated functions

This is preparation for using TF to create builtins that handle a variable
number of arguments and have to remove these arguments dynamically from the
stack upon return.

The gist of the changes:
- Added an additional input to the Return node (the pop count) which specifies
  the number of stack slots to pop upon return, in addition to those specified
  by the Linkage of the compiled function.
- Removed the Tail -> Non-Tail fallback in the instruction selector. Since TF
  should now handle all tail-call cases except where the return value type
  differs, this fallback was not really useful, and it in fact caused unexpected
  behavior with variable-sized argument popping, since it wasn't possible to
  materialize a Return node with the right pop count from the TailCall without
  additional context.
- Modified existing Return generation to pass a constant zero as the additional
  pop argument, since the variable pop functionality is only needed by the new
  builtins (see the sketch below).
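
As a rough sketch (taken from the graph-builder changes below), the Return node
now carries the extra pop count as its first value input; a constant zero keeps
the old semantics:

  // Old shape: value, effect and control inputs only.
  //   graph()->NewNode(common()->Return(), value, effect, control);
  // New shape: the extra stack-slot pop count is the first value input.
  Node* pop_count = jsgraph()->Int32Constant(0);
  Node* ret = graph()->NewNode(common()->Return(), pop_count, value, effect, control);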

LOG=N

Review-Url: https://codereview.chromium.org/2446543002
Cr-Commit-Position: refs/heads/master@{#40699}
Author: danno, 2016-11-02 06:15:39 -07:00 (committed by Commit bot)
Parent: 588641f242
Commit: fe552636be
41 changed files with 530 additions and 292 deletions

View File

@ -2891,7 +2891,9 @@ compiler::Node* FastCloneShallowArrayStub::Generate(
{
Node* abort_id = assembler->SmiConstant(
Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
assembler->TailCallRuntime(Runtime::kAbort, context, abort_id);
assembler->CallRuntime(Runtime::kAbort, context, abort_id);
result.Bind(assembler->UndefinedConstant());
assembler->Goto(&return_result);
}
assembler->Bind(&correct_elements_map);
}

View File

@ -744,7 +744,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchStackPointer:
@ -1737,8 +1737,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int pop_count = static_cast<int>(descriptor->StackParameterCount());
@ -1762,19 +1761,33 @@ void CodeGenerator::AssembleReturn() {
unwinding_info_writer_.MarkBlockWillExit();
ArmOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ b(&return_label_);
return;
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ b(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
__ Ret(pop_count);
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
__ Drop(g.ToRegister(pop));
}
__ Drop(pop_count);
__ Ret();
}
void CodeGenerator::AssembleMove(InstructionOperand* source,

View File

@ -783,7 +783,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
@ -1854,8 +1854,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore registers.
@ -1874,16 +1873,25 @@ void CodeGenerator::AssembleReturn() {
unwinding_info_writer_.MarkBlockWillExit();
Arm64OperandConverter g(this, nullptr);
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ B(&return_label_);
return;
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ B(&return_label_);
return;
} else {
__ Bind(&return_label_);
AssembleDeconstructFrame();
if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
}
} else {
__ Bind(&return_label_);
AssembleDeconstructFrame();
if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
@ -1892,7 +1900,16 @@ void CodeGenerator::AssembleReturn() {
} else if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
__ Drop(pop_count);
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
__ Drop(pop_count);
} else {
Register pop_reg = g.ToRegister(pop);
__ Add(pop_reg, pop_reg, pop_count);
__ Drop(pop_reg);
}
if (descriptor->UseNativeStack()) {
__ AssertCspAligned();

View File

@ -3777,7 +3777,8 @@ Node* AstGraphBuilder::BuildReturn(Node* return_value) {
return_value =
NewNode(javascript()->CallRuntime(Runtime::kTraceExit), return_value);
}
Node* control = NewNode(common()->Return(), return_value);
Node* pop_node = jsgraph()->Int32Constant(0);
Node* control = NewNode(common()->Return(), pop_node, return_value);
UpdateControlDependencyToLeaveFunction(control);
return control;
}

View File

@ -1762,8 +1762,9 @@ void BytecodeGraphBuilder::VisitStackCheck() {
void BytecodeGraphBuilder::VisitReturn() {
BuildLoopExitsForFunctionExit();
Node* pop_node = jsgraph()->Int32Constant(0);
Node* control =
NewNode(common()->Return(), environment()->LookupAccumulator());
NewNode(common()->Return(), pop_node, environment()->LookupAccumulator());
MergeControlToLeaveFunction(control);
}

View File

@ -180,6 +180,10 @@ void CodeAssembler::Return(Node* value) {
return raw_assembler_->Return(value);
}
void CodeAssembler::PopAndReturn(Node* pop, Node* value) {
return raw_assembler_->PopAndReturn(pop, value);
}
void CodeAssembler::DebugBreak() { raw_assembler_->DebugBreak(); }
void CodeAssembler::Comment(const char* format, ...) {

View File

@ -236,6 +236,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
Node* Parameter(int value);
void Return(Node* value);
void PopAndReturn(Node* pop, Node* value);
void DebugBreak();
void Comment(const char* format, ...);
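
A hedged usage sketch of the new entry point, mirroring the PopAndReturn tests
added at the end of this CL (the wrapper function and parameter index are
illustrative only, not part of the change):

  // Hypothetical stub body: drop a caller-supplied number of stack arguments
  // (passed in as a Smi parameter) in addition to the descriptor's own count.
  void GenerateBody(CodeStubAssembler* m) {
    Node* extra_args = m->SmiUntag(m->Parameter(1));     // slots to pop
    Node* result = m->SmiConstant(Smi::FromInt(1234));   // value to return
    m->PopAndReturn(extra_args, result);
  }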

View File

@ -129,7 +129,7 @@ class CodeGenerator final : public GapResolver::Assembler {
// Generates an architecture-specific, descriptor-specific return sequence
// to tear down a stack frame.
void AssembleReturn();
void AssembleReturn(InstructionOperand* pop);
void AssembleDeconstructFrame();

View File

@ -284,7 +284,7 @@ Reduction CommonOperatorReducer::ReducePhi(Node* node) {
Reduction CommonOperatorReducer::ReduceReturn(Node* node) {
DCHECK_EQ(IrOpcode::kReturn, node->opcode());
Node* const value = node->InputAt(0);
Node* const value = node->InputAt(1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* const control = NodeProperties::GetControlInput(node);
bool changed = false;
@ -311,8 +311,9 @@ Reduction CommonOperatorReducer::ReduceReturn(Node* node) {
// {end} as revisit, because we mark {node} as {Dead} below, which was
// previously connected to {end}, so we know for sure that at some point
// the reducer logic will visit {end} again.
Node* ret = graph()->NewNode(common()->Return(), value->InputAt(i),
effect->InputAt(i), control->InputAt(i));
Node* ret = graph()->NewNode(common()->Return(), node->InputAt(0),
value->InputAt(i), effect->InputAt(i),
control->InputAt(i));
NodeProperties::MergeControlToEnd(graph(), common(), ret);
}
// Mark the merge {control} and return {node} as {dead}.

View File

@ -256,8 +256,8 @@ OsrGuardType OsrGuardTypeOf(Operator const* op) {
#define CACHED_RETURN_LIST(V) \
V(1) \
V(2) \
V(3)
V(3) \
V(4)
#define CACHED_END_LIST(V) \
V(1) \
@ -396,16 +396,16 @@ struct CommonOperatorGlobalCache final {
CACHED_END_LIST(CACHED_END)
#undef CACHED_END
template <size_t kInputCount>
template <size_t kValueInputCount>
struct ReturnOperator final : public Operator {
ReturnOperator()
: Operator( // --
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
kInputCount, 1, 1, 0, 0, 1) {} // counts
: Operator( // --
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
kValueInputCount + 1, 1, 1, 0, 0, 1) {} // counts
};
#define CACHED_RETURN(input_count) \
ReturnOperator<input_count> kReturn##input_count##Operator;
#define CACHED_RETURN(value_input_count) \
ReturnOperator<value_input_count> kReturn##value_input_count##Operator;
CACHED_RETURN_LIST(CACHED_RETURN)
#undef CACHED_RETURN
@ -632,7 +632,6 @@ const Operator* CommonOperatorBuilder::End(size_t control_input_count) {
0, 0, control_input_count, 0, 0, 0); // counts
}
const Operator* CommonOperatorBuilder::Return(int value_input_count) {
switch (value_input_count) {
#define CACHED_RETURN(input_count) \
@ -647,7 +646,7 @@ const Operator* CommonOperatorBuilder::Return(int value_input_count) {
return new (zone()) Operator( //--
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
value_input_count, 1, 1, 0, 0, 1); // counts
value_input_count + 1, 1, 1, 0, 0, 1); // counts
}

View File

@ -649,7 +649,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), esp);
@ -1981,8 +1981,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
const RegList saves = descriptor->CalleeSavedRegisters();
@ -1994,22 +1993,41 @@ void CodeGenerator::AssembleReturn() {
}
}
// Might need ecx for scratch if pop_size is too big or if there is a variable
// pop count.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit());
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
IA32OperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
// Canonicalize JSFunction return sites for now if they always have the same
// number of return args.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
// Might need ecx for scratch if pop_size is too big.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & edx.bit());
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit());
__ Ret(static_cast<int>(pop_size), ecx);
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_size += g.ToConstant(pop).ToInt32() * kPointerSize;
__ Ret(static_cast<int>(pop_size), ecx);
} else {
Register pop_reg = g.ToRegister(pop);
Register scratch_reg = pop_reg.is(ecx) ? edx : ecx;
__ pop(scratch_reg);
__ lea(esp, Operand(esp, pop_reg, times_4, static_cast<int>(pop_size)));
__ jmp(scratch_reg);
}
}

View File

@ -1889,106 +1889,63 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
CallDescriptor* caller = linkage()->GetIncomingDescriptor();
if (caller->CanTailCall(node)) {
const CallDescriptor* callee = CallDescriptorOf(node->op());
int stack_param_delta = callee->GetStackParameterDelta(caller);
CallBuffer buffer(zone(), descriptor, nullptr);
DCHECK(caller->CanTailCall(node));
const CallDescriptor* callee = CallDescriptorOf(node->op());
int stack_param_delta = callee->GetStackParameterDelta(caller);
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags(kCallCodeImmediate | kCallTail);
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags, stack_param_delta);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags(kCallCodeImmediate | kCallTail);
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags, stack_param_delta);
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
InstructionOperandVector temps(zone());
if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObjectFromJSFunction;
break;
case CallDescriptor::kCallJSFunction:
opcode = kArchTailCallJSFunctionFromJSFunction;
break;
default:
UNREACHABLE();
return;
}
int temps_count = GetTempsCountForTailCallFromJSFunction();
for (int i = 0; i < temps_count; i++) {
temps.push_back(g.TempRegister());
}
} else {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObject;
break;
case CallDescriptor::kCallAddress:
opcode = kArchTailCallAddress;
break;
default:
UNREACHABLE();
return;
}
}
opcode |= MiscField::encode(descriptor->flags());
Emit(kArchPrepareTailCall, g.NoOutput());
int first_unused_stack_slot =
(V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0) +
stack_param_delta;
buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));
// Emit the tailcall instruction.
Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
&buffer.instruction_args.front(), temps.size(),
temps.empty() ? nullptr : &temps.front());
} else {
FrameStateDescriptor* frame_state_descriptor =
descriptor->NeedsFrameState()
? GetFrameStateDescriptor(
node->InputAt(static_cast<int>(descriptor->InputCount())))
: nullptr;
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags = kCallCodeImmediate;
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags);
EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
InstructionOperandVector temps(zone());
if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchCallCodeObject;
opcode = kArchTailCallCodeObjectFromJSFunction;
break;
case CallDescriptor::kCallJSFunction:
opcode = kArchCallJSFunction;
opcode = kArchTailCallJSFunctionFromJSFunction;
break;
default:
UNREACHABLE();
return;
}
int temps_count = GetTempsCountForTailCallFromJSFunction();
for (int i = 0; i < temps_count; i++) {
temps.push_back(g.TempRegister());
}
} else {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObject;
break;
case CallDescriptor::kCallAddress:
opcode = kArchTailCallAddress;
break;
default:
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->flags());
// Emit the call instruction.
size_t output_count = buffer.outputs.size();
auto* outputs = &buffer.outputs.front();
Instruction* call_instr =
Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
&buffer.instruction_args.front());
if (instruction_selection_failed()) return;
call_instr->MarkAsCall();
Emit(kArchRet, 0, nullptr, output_count, outputs);
}
opcode |= MiscField::encode(descriptor->flags());
Emit(kArchPrepareTailCall, g.NoOutput());
int first_unused_stack_slot =
(V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0) +
stack_param_delta;
buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));
// Emit the tailcall instruction.
Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
&buffer.instruction_args.front(), temps.size(),
temps.empty() ? nullptr : &temps.front());
}
@ -1998,20 +1955,22 @@ void InstructionSelector::VisitGoto(BasicBlock* target) {
Emit(kArchJmp, g.NoOutput(), g.Label(target));
}
void InstructionSelector::VisitReturn(Node* ret) {
OperandGenerator g(this);
if (linkage()->GetIncomingDescriptor()->ReturnCount() == 0) {
Emit(kArchRet, g.NoOutput());
} else {
const int ret_count = ret->op()->ValueInputCount();
auto value_locations = zone()->NewArray<InstructionOperand>(ret_count);
for (int i = 0; i < ret_count; ++i) {
value_locations[i] =
g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i));
}
Emit(kArchRet, 0, nullptr, ret_count, value_locations);
const int input_count = linkage()->GetIncomingDescriptor()->ReturnCount() == 0
? 1
: ret->op()->ValueInputCount();
DCHECK_GE(input_count, 1);
auto value_locations = zone()->NewArray<InstructionOperand>(input_count);
Node* pop_count = ret->InputAt(0);
value_locations[0] = pop_count->opcode() == IrOpcode::kInt32Constant
? g.UseImmediate(pop_count)
: g.UseRegister(pop_count);
for (int i = 1; i < input_count; ++i) {
value_locations[i] =
g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i - 1));
}
Emit(kArchRet, 0, nullptr, input_count, value_locations);
}
Instruction* InstructionSelector::EmitDeoptimize(InstructionCode opcode,

View File

@ -184,7 +184,7 @@ Reduction JSInliner::InlineCall(Node* call, Node* new_target, Node* context,
for (Node* const input : end->inputs()) {
switch (input->opcode()) {
case IrOpcode::kReturn:
values.push_back(NodeProperties::GetValueInput(input, 0));
values.push_back(NodeProperties::GetValueInput(input, 1));
effects.push_back(NodeProperties::GetEffectInput(input));
controls.push_back(NodeProperties::GetControlInput(input));
break;

View File

@ -709,7 +709,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
@ -1960,8 +1960,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int pop_count = static_cast<int>(descriptor->StackParameterCount());
@ -1977,18 +1976,32 @@ void CodeGenerator::AssembleReturn() {
__ MultiPopFPU(saves_fpu);
}
MipsOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
Register pop_reg = g.ToRegister(pop);
__ sll(pop_reg, pop_reg, kPointerSizeLog2);
__ Addu(sp, sp, Operand(pop_reg));
}
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {

View File

@ -719,7 +719,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
@ -2278,8 +2278,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore GP registers.
@ -2294,19 +2293,33 @@ void CodeGenerator::AssembleReturn() {
__ MultiPopFPU(saves_fpu);
}
MipsOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
Register pop_reg = g.ToRegister(pop);
__ dsll(pop_reg, pop_reg, kPointerSizeLog2);
__ Daddu(sp, sp, pop_reg);
}
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {

View File

@ -120,23 +120,46 @@ void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
}
void RawMachineAssembler::Return(Node* value) {
Node* ret = MakeNode(common()->Return(), 1, &value);
Node* values[] = {Int32Constant(0), value};
Node* ret = MakeNode(common()->Return(1), 2, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::Return(Node* v1, Node* v2) {
Node* values[] = {v1, v2};
Node* ret = MakeNode(common()->Return(2), 2, values);
Node* values[] = {Int32Constant(0), v1, v2};
Node* ret = MakeNode(common()->Return(2), 3, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
Node* values[] = {v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 3, values);
Node* values[] = {Int32Constant(0), v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 4, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
Node* values[] = {pop, value};
Node* ret = MakeNode(common()->Return(1), 2, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
Node* values[] = {pop, v1, v2};
Node* ret = MakeNode(common()->Return(2), 3, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
Node* v3) {
Node* values[] = {pop, v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 4, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}

View File

@ -774,6 +774,9 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
void Return(Node* value);
void Return(Node* v1, Node* v2);
void Return(Node* v1, Node* v2, Node* v3);
void PopAndReturn(Node* pop, Node* value);
void PopAndReturn(Node* pop, Node* v1, Node* v2);
void PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3);
void Bind(RawMachineLabel* label);
void Deoptimize(Node* state);
void DebugBreak();

View File

@ -749,6 +749,23 @@ class RepresentationSelector {
}
}
void VisitReturn(Node* node) {
int tagged_limit = node->op()->ValueInputCount() +
OperatorProperties::GetContextInputCount(node->op()) +
OperatorProperties::GetFrameStateInputCount(node->op());
// Visit integer slot count to pop
ProcessInput(node, 0, UseInfo::TruncatingWord32());
// Visit value, context and frame state inputs as tagged.
for (int i = 1; i < tagged_limit; i++) {
ProcessInput(node, i, UseInfo::AnyTagged());
}
// Only enqueue other inputs (effects, control).
for (int i = tagged_limit; i < node->InputCount(); i++) {
EnqueueInput(node, i);
}
}
// Helper for an unused node.
void VisitUnused(Node* node) {
int value_count = node->op()->ValueInputCount() +
@ -2448,10 +2465,14 @@ class RepresentationSelector {
case IrOpcode::kOsrGuard:
return VisitOsrGuard(node);
case IrOpcode::kReturn:
VisitReturn(node);
// Assume the output is tagged.
return SetOutput(node, MachineRepresentation::kTagged);
// Operators with all inputs tagged and no or tagged output have uniform
// handling.
case IrOpcode::kEnd:
case IrOpcode::kReturn:
case IrOpcode::kIfSuccess:
case IrOpcode::kIfException:
case IrOpcode::kIfTrue:

View File

@ -7,6 +7,7 @@
#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
namespace v8 {
@ -18,12 +19,15 @@ Reduction TailCallOptimization::Reduce(Node* node) {
// The value which is returned must be the result of a potential tail call,
// there must be no try/catch/finally around the Call, and there must be no
// other effect between the Call and the Return nodes.
Node* const call = NodeProperties::GetValueInput(node, 0);
Node* const call = NodeProperties::GetValueInput(node, 1);
if (call->opcode() == IrOpcode::kCall &&
CallDescriptorOf(call->op())->SupportsTailCalls() &&
NodeProperties::GetEffectInput(node) == call &&
!NodeProperties::IsExceptionalCall(call)) {
Node* const control = NodeProperties::GetControlInput(node);
// Ensure that no additional arguments are being popped other than those in
// the CallDescriptor, otherwise the tail call transformation is invalid.
DCHECK_EQ(0, Int32Matcher(NodeProperties::GetValueInput(node, 0)).Value());
if (control->opcode() == IrOpcode::kIfSuccess &&
call->OwnedBy(node, control) && control->OwnedBy(node)) {
// Furthermore, control has to flow via an IfSuccess from the Call, so
@ -62,9 +66,10 @@ Reduction TailCallOptimization::Reduce(Node* node) {
// |
DCHECK_EQ(call, NodeProperties::GetControlInput(control, 0));
DCHECK_EQ(3, node->InputCount());
DCHECK_EQ(4, node->InputCount());
node->ReplaceInput(0, NodeProperties::GetEffectInput(call));
node->ReplaceInput(1, NodeProperties::GetControlInput(call));
node->RemoveInput(3);
node->RemoveInput(2);
for (int index = 0; index < call->op()->ValueInputCount(); ++index) {
node->InsertInput(graph()->zone(), index,

View File

@ -281,7 +281,8 @@ class WasmTrapHelper : public ZoneObject {
} else {
// End the control flow with returning 0xdeadbeef
Node* ret_value = GetTrapValue(builder_->GetFunctionSignature());
end = graph()->NewNode(jsgraph()->common()->Return(), ret_value,
end = graph()->NewNode(jsgraph()->common()->Return(),
jsgraph()->Int32Constant(0), ret_value,
*effect_ptr, *control_ptr);
}
@ -1041,11 +1042,13 @@ Node* WasmGraphBuilder::Return(unsigned count, Node** vals) {
DCHECK_NOT_NULL(*control_);
DCHECK_NOT_NULL(*effect_);
Node** buf = Realloc(vals, count, count + 2);
buf[count] = *effect_;
buf[count + 1] = *control_;
Node** buf = Realloc(vals, count, count + 3);
memmove(buf + 1, buf, sizeof(void*) * count);
buf[0] = jsgraph()->Int32Constant(0);
buf[count + 1] = *effect_;
buf[count + 2] = *control_;
Node* ret =
graph()->NewNode(jsgraph()->common()->Return(count), count + 2, vals);
graph()->NewNode(jsgraph()->common()->Return(count), count + 3, buf);
MergeControlToEnd(jsgraph(), ret);
return ret;
@ -2675,8 +2678,8 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(Handle<Code> wasm_code,
}
Node* jsval = ToJS(
retval, sig->return_count() == 0 ? wasm::kAstStmt : sig->GetReturn());
Node* ret =
graph()->NewNode(jsgraph()->common()->Return(), jsval, call, start);
Node* ret = graph()->NewNode(jsgraph()->common()->Return(),
jsgraph()->Int32Constant(0), jsval, call, start);
MergeControlToEnd(jsgraph(), ret);
}
@ -2787,14 +2790,16 @@ void WasmGraphBuilder::BuildWasmToJSWrapper(Handle<JSReceiver> target,
Node* val =
FromJS(call, HeapConstant(isolate->native_context()),
sig->return_count() == 0 ? wasm::kAstStmt : sig->GetReturn());
Node* pop_size = jsgraph()->Int32Constant(0);
if (jsgraph()->machine()->Is32() && sig->return_count() > 0 &&
sig->GetReturn() == wasm::kAstI64) {
ret = graph()->NewNode(jsgraph()->common()->Return(), val,
ret = graph()->NewNode(jsgraph()->common()->Return(), pop_size, val,
graph()->NewNode(jsgraph()->machine()->Word32Sar(),
val, jsgraph()->Int32Constant(31)),
call, start);
} else {
ret = graph()->NewNode(jsgraph()->common()->Return(), val, call, start);
ret = graph()->NewNode(jsgraph()->common()->Return(), pop_size, val, call,
start);
}
MergeControlToEnd(jsgraph(), ret);

View File

@ -4,6 +4,8 @@
#include "src/compiler/code-generator.h"
#include <limits>
#include "src/compilation-info.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
@ -918,7 +920,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn();
AssembleReturn(instr->InputAt(0));
break;
case kArchStackPointer:
__ movq(i.OutputRegister(), rsp);
@ -2447,8 +2449,7 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn() {
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore registers.
@ -2477,22 +2478,41 @@ void CodeGenerator::AssembleReturn() {
unwinding_info_writer_.MarkBlockWillExit();
// Might need rcx for scratch if pop_size is too big or if there is a variable
// pop count.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rdx.bit());
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
X64OperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
// Might need rcx for scratch if pop_size is too big.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
__ Ret(static_cast<int>(pop_size), rcx);
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_size += g.ToConstant(pop).ToInt32() * kPointerSize;
CHECK_LT(pop_size, std::numeric_limits<int>::max());
__ Ret(static_cast<int>(pop_size), rcx);
} else {
Register pop_reg = g.ToRegister(pop);
Register scratch_reg = pop_reg.is(rcx) ? rdx : rcx;
__ popq(scratch_reg);
__ leaq(rsp, Operand(rsp, pop_reg, times_8, static_cast<int>(pop_size)));
__ jmp(scratch_reg);
}
}

View File

@ -86,8 +86,9 @@ class GraphBuilderTester : public HandleAndZoneScope,
}
void Return(Node* value) {
return_ =
graph()->NewNode(common()->Return(), value, effect_, graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
return_ = graph()->NewNode(common()->Return(), zero, value, effect_,
graph()->start());
effect_ = NULL;
}

View File

@ -694,6 +694,7 @@ TEST(RemoveToNumberEffects) {
JSTypedLoweringTester R;
Node* effect_use = NULL;
Node* zero = R.graph.NewNode(R.common.Int32Constant(0));
for (int i = 0; i < 10; i++) {
Node* p0 = R.Parameter(Type::Number());
Node* ton = R.Unop(R.javascript.ToNumber(), p0);
@ -724,10 +725,12 @@ TEST(RemoveToNumberEffects) {
R.context(), frame_state, ton, R.start());
break;
case 5:
effect_use = R.graph.NewNode(R.common.Return(), p0, ton, R.start());
effect_use =
R.graph.NewNode(R.common.Return(), zero, p0, ton, R.start());
break;
case 6:
effect_use = R.graph.NewNode(R.common.Return(), ton, ton, R.start());
effect_use =
R.graph.NewNode(R.common.Return(), zero, ton, ton, R.start());
}
R.CheckEffectInput(R.start(), ton);

View File

@ -116,7 +116,8 @@ class LoopFinderTester : HandleAndZoneScope {
}
Node* Return(Node* val, Node* effect, Node* control) {
Node* ret = graph.NewNode(common.Return(), val, effect, control);
Node* zero = graph.NewNode(common.Int32Constant(0));
Node* ret = graph.NewNode(common.Return(), zero, val, effect, control);
end->ReplaceInput(0, ret);
return ret;
}
@ -696,7 +697,8 @@ TEST(LaEdgeMatrix1) {
Node* if_true = t.graph.NewNode(t.common.IfTrue(), branch);
Node* exit = t.graph.NewNode(t.common.IfFalse(), branch);
loop->ReplaceInput(1, if_true);
Node* ret = t.graph.NewNode(t.common.Return(), p3, t.start, exit);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, p3, t.start, exit);
t.graph.SetEnd(ret);
Node* choices[] = {p1, phi, cond};
@ -743,7 +745,9 @@ void RunEdgeMatrix2(int i) {
loop2->ReplaceInput(1, if_true2);
loop1->ReplaceInput(1, exit2);
Node* ret = t.graph.NewNode(t.common.Return(), phi1, t.start, exit1);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret =
t.graph.NewNode(t.common.Return(), zero, phi1, t.start, exit1);
t.graph.SetEnd(ret);
Node* choices[] = {p1, phi1, cond1, phi2, cond2};
@ -830,7 +834,8 @@ void RunEdgeMatrix3(int c1a, int c1b, int c1c, // line break
loop2->ReplaceInput(1, exit3);
loop1->ReplaceInput(1, exit2);
Node* ret = t.graph.NewNode(t.common.Return(), phi1, t.start, exit1);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, phi1, t.start, exit1);
t.graph.SetEnd(ret);
// Mutate the graph according to the edge choices.
@ -943,7 +948,8 @@ static void RunManyChainedLoops_i(int count) {
last = exit;
}
Node* ret = t.graph.NewNode(t.common.Return(), t.p0, t.start, last);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, t.p0, t.start, last);
t.graph.SetEnd(ret);
// Verify loops.
@ -962,6 +968,7 @@ static void RunManyNestedLoops_i(int count) {
Node* entry = t.start;
// Build loops.
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
for (int i = 0; i < count; i++) {
Node* loop = t.graph.NewNode(t.common.Loop(2), entry, t.start);
Node* phi = t.graph.NewNode(t.common.Phi(MachineRepresentation::kWord32, 2),
@ -981,7 +988,7 @@ static void RunManyNestedLoops_i(int count) {
outer->ReplaceInput(1, exit);
} else {
// outer loop.
Node* ret = t.graph.NewNode(t.common.Return(), t.p0, t.start, exit);
Node* ret = t.graph.NewNode(t.common.Return(), zero, t.p0, t.start, exit);
t.graph.SetEnd(ret);
}
outer = loop;

View File

@ -84,8 +84,8 @@ class RepresentationChangerTester : public HandleAndZoneScope,
}
Node* Return(Node* input) {
Node* n = graph()->NewNode(common()->Return(), input, graph()->start(),
graph()->start());
Node* n = graph()->NewNode(common()->Return(), jsgraph()->Int32Constant(0),
input, graph()->start(), graph()->start());
return n;
}

View File

@ -293,7 +293,9 @@ Handle<Code> WrapWithCFunction(Handle<Code> inner, CallDescriptor* desc) {
// Build the call and return nodes.
Node* call =
b.graph()->NewNode(b.common()->Call(desc), param_count + 3, args);
Node* ret = b.graph()->NewNode(b.common()->Return(), call, call, start);
Node* zero = b.graph()->NewNode(b.common()->Int32Constant(0));
Node* ret =
b.graph()->NewNode(b.common()->Return(), zero, call, call, start);
b.graph()->SetEnd(ret);
}
@ -531,7 +533,9 @@ static void TestInt32Sub(CallDescriptor* desc) {
Node* p0 = b.graph()->NewNode(b.common()->Parameter(0), start);
Node* p1 = b.graph()->NewNode(b.common()->Parameter(1), start);
Node* add = b.graph()->NewNode(b.machine()->Int32Sub(), p0, p1);
Node* ret = b.graph()->NewNode(b.common()->Return(), add, start, start);
Node* zero = b.graph()->NewNode(b.common()->Int32Constant(0));
Node* ret =
b.graph()->NewNode(b.common()->Return(), zero, add, start, start);
b.graph()->SetEnd(ret);
}

View File

@ -47,10 +47,11 @@ TEST(RunStringLengthStub) {
Node* vectorParam = graph.NewNode(common.Parameter(4), start);
Node* theCode = graph.NewNode(common.HeapConstant(code));
Node* dummyContext = graph.NewNode(common.NumberConstant(0.0));
Node* zero = graph.NewNode(common.Int32Constant(0));
Node* call =
graph.NewNode(common.Call(descriptor), theCode, receiverParam, nameParam,
slotParam, vectorParam, dummyContext, start, start);
Node* ret = graph.NewNode(common.Return(), call, call, start);
Node* ret = graph.NewNode(common.Return(), zero, call, call, start);
Node* end = graph.NewNode(common.End(1), ret);
graph.SetStart(start);
graph.SetEnd(end);

View File

@ -1729,5 +1729,61 @@ TEST(AllocateNameDictionary) {
}
}
TEST(PopAndReturnConstant) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 4;
const int kNumProgramaticParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams - kNumProgramaticParams);
// Call a function that pops |kNumProgramaticParams| parameters in addition to
// those specified by the static descriptor. |kNumProgramaticParams| is
// specified as a constant.
m.PopAndReturn(m.Int32Constant(kNumProgramaticParams),
m.SmiConstant(Smi::FromInt(1234)));
Handle<Code> code = m.GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result;
for (int test_count = 0; test_count < 100; ++test_count) {
result = ft.Call(isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(1234), isolate),
isolate->factory()->undefined_value(),
isolate->factory()->undefined_value())
.ToHandleChecked();
CHECK_EQ(1234, Handle<Smi>::cast(result)->value());
}
}
TEST(PopAndReturnVariable) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 4;
const int kNumProgramaticParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams - kNumProgramaticParams);
// Call a function that pops |kNumProgramaticParams| parameters in addition to
// those specified by the static descriptor. |kNumProgramaticParams| is
// passed in as a parameter to the function so that it can't be recognized as
// a constant.
m.PopAndReturn(m.SmiUntag(m.Parameter(1)), m.SmiConstant(Smi::FromInt(1234)));
Handle<Code> code = m.GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result;
for (int test_count = 0; test_count < 100; ++test_count) {
result = ft.Call(isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(1234), isolate),
isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(kNumProgramaticParams), isolate))
.ToHandleChecked();
CHECK_EQ(1234, Handle<Smi>::cast(result)->value());
}
}
} // namespace internal
} // namespace v8

View File

@ -391,8 +391,9 @@ class WasmFunctionWrapper : public HandleAndZoneScope,
graph()->start()),
graph()->NewNode(common()->Int32Constant(0)), call, effect,
graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* r = graph()->NewNode(
common()->Return(),
common()->Return(), zero,
graph()->NewNode(common()->Int32Constant(WASM_WRAPPER_RETURN_VALUE)),
effect, graph()->start());
graph()->SetEnd(graph()->NewNode(common()->End(2), r, graph()->start()));

View File

@ -65,8 +65,9 @@ TEST_F(BranchEliminationTest, NestedBranchSameTrue) {
graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
inner_phi, Int32Constant(3), outer_merge);
Node* ret = graph()->NewNode(common()->Return(), outer_phi, graph()->start(),
outer_merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, outer_phi,
graph()->start(), outer_merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -106,8 +107,9 @@ TEST_F(BranchEliminationTest, NestedBranchSameFalse) {
graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
Int32Constant(1), inner_phi, outer_merge);
Node* ret = graph()->NewNode(common()->Return(), outer_phi, graph()->start(),
outer_merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, outer_phi,
graph()->start(), outer_merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -144,8 +146,9 @@ TEST_F(BranchEliminationTest, BranchAfterDiamond) {
Node* add = graph()->NewNode(machine()->Int32Add(), phi1, phi2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), add, graph()->start(), merge2);
graph()->NewNode(common()->Return(), zero, add, graph()->start(), merge2);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -176,8 +179,9 @@ TEST_F(BranchEliminationTest, BranchInsideLoopSame) {
Node* inner_branch = graph()->NewNode(common()->Branch(), condition, loop);
Node* inner_if_true = graph()->NewNode(common()->IfTrue(), inner_branch);
Node* ret1 = graph()->NewNode(common()->Return(), Int32Constant(2), effect,
inner_if_true);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret1 = graph()->NewNode(common()->Return(), zero, Int32Constant(2),
effect, inner_if_true);
Node* inner_if_false = graph()->NewNode(common()->IfFalse(), inner_branch);
loop->AppendInput(zone(), inner_if_false);
@ -191,7 +195,7 @@ TEST_F(BranchEliminationTest, BranchInsideLoopSame) {
Node* outer_ephi = graph()->NewNode(common()->EffectPhi(2), effect,
graph()->start(), outer_merge);
Node* ret2 = graph()->NewNode(common()->Return(), Int32Constant(1),
Node* ret2 = graph()->NewNode(common()->Return(), zero, Int32Constant(1),
outer_ephi, outer_merge);
Node* terminate = graph()->NewNode(common()->Terminate(), effect, loop);

View File

@ -361,7 +361,9 @@ TEST_F(CommonOperatorReducerTest, ReturnWithPhiAndEffectPhiAndMerge) {
Node* ephi = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, merge);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, merge);
Node* ret = graph()->NewNode(common()->Return(), phi, ephi, merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, ephi, merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
StrictMock<MockAdvancedReducerEditor> editor;
EXPECT_CALL(editor, Replace(merge, IsDead()));

View File

@ -192,10 +192,10 @@ TEST_F(CommonOperatorTest, Return) {
const Operator* const op = common()->Return(input_count);
EXPECT_EQ(IrOpcode::kReturn, op->opcode());
EXPECT_EQ(Operator::kNoThrow, op->properties());
EXPECT_EQ(input_count, op->ValueInputCount());
EXPECT_EQ(input_count + 1, op->ValueInputCount());
EXPECT_EQ(1, op->EffectInputCount());
EXPECT_EQ(1, op->ControlInputCount());
EXPECT_EQ(2 + input_count, OperatorProperties::GetTotalInputCount(op));
EXPECT_EQ(3 + input_count, OperatorProperties::GetTotalInputCount(op));
EXPECT_EQ(0, op->ValueOutputCount());
EXPECT_EQ(0, op->EffectOutputCount());
EXPECT_EQ(1, op->ControlOutputCount());

View File

@ -60,7 +60,8 @@ TEST_F(EffectControlLinearizerTest, SimpleLoad) {
Node* load = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForHeapNumberValue()), heap_number,
graph()->start(), graph()->start());
Node* ret = graph()->NewNode(common()->Return(), load, graph()->start(),
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, load, graph()->start(),
graph()->start());
// Build the basic block structure.
@ -105,8 +106,9 @@ TEST_F(EffectControlLinearizerTest, DiamondLoad) {
Node* phi = graph()->NewNode(
common()->Phi(MachineRepresentation::kFloat64, 2), vtrue, vfalse, merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), phi, graph()->start(), merge);
graph()->NewNode(common()->Return(), zero, phi, graph()->start(), merge);
// Build the basic block structure.
BasicBlock* start = schedule.start();
@ -206,8 +208,9 @@ TEST_F(EffectControlLinearizerTest, FloatingDiamondsControlWiring) {
Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
Node* merge2 = graph()->NewNode(common()->Merge(2), if_true2, if_false2);
Node* ret =
graph()->NewNode(common()->Return(), call, graph()->start(), if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, call, graph()->start(),
if_success);
// Build the basic block structure.
BasicBlock* start = schedule.start();
@ -289,7 +292,9 @@ TEST_F(EffectControlLinearizerTest, LoopLoad) {
simplified()->LoadField(AccessBuilder::ForHeapNumberValue()), heap_number,
graph()->start(), loop);
Node* ret = graph()->NewNode(common()->Return(), load, effect_phi, if_true);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, load, effect_phi, if_true);
// Build the basic block structure.
BasicBlock* start = schedule.start();

View File

@ -119,8 +119,9 @@ class EscapeAnalysisTest : public TypedGraphTest {
if (!control) {
control = control_;
}
return control_ =
graph()->NewNode(common()->Return(), value, effect, control);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
return control_ = graph()->NewNode(common()->Return(), zero, value, effect,
control);
}
void EndGraph() {
@ -224,7 +225,7 @@ TEST_F(EscapeAnalysisTest, StraightNonEscape) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
@ -250,7 +251,7 @@ TEST_F(EscapeAnalysisTest, StraightNonEscapeNonConstStore) {
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
@ -272,7 +273,7 @@ TEST_F(EscapeAnalysisTest, StraightEscape) {
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
@ -300,7 +301,7 @@ TEST_F(EscapeAnalysisTest, StoreLoadEscape) {
Transformation();
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 1));
}
@ -333,7 +334,7 @@ TEST_F(EscapeAnalysisTest, BranchNonEscape) {
Transformation();
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 1));
}
@ -365,7 +366,7 @@ TEST_F(EscapeAnalysisTest, BranchEscapeOne) {
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
}
@ -400,7 +401,7 @@ TEST_F(EscapeAnalysisTest, BranchEscapeThroughStore) {
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
}
@ -425,7 +426,7 @@ TEST_F(EscapeAnalysisTest, DanglingLoadOrder) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
}
@ -461,7 +462,7 @@ TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(1, object_state->op()->ValueInputCount());
@ -501,7 +502,7 @@ TEST_F(EscapeAnalysisTest, DISABLED_DeoptReplacementIdentity) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);

View File

@ -290,12 +290,13 @@ TEST_F(AdvancedReducerTest, ReplaceWithValue_ValueUse) {
CommonOperatorBuilder common(zone());
Node* node = graph()->NewNode(&kMockOperator);
Node* start = graph()->NewNode(common.Start(1));
Node* use_value = graph()->NewNode(common.Return(), node, start, start);
Node* zero = graph()->NewNode(common.Int32Constant(0));
Node* use_value = graph()->NewNode(common.Return(), zero, node, start, start);
Node* replacement = graph()->NewNode(&kMockOperator);
GraphReducer graph_reducer(zone(), graph(), nullptr);
ReplaceWithValueReducer r(&graph_reducer);
r.ReplaceWithValue(node, replacement);
EXPECT_EQ(replacement, use_value->InputAt(0));
EXPECT_EQ(replacement, use_value->InputAt(1));
EXPECT_EQ(0, node->UseCount());
EXPECT_EQ(1, replacement->UseCount());
EXPECT_THAT(replacement->uses(), ElementsAre(use_value));

View File

@ -166,7 +166,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnFloat32Constant) {
ASSERT_EQ(InstructionOperand::CONSTANT, s[0]->OutputAt(0)->kind());
EXPECT_FLOAT_EQ(kValue, s.ToFloat32(s[0]->OutputAt(0)));
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(1U, s[1]->InputCount());
EXPECT_EQ(2U, s[1]->InputCount());
}
@ -178,7 +178,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnParameter) {
EXPECT_EQ(kArchNop, s[0]->arch_opcode());
ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(1U, s[1]->InputCount());
EXPECT_EQ(2U, s[1]->InputCount());
}
@ -192,7 +192,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnZero) {
EXPECT_EQ(InstructionOperand::CONSTANT, s[0]->OutputAt(0)->kind());
EXPECT_EQ(0, s.ToInt32(s[0]->OutputAt(0)));
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(1U, s[1]->InputCount());
EXPECT_EQ(2U, s[1]->InputCount());
}
@ -251,7 +251,7 @@ TARGET_TEST_F(InstructionSelectorTest, FinishRegion) {
ASSERT_TRUE(s[0]->Output()->IsUnallocated());
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[0]->Output()));
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[1]->InputAt(0)));
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[1]->InputAt(1)));
EXPECT_TRUE(s.IsReference(finish));
}

View File

@ -40,8 +40,9 @@ class Int64LoweringTest : public GraphTest {
MachineOperatorBuilder* machine() { return &machine_; }
void LowerGraph(Node* node, Signature<MachineRepresentation>* signature) {
Node* ret = graph()->NewNode(common()->Return(), node, graph()->start(),
graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, node,
graph()->start(), graph()->start());
NodeProperties::MergeControlToEnd(graph(), common(), ret);
Int64Lowering lowering(graph(), machine(), common(), zone(), signature);
@ -216,7 +217,8 @@ TEST_F(Int64LoweringTest, UnalignedInt64Load) {
Int32Constant(base), Int32Constant(index), \
Int64Constant(value(0)), start(), start()); \
\
Node* ret = graph()->NewNode(common()->Return(), \
Node* zero = graph()->NewNode(common()->Int32Constant(0)); \
Node* ret = graph()->NewNode(common()->Return(), zero, \
Int32Constant(return_value), store, start()); \
\
NodeProperties::MergeControlToEnd(graph(), common(), ret); \
@ -313,7 +315,7 @@ TEST_F(Int64LoweringTest, CallI64Return) {
CompareCallDescriptors(
OpParameter<const CallDescriptor*>(
graph()->end()->InputAt(1)->InputAt(0)->InputAt(0)),
graph()->end()->InputAt(1)->InputAt(1)->InputAt(0)),
wasm::ModuleEnv::GetI32WasmCallDescriptor(zone(), desc));
}
@ -347,7 +349,7 @@ TEST_F(Int64LoweringTest, CallI64Parameter) {
CompareCallDescriptors(
OpParameter<const CallDescriptor*>(
graph()->end()->InputAt(1)->InputAt(0)),
graph()->end()->InputAt(1)->InputAt(1)),
wasm::ModuleEnv::GetI32WasmCallDescriptor(zone(), desc));
}

View File

@ -90,7 +90,8 @@ class LoopPeelingTest : public GraphTest {
}
Node* InsertReturn(Node* val, Node* effect, Node* control) {
Node* r = graph()->NewNode(common()->Return(), val, effect, control);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* r = graph()->NewNode(common()->Return(), zero, val, effect, control);
graph()->SetEnd(r);
return r;
}

View File

@ -356,10 +356,10 @@ class IsReturnMatcher final : public NodeMatcher {
bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
return (NodeMatcher::MatchAndExplain(node, listener) &&
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 0),
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1),
"value", value_matcher_, listener) &&
(!has_second_return_value_ ||
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1),
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 2),
"value2", value2_matcher_, listener)) &&
PrintMatchAndExplain(NodeProperties::GetEffectInput(node), "effect",
effect_matcher_, listener) &&

View File

@ -96,7 +96,8 @@ TEST_F(SchedulerTest, BuildScheduleOneParameter) {
graph()->SetStart(graph()->NewNode(common()->Start(0)));
Node* p1 = graph()->NewNode(common()->Parameter(0), graph()->start());
Node* ret = graph()->NewNode(common()->Return(), p1, graph()->start(),
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, p1, graph()->start(),
graph()->start());
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
@ -128,12 +129,13 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond1) {
Node* p0 = graph()->NewNode(common()->Parameter(0), start);
Node* d1 = CreateDiamond(graph(), common(), p0);
Node* ret = graph()->NewNode(common()->Return(), d1, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, d1, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(13);
ComputeAndVerifySchedule(14);
}
TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond1) {
@ -143,12 +145,13 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond1) {
Node* p0 = graph()->NewNode(common()->Parameter(0), start);
Node* d1 = CreateDiamond(graph(), common(), p0);
USE(d1);
Node* ret = graph()->NewNode(common()->Return(), p0, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, p0, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(4);
ComputeAndVerifySchedule(5);
}
TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
@ -162,9 +165,10 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
Node* n3 = g->NewNode(common()->IfTrue(), n2);
Node* n4 = g->NewNode(common()->IfFalse(), n2);
Node* n5 = g->NewNode(common()->Int32Constant(-100));
Node* n6 = g->NewNode(common()->Return(), n5, start, n4);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* n6 = g->NewNode(common()->Return(), zero, n5, start, n4);
Node* n7 = g->NewNode(common()->Int32Constant(0));
Node* n8 = g->NewNode(common()->Return(), n7, start, n3);
Node* n8 = g->NewNode(common()->Return(), zero, n7, start, n3);
Node* n9 = g->NewNode(common()->End(2), n6, n8);
// Dead nodes
@ -179,7 +183,7 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
g->SetEnd(n9);
ComputeAndVerifySchedule(10);
ComputeAndVerifySchedule(11);
}
TARGET_TEST_F(SchedulerTest, FloatingDiamond2) {
@ -191,12 +195,13 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond2) {
Node* d1 = CreateDiamond(graph(), common(), p0);
Node* d2 = CreateDiamond(graph(), common(), p1);
Node* add = graph()->NewNode(&kIntAdd, d1, d2);
Node* ret = graph()->NewNode(common()->Return(), add, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, add, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(24);
ComputeAndVerifySchedule(25);
}
@ -210,12 +215,13 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond3) {
Node* d2 = CreateDiamond(graph(), common(), p1);
Node* add = graph()->NewNode(&kIntAdd, d1, d2);
Node* d3 = CreateDiamond(graph(), common(), add);
Node* ret = graph()->NewNode(common()->Return(), d3, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, d3, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(33);
ComputeAndVerifySchedule(34);
}
@ -248,12 +254,13 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamonds) {
fv, phi1, m);
Node* ephi1 = graph()->NewNode(common()->EffectPhi(2), start, map, m);
Node* ret = graph()->NewNode(common()->Return(), phi, ephi1, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, ephi1, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(23);
ComputeAndVerifySchedule(24);
}
@ -294,12 +301,13 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamondWithChain) {
common()->Phi(MachineRepresentation::kTagged, 2), phiA1, c, mB2);
Node* add = graph()->NewNode(&kIntAdd, phiA2, phiB2);
Node* ret = graph()->NewNode(common()->Return(), add, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, add, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(36);
ComputeAndVerifySchedule(37);
}
@ -330,12 +338,13 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamondWithLoop) {
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
fv, ind, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(20);
ComputeAndVerifySchedule(21);
}
@ -365,12 +374,13 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond1) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, phi1); // close induction variable.
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(20);
ComputeAndVerifySchedule(21);
}
@ -401,12 +411,13 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond2) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, add); // close induction variable.
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(20);
ComputeAndVerifySchedule(21);
}
@ -450,12 +461,13 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond3) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, add); // close induction variable.
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(28);
ComputeAndVerifySchedule(29);
}
@ -486,12 +498,13 @@ TARGET_TEST_F(SchedulerTest, PhisPushedDownToDifferentBranches) {
Node* phi3 = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, 2), phi, phi2, m2);
Node* ret = graph()->NewNode(common()->Return(), phi3, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi3, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(24);
ComputeAndVerifySchedule(25);
}
@ -508,12 +521,13 @@ TARGET_TEST_F(SchedulerTest, BranchHintTrue) {
Node* m = graph()->NewNode(common()->Merge(2), t, f);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
tv, fv, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(13);
Schedule* schedule = ComputeAndVerifySchedule(14);
// Make sure the false block is marked as deferred.
EXPECT_FALSE(schedule->block(t)->deferred());
EXPECT_TRUE(schedule->block(f)->deferred());
@ -533,12 +547,13 @@ TARGET_TEST_F(SchedulerTest, BranchHintFalse) {
Node* m = graph()->NewNode(common()->Merge(2), t, f);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
tv, fv, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(13);
Schedule* schedule = ComputeAndVerifySchedule(14);
// Make sure the true block is marked as deferred.
EXPECT_TRUE(schedule->block(t)->deferred());
EXPECT_FALSE(schedule->block(f)->deferred());
@ -560,12 +575,13 @@ TARGET_TEST_F(SchedulerTest, CallException) {
Node* m = graph()->NewNode(common()->Merge(2), ok2, hdl);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
c2, p0, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, m);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(17);
Schedule* schedule = ComputeAndVerifySchedule(18);
// Make sure the exception blocks as well as the handler are deferred.
EXPECT_TRUE(schedule->block(ex1)->deferred());
EXPECT_TRUE(schedule->block(ex2)->deferred());
@ -603,12 +619,13 @@ TARGET_TEST_F(SchedulerTest, Switch) {
Node* m = graph()->NewNode(common()->Merge(3), c0, c1, d);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 3),
v0, v1, vd, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, m);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(16);
ComputeAndVerifySchedule(17);
}
@ -627,12 +644,13 @@ TARGET_TEST_F(SchedulerTest, FloatingSwitch) {
Node* m = graph()->NewNode(common()->Merge(3), c0, c1, d);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 3),
v0, v1, vd, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(16);
ComputeAndVerifySchedule(17);
}
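Note on the scheduler hunks above: each test gets the same mechanical update. A zero Int32Constant is created and passed as the leading value input to common()->Return(), and the node count expected by ComputeAndVerifySchedule grows by one to account for that extra constant node. As a sketch only (not taken from these tests, and assuming the SchedulerTest fixture's graph() and common() helpers), the new leading input can also request a non-zero number of extra stack-slot pops:

// Sketch, not part of this CL's tests: the leading value input to Return is
// the number of additional stack slots to pop on return, so a non-zero
// constant asks the generated code to drop extra slots beyond those implied
// by the function's Linkage.
Node* start = graph()->start();
Node* value = graph()->NewNode(common()->Parameter(0), start);
Node* pop_two = graph()->NewNode(common()->Int32Constant(2));
Node* ret = graph()->NewNode(common()->Return(), pop_two, value, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);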
@ -39,7 +39,9 @@ TEST_F(TailCallOptimizationTest, CallCodeObject0) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_FALSE(r.Changed());
}
@ -60,7 +62,9 @@ TEST_F(TailCallOptimizationTest, CallCodeObject1) {
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* if_exception = graph()->NewNode(common()->IfException(), call, call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* end = graph()->NewNode(common()->End(1), if_exception);
graph()->SetEnd(end);
Reduction r = Reduce(ret);
@ -82,7 +86,9 @@ TEST_F(TailCallOptimizationTest, CallCodeObject2) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,
@ -104,7 +110,9 @@ TEST_F(TailCallOptimizationTest, CallJSFunction0) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_FALSE(r.Changed());
}
@ -125,7 +133,9 @@ TEST_F(TailCallOptimizationTest, CallJSFunction1) {
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* if_exception = graph()->NewNode(common()->IfException(), call, call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* end = graph()->NewNode(common()->End(1), if_exception);
graph()->SetEnd(end);
Reduction r = Reduce(ret);
@ -146,7 +156,9 @@ TEST_F(TailCallOptimizationTest, CallJSFunction2) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,