Revert of [turbofan] Support variable size argument popping in TF-generated functions (patchset #13 id:240001 of https://codereview.chromium.org/2446543002/ )

Reason for revert:
Seems to break arm64 sim debug and blocks roll:
https://build.chromium.org/p/client.v8.ports/builders/V8%20Linux%20-%20arm64%20-%20sim%20-%20debug/builds/3294

Original issue's description:
> [turbofan] Support variable size argument removal in TF-generated functions
>
> This is preparation for using TF to create builtins that handle a variable
> number of arguments and have to remove these arguments dynamically from the
> stack upon return.
>
> The gist of the changes:
> - Added a second argument to the Return node which specifies the number of stack
>   slots to pop upon return in addition to those specified by the Linkage of the
>   compiled function.
> - Removed the Tail -> Non-Tail fallback in the instruction selector. Since TF should
>   now handle all tail-call cases except where the return value type differs, this
>   fallback was not really useful and in fact caused unexpected behavior with
>   variable-sized argument popping, since it wasn't possible to materialize a Return
>   node with the right pop count from the TailCall without additional context.
> - Modified existing Return generation to pass a constant zero as the additional
>   pop argument, since the variable pop functionality is only needed by the new
>   builtins.
>
> LOG=N

TBR=bmeurer@chromium.org,mstarzinger@chromium.org,epertoso@chromium.org,danno@chromium.org
# Not skipping CQ checks because the original CL landed more than 1 day ago.
NOPRESUBMIT=true

Review-Url: https://codereview.chromium.org/2473643002
Cr-Commit-Position: refs/heads/master@{#40691}
machenbach 2016-11-02 00:48:47 -07:00 committed by Commit bot
parent 5ef1bddf80
commit c61902e072
41 changed files with 295 additions and 528 deletions
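
For orientation before the per-file diffs: a minimal sketch of the variable-pop Return API that this revert removes, reconstructed from the RawMachineAssembler and CodeStubAssembler hunks below rather than copied verbatim from the tree. The pop count rides along as an extra leading value input on the Return node; plain returns pass a constant zero, and PopAndReturn lets a builtin supply the count dynamically.

// Sketch only (see the removed lines in raw-machine-assembler.cc below).
void RawMachineAssembler::Return(Node* value) {
  Node* values[] = {Int32Constant(0), value};  // no extra slots to pop
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
  Node* values[] = {pop, value};  // dynamic pop count as the first input
  Node* ret = MakeNode(common()->Return(1), 2, values);
  schedule()->AddReturn(CurrentBlock(), ret);
  current_block_ = nullptr;
}

// Example use from the removed PopAndReturnConstant test: return 1234 and
// drop two stack arguments beyond those declared by the call descriptor.
m.PopAndReturn(m.Int32Constant(2), m.SmiConstant(Smi::FromInt(1234)));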

View File

@ -2891,9 +2891,7 @@ compiler::Node* FastCloneShallowArrayStub::Generate(
{
Node* abort_id = assembler->SmiConstant(
Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
assembler->CallRuntime(Runtime::kAbort, context, abort_id);
result.Bind(assembler->UndefinedConstant());
assembler->Goto(&return_result);
assembler->TailCallRuntime(Runtime::kAbort, context, abort_id);
}
assembler->Bind(&correct_elements_map);
}

View File

@ -744,7 +744,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchStackPointer:
@ -1737,7 +1737,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int pop_count = static_cast<int>(descriptor->StackParameterCount());
@ -1761,33 +1762,19 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
unwinding_info_writer_.MarkBlockWillExit();
ArmOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ b(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ b(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
__ Drop(g.ToRegister(pop));
}
__ Drop(pop_count);
__ Ret();
__ Ret(pop_count);
}
void CodeGenerator::AssembleMove(InstructionOperand* source,

View File

@ -783,7 +783,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), masm()->StackPointer());
@ -1854,7 +1854,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore registers.
@ -1873,35 +1874,24 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
unwinding_info_writer_.MarkBlockWillExit();
Arm64OperandConverter g(this, nullptr);
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ B(&return_label_);
return;
} else {
__ Bind(&return_label_);
AssembleDeconstructFrame();
if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ B(&return_label_);
return;
} else {
__ Bind(&return_label_);
AssembleDeconstructFrame();
if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
}
} else if (descriptor->UseNativeStack()) {
pop_count += (pop_count & 1); // align
}
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
__ Drop(g.ToRegister(pop));
}
__ Drop(pop_count);
if (descriptor->UseNativeStack()) {

View File

@ -3777,8 +3777,7 @@ Node* AstGraphBuilder::BuildReturn(Node* return_value) {
return_value =
NewNode(javascript()->CallRuntime(Runtime::kTraceExit), return_value);
}
Node* pop_node = jsgraph()->Int32Constant(0);
Node* control = NewNode(common()->Return(), pop_node, return_value);
Node* control = NewNode(common()->Return(), return_value);
UpdateControlDependencyToLeaveFunction(control);
return control;
}

View File

@ -1762,9 +1762,8 @@ void BytecodeGraphBuilder::VisitStackCheck() {
void BytecodeGraphBuilder::VisitReturn() {
BuildLoopExitsForFunctionExit();
Node* pop_node = jsgraph()->Int32Constant(0);
Node* control =
NewNode(common()->Return(), pop_node, environment()->LookupAccumulator());
NewNode(common()->Return(), environment()->LookupAccumulator());
MergeControlToLeaveFunction(control);
}

View File

@ -180,10 +180,6 @@ void CodeAssembler::Return(Node* value) {
return raw_assembler_->Return(value);
}
void CodeAssembler::PopAndReturn(Node* pop, Node* value) {
return raw_assembler_->PopAndReturn(pop, value);
}
void CodeAssembler::DebugBreak() { raw_assembler_->DebugBreak(); }
void CodeAssembler::Comment(const char* format, ...) {

View File

@ -236,7 +236,6 @@ class V8_EXPORT_PRIVATE CodeAssembler {
Node* Parameter(int value);
void Return(Node* value);
void PopAndReturn(Node* pop, Node* value);
void DebugBreak();
void Comment(const char* format, ...);

View File

@ -129,7 +129,7 @@ class CodeGenerator final : public GapResolver::Assembler {
// Generates an architecture-specific, descriptor-specific return sequence
// to tear down a stack frame.
void AssembleReturn(InstructionOperand* pop);
void AssembleReturn();
void AssembleDeconstructFrame();

View File

@ -284,7 +284,7 @@ Reduction CommonOperatorReducer::ReducePhi(Node* node) {
Reduction CommonOperatorReducer::ReduceReturn(Node* node) {
DCHECK_EQ(IrOpcode::kReturn, node->opcode());
Node* const value = node->InputAt(1);
Node* const value = node->InputAt(0);
Node* effect = NodeProperties::GetEffectInput(node);
Node* const control = NodeProperties::GetControlInput(node);
bool changed = false;
@ -311,9 +311,8 @@ Reduction CommonOperatorReducer::ReduceReturn(Node* node) {
// {end} as revisit, because we mark {node} as {Dead} below, which was
// previously connected to {end}, so we know for sure that at some point
// the reducer logic will visit {end} again.
Node* ret = graph()->NewNode(common()->Return(), node->InputAt(0),
value->InputAt(i), effect->InputAt(i),
control->InputAt(i));
Node* ret = graph()->NewNode(common()->Return(), value->InputAt(i),
effect->InputAt(i), control->InputAt(i));
NodeProperties::MergeControlToEnd(graph(), common(), ret);
}
// Mark the merge {control} and return {node} as {dead}.

View File

@ -256,8 +256,8 @@ OsrGuardType OsrGuardTypeOf(Operator const* op) {
#define CACHED_RETURN_LIST(V) \
V(1) \
V(2) \
V(3) \
V(4)
V(3)
#define CACHED_END_LIST(V) \
V(1) \
@ -396,16 +396,16 @@ struct CommonOperatorGlobalCache final {
CACHED_END_LIST(CACHED_END)
#undef CACHED_END
template <size_t kValueInputCount>
template <size_t kInputCount>
struct ReturnOperator final : public Operator {
ReturnOperator()
: Operator( // --
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
kValueInputCount + 1, 1, 1, 0, 0, 1) {} // counts
: Operator( // --
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
kInputCount, 1, 1, 0, 0, 1) {} // counts
};
#define CACHED_RETURN(value_input_count) \
ReturnOperator<value_input_count> kReturn##value_input_count##Operator;
#define CACHED_RETURN(input_count) \
ReturnOperator<input_count> kReturn##input_count##Operator;
CACHED_RETURN_LIST(CACHED_RETURN)
#undef CACHED_RETURN
@ -632,6 +632,7 @@ const Operator* CommonOperatorBuilder::End(size_t control_input_count) {
0, 0, control_input_count, 0, 0, 0); // counts
}
const Operator* CommonOperatorBuilder::Return(int value_input_count) {
switch (value_input_count) {
#define CACHED_RETURN(input_count) \
@ -646,7 +647,7 @@ const Operator* CommonOperatorBuilder::Return(int value_input_count) {
return new (zone()) Operator( //--
IrOpcode::kReturn, Operator::kNoThrow, // opcode
"Return", // name
value_input_count + 1, 1, 1, 0, 0, 1); // counts
value_input_count, 1, 1, 0, 0, 1); // counts
}

View File

@ -649,7 +649,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), esp);
@ -1981,7 +1981,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
const RegList saves = descriptor->CalleeSavedRegisters();
@ -1993,41 +1994,22 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
}
}
// Might need ecx for scratch if pop_size is too big or if there is a variable
// pop count.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit());
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
IA32OperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now if they always have the same
// number of return args.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & edx.bit());
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
// Might need ecx for scratch if pop_size is too big.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & ecx.bit());
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_size += g.ToConstant(pop).ToInt32() * kPointerSize;
__ Ret(static_cast<int>(pop_size), ecx);
} else {
Register pop_reg = g.ToRegister(pop);
Register scratch_reg = pop_reg.is(ecx) ? edx : ecx;
__ pop(scratch_reg);
__ lea(esp, Operand(esp, pop_reg, times_4, static_cast<int>(pop_size)));
__ jmp(scratch_reg);
}
__ Ret(static_cast<int>(pop_size), ecx);
}

View File

@ -1889,63 +1889,106 @@ void InstructionSelector::VisitTailCall(Node* node) {
DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
CallDescriptor* caller = linkage()->GetIncomingDescriptor();
DCHECK(caller->CanTailCall(node));
const CallDescriptor* callee = CallDescriptorOf(node->op());
int stack_param_delta = callee->GetStackParameterDelta(caller);
CallBuffer buffer(zone(), descriptor, nullptr);
if (caller->CanTailCall(node)) {
const CallDescriptor* callee = CallDescriptorOf(node->op());
int stack_param_delta = callee->GetStackParameterDelta(caller);
CallBuffer buffer(zone(), descriptor, nullptr);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags(kCallCodeImmediate | kCallTail);
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags, stack_param_delta);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags(kCallCodeImmediate | kCallTail);
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags, stack_param_delta);
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
InstructionOperandVector temps(zone());
if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
InstructionOperandVector temps(zone());
if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObjectFromJSFunction;
break;
case CallDescriptor::kCallJSFunction:
opcode = kArchTailCallJSFunctionFromJSFunction;
break;
default:
UNREACHABLE();
return;
}
int temps_count = GetTempsCountForTailCallFromJSFunction();
for (int i = 0; i < temps_count; i++) {
temps.push_back(g.TempRegister());
}
} else {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObject;
break;
case CallDescriptor::kCallAddress:
opcode = kArchTailCallAddress;
break;
default:
UNREACHABLE();
return;
}
}
opcode |= MiscField::encode(descriptor->flags());
Emit(kArchPrepareTailCall, g.NoOutput());
int first_unused_stack_slot =
(V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0) +
stack_param_delta;
buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));
// Emit the tailcall instruction.
Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
&buffer.instruction_args.front(), temps.size(),
temps.empty() ? nullptr : &temps.front());
} else {
FrameStateDescriptor* frame_state_descriptor =
descriptor->NeedsFrameState()
? GetFrameStateDescriptor(
node->InputAt(static_cast<int>(descriptor->InputCount())))
: nullptr;
CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
// Compute InstructionOperands for inputs and outputs.
CallBufferFlags flags = kCallCodeImmediate;
if (IsTailCallAddressImmediate()) {
flags |= kCallAddressImmediate;
}
InitializeCallBuffer(node, &buffer, flags);
EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);
// Select the appropriate opcode based on the call type.
InstructionCode opcode;
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObjectFromJSFunction;
opcode = kArchCallCodeObject;
break;
case CallDescriptor::kCallJSFunction:
opcode = kArchTailCallJSFunctionFromJSFunction;
break;
default:
UNREACHABLE();
return;
}
int temps_count = GetTempsCountForTailCallFromJSFunction();
for (int i = 0; i < temps_count; i++) {
temps.push_back(g.TempRegister());
}
} else {
switch (descriptor->kind()) {
case CallDescriptor::kCallCodeObject:
opcode = kArchTailCallCodeObject;
break;
case CallDescriptor::kCallAddress:
opcode = kArchTailCallAddress;
opcode = kArchCallJSFunction;
break;
default:
UNREACHABLE();
return;
}
opcode |= MiscField::encode(descriptor->flags());
// Emit the call instruction.
size_t output_count = buffer.outputs.size();
auto* outputs = &buffer.outputs.front();
Instruction* call_instr =
Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
&buffer.instruction_args.front());
if (instruction_selection_failed()) return;
call_instr->MarkAsCall();
Emit(kArchRet, 0, nullptr, output_count, outputs);
}
opcode |= MiscField::encode(descriptor->flags());
Emit(kArchPrepareTailCall, g.NoOutput());
int first_unused_stack_slot =
(V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0) +
stack_param_delta;
buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));
// Emit the tailcall instruction.
Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
&buffer.instruction_args.front(), temps.size(),
temps.empty() ? nullptr : &temps.front());
}
@ -1955,22 +1998,20 @@ void InstructionSelector::VisitGoto(BasicBlock* target) {
Emit(kArchJmp, g.NoOutput(), g.Label(target));
}
void InstructionSelector::VisitReturn(Node* ret) {
OperandGenerator g(this);
const int input_count = linkage()->GetIncomingDescriptor()->ReturnCount() == 0
? 1
: ret->op()->ValueInputCount();
DCHECK_GE(input_count, 1);
auto value_locations = zone()->NewArray<InstructionOperand>(input_count);
Node* pop_count = ret->InputAt(0);
value_locations[0] = pop_count->opcode() == IrOpcode::kInt32Constant
? g.UseImmediate(pop_count)
: g.UseRegister(pop_count);
for (int i = 1; i < input_count; ++i) {
value_locations[i] =
g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i - 1));
if (linkage()->GetIncomingDescriptor()->ReturnCount() == 0) {
Emit(kArchRet, g.NoOutput());
} else {
const int ret_count = ret->op()->ValueInputCount();
auto value_locations = zone()->NewArray<InstructionOperand>(ret_count);
for (int i = 0; i < ret_count; ++i) {
value_locations[i] =
g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i));
}
Emit(kArchRet, 0, nullptr, ret_count, value_locations);
}
Emit(kArchRet, 0, nullptr, input_count, value_locations);
}
Instruction* InstructionSelector::EmitDeoptimize(InstructionCode opcode,

View File

@ -184,7 +184,7 @@ Reduction JSInliner::InlineCall(Node* call, Node* new_target, Node* context,
for (Node* const input : end->inputs()) {
switch (input->opcode()) {
case IrOpcode::kReturn:
values.push_back(NodeProperties::GetValueInput(input, 1));
values.push_back(NodeProperties::GetValueInput(input, 0));
effects.push_back(NodeProperties::GetEffectInput(input));
controls.push_back(NodeProperties::GetControlInput(input));
break;

View File

@ -709,7 +709,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
@ -1960,7 +1960,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
int pop_count = static_cast<int>(descriptor->StackParameterCount());
@ -1976,32 +1977,18 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
__ MultiPopFPU(saves_fpu);
}
MipsOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
Register pop_reg = g.ToRegister(pop);
__ sll(pop_reg, pop_reg, kPointerSizeLog2);
__ Addu(sp, sp, Operand(pop_reg));
}
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {

View File

@ -719,7 +719,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
break;
case kArchStackPointer:
__ mov(i.OutputRegister(), sp);
@ -2278,7 +2278,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore GP registers.
@ -2293,33 +2294,19 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
__ MultiPopFPU(saves_fpu);
}
MipsOperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
// Canonicalize JSFunction return sites for now unless they have a variable
// number of stack slot pops.
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ Branch(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
int pop_count = static_cast<int>(descriptor->StackParameterCount());
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_count += g.ToConstant(pop).ToInt32();
} else {
Register pop_reg = g.ToRegister(pop);
__ dsll(pop_reg, pop_reg, kPointerSizeLog2);
__ Daddu(sp, sp, pop_reg);
}
if (pop_count != 0) {
__ DropAndRet(pop_count);
} else {

View File

@ -120,46 +120,23 @@ void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
}
void RawMachineAssembler::Return(Node* value) {
Node* values[] = {Int32Constant(0), value};
Node* ret = MakeNode(common()->Return(1), 2, values);
Node* ret = MakeNode(common()->Return(), 1, &value);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::Return(Node* v1, Node* v2) {
Node* values[] = {Int32Constant(0), v1, v2};
Node* ret = MakeNode(common()->Return(2), 3, values);
Node* values[] = {v1, v2};
Node* ret = MakeNode(common()->Return(2), 2, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
Node* values[] = {Int32Constant(0), v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 4, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
Node* values[] = {pop, value};
Node* ret = MakeNode(common()->Return(1), 2, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
Node* values[] = {pop, v1, v2};
Node* ret = MakeNode(common()->Return(2), 3, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}
void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
Node* v3) {
Node* values[] = {pop, v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 4, values);
Node* values[] = {v1, v2, v3};
Node* ret = MakeNode(common()->Return(3), 3, values);
schedule()->AddReturn(CurrentBlock(), ret);
current_block_ = nullptr;
}

View File

@ -774,9 +774,6 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
void Return(Node* value);
void Return(Node* v1, Node* v2);
void Return(Node* v1, Node* v2, Node* v3);
void PopAndReturn(Node* pop, Node* value);
void PopAndReturn(Node* pop, Node* v1, Node* v2);
void PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3);
void Bind(RawMachineLabel* label);
void Deoptimize(Node* state);
void DebugBreak();

View File

@ -749,23 +749,6 @@ class RepresentationSelector {
}
}
void VisitReturn(Node* node) {
int tagged_limit = node->op()->ValueInputCount() +
OperatorProperties::GetContextInputCount(node->op()) +
OperatorProperties::GetFrameStateInputCount(node->op());
// Visit integer slot count to pop
ProcessInput(node, 0, UseInfo::TruncatingWord32());
// Visit value, context and frame state inputs as tagged.
for (int i = 1; i < tagged_limit; i++) {
ProcessInput(node, i, UseInfo::AnyTagged());
}
// Only enqueue other inputs (effects, control).
for (int i = tagged_limit; i < node->InputCount(); i++) {
EnqueueInput(node, i);
}
}
// Helper for an unused node.
void VisitUnused(Node* node) {
int value_count = node->op()->ValueInputCount() +
@ -2465,14 +2448,10 @@ class RepresentationSelector {
case IrOpcode::kOsrGuard:
return VisitOsrGuard(node);
case IrOpcode::kReturn:
VisitReturn(node);
// Assume the output is tagged.
return SetOutput(node, MachineRepresentation::kTagged);
// Operators with all inputs tagged and no or tagged output have uniform
// handling.
case IrOpcode::kEnd:
case IrOpcode::kReturn:
case IrOpcode::kIfSuccess:
case IrOpcode::kIfException:
case IrOpcode::kIfTrue:

View File

@ -7,7 +7,6 @@
#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
namespace v8 {
@ -19,15 +18,12 @@ Reduction TailCallOptimization::Reduce(Node* node) {
// The value which is returned must be the result of a potential tail call,
// there must be no try/catch/finally around the Call, and there must be no
// other effect between the Call and the Return nodes.
Node* const call = NodeProperties::GetValueInput(node, 1);
Node* const call = NodeProperties::GetValueInput(node, 0);
if (call->opcode() == IrOpcode::kCall &&
CallDescriptorOf(call->op())->SupportsTailCalls() &&
NodeProperties::GetEffectInput(node) == call &&
!NodeProperties::IsExceptionalCall(call)) {
Node* const control = NodeProperties::GetControlInput(node);
// Ensure that no additional arguments are being popped other than those in
// the CallDescriptor, otherwise the tail call transformation is invalid.
DCHECK_EQ(0, Int32Matcher(NodeProperties::GetValueInput(node, 0)).Value());
if (control->opcode() == IrOpcode::kIfSuccess &&
call->OwnedBy(node, control) && control->OwnedBy(node)) {
// Furthermore, control has to flow via an IfSuccess from the Call, so
@ -66,10 +62,9 @@ Reduction TailCallOptimization::Reduce(Node* node) {
// |
DCHECK_EQ(call, NodeProperties::GetControlInput(control, 0));
DCHECK_EQ(4, node->InputCount());
DCHECK_EQ(3, node->InputCount());
node->ReplaceInput(0, NodeProperties::GetEffectInput(call));
node->ReplaceInput(1, NodeProperties::GetControlInput(call));
node->RemoveInput(3);
node->RemoveInput(2);
for (int index = 0; index < call->op()->ValueInputCount(); ++index) {
node->InsertInput(graph()->zone(), index,

View File

@ -281,8 +281,7 @@ class WasmTrapHelper : public ZoneObject {
} else {
// End the control flow with returning 0xdeadbeef
Node* ret_value = GetTrapValue(builder_->GetFunctionSignature());
end = graph()->NewNode(jsgraph()->common()->Return(),
jsgraph()->Int32Constant(0), ret_value,
end = graph()->NewNode(jsgraph()->common()->Return(), ret_value,
*effect_ptr, *control_ptr);
}
@ -1042,13 +1041,11 @@ Node* WasmGraphBuilder::Return(unsigned count, Node** vals) {
DCHECK_NOT_NULL(*control_);
DCHECK_NOT_NULL(*effect_);
Node** buf = Realloc(vals, count, count + 3);
memmove(buf + 1, buf, sizeof(void*) * count);
buf[0] = jsgraph()->Int32Constant(0);
buf[count + 1] = *effect_;
buf[count + 2] = *control_;
Node** buf = Realloc(vals, count, count + 2);
buf[count] = *effect_;
buf[count + 1] = *control_;
Node* ret =
graph()->NewNode(jsgraph()->common()->Return(count), count + 3, buf);
graph()->NewNode(jsgraph()->common()->Return(count), count + 2, vals);
MergeControlToEnd(jsgraph(), ret);
return ret;
@ -2678,8 +2675,8 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(Handle<Code> wasm_code,
}
Node* jsval = ToJS(
retval, sig->return_count() == 0 ? wasm::kAstStmt : sig->GetReturn());
Node* ret = graph()->NewNode(jsgraph()->common()->Return(),
jsgraph()->Int32Constant(0), jsval, call, start);
Node* ret =
graph()->NewNode(jsgraph()->common()->Return(), jsval, call, start);
MergeControlToEnd(jsgraph(), ret);
}
@ -2790,16 +2787,14 @@ void WasmGraphBuilder::BuildWasmToJSWrapper(Handle<JSReceiver> target,
Node* val =
FromJS(call, HeapConstant(isolate->native_context()),
sig->return_count() == 0 ? wasm::kAstStmt : sig->GetReturn());
Node* pop_size = jsgraph()->Int32Constant(0);
if (jsgraph()->machine()->Is32() && sig->return_count() > 0 &&
sig->GetReturn() == wasm::kAstI64) {
ret = graph()->NewNode(jsgraph()->common()->Return(), pop_size, val,
ret = graph()->NewNode(jsgraph()->common()->Return(), val,
graph()->NewNode(jsgraph()->machine()->Word32Sar(),
val, jsgraph()->Int32Constant(31)),
call, start);
} else {
ret = graph()->NewNode(jsgraph()->common()->Return(), pop_size, val, call,
start);
ret = graph()->NewNode(jsgraph()->common()->Return(), val, call, start);
}
MergeControlToEnd(jsgraph(), ret);

View File

@ -4,8 +4,6 @@
#include "src/compiler/code-generator.h"
#include <limits>
#include "src/compilation-info.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
@ -920,7 +918,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break;
}
case kArchRet:
AssembleReturn(instr->InputAt(0));
AssembleReturn();
break;
case kArchStackPointer:
__ movq(i.OutputRegister(), rsp);
@ -2449,7 +2447,8 @@ void CodeGenerator::AssembleConstructFrame() {
}
}
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
void CodeGenerator::AssembleReturn() {
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
// Restore registers.
@ -2478,41 +2477,22 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
unwinding_info_writer_.MarkBlockWillExit();
// Might need rcx for scratch if pop_size is too big or if there is a variable
// pop count.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rdx.bit());
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
X64OperandConverter g(this, nullptr);
if (descriptor->IsCFunctionCall()) {
AssembleDeconstructFrame();
} else if (frame_access_state()->has_frame()) {
if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
// Canonicalize JSFunction return sites for now.
if (return_label_.is_bound()) {
__ jmp(&return_label_);
return;
} else {
__ bind(&return_label_);
AssembleDeconstructFrame();
}
}
if (pop->IsImmediate()) {
DCHECK_EQ(Constant::kInt32, g.ToConstant(pop).type());
pop_size += g.ToConstant(pop).ToInt32() * kPointerSize;
CHECK_LT(pop_size, std::numeric_limits<int>::max());
__ Ret(static_cast<int>(pop_size), rcx);
} else {
Register pop_reg = g.ToRegister(pop);
Register scratch_reg = pop_reg.is(rcx) ? rdx : rcx;
__ popq(scratch_reg);
__ leaq(rsp, Operand(rsp, pop_reg, times_8, static_cast<int>(pop_size)));
__ jmp(scratch_reg);
}
size_t pop_size = descriptor->StackParameterCount() * kPointerSize;
// Might need rcx for scratch if pop_size is too big.
DCHECK_EQ(0u, descriptor->CalleeSavedRegisters() & rcx.bit());
__ Ret(static_cast<int>(pop_size), rcx);
}

View File

@ -86,9 +86,8 @@ class GraphBuilderTester : public HandleAndZoneScope,
}
void Return(Node* value) {
Node* zero = graph()->NewNode(common()->Int32Constant(0));
return_ = graph()->NewNode(common()->Return(), zero, value, effect_,
graph()->start());
return_ =
graph()->NewNode(common()->Return(), value, effect_, graph()->start());
effect_ = NULL;
}

View File

@ -694,7 +694,6 @@ TEST(RemoveToNumberEffects) {
JSTypedLoweringTester R;
Node* effect_use = NULL;
Node* zero = R.graph.NewNode(R.common.Int32Constant(0));
for (int i = 0; i < 10; i++) {
Node* p0 = R.Parameter(Type::Number());
Node* ton = R.Unop(R.javascript.ToNumber(), p0);
@ -725,12 +724,10 @@ TEST(RemoveToNumberEffects) {
R.context(), frame_state, ton, R.start());
break;
case 5:
effect_use =
R.graph.NewNode(R.common.Return(), zero, p0, ton, R.start());
effect_use = R.graph.NewNode(R.common.Return(), p0, ton, R.start());
break;
case 6:
effect_use =
R.graph.NewNode(R.common.Return(), zero, ton, ton, R.start());
effect_use = R.graph.NewNode(R.common.Return(), ton, ton, R.start());
}
R.CheckEffectInput(R.start(), ton);

View File

@ -116,8 +116,7 @@ class LoopFinderTester : HandleAndZoneScope {
}
Node* Return(Node* val, Node* effect, Node* control) {
Node* zero = graph.NewNode(common.Int32Constant(0));
Node* ret = graph.NewNode(common.Return(), zero, val, effect, control);
Node* ret = graph.NewNode(common.Return(), val, effect, control);
end->ReplaceInput(0, ret);
return ret;
}
@ -697,8 +696,7 @@ TEST(LaEdgeMatrix1) {
Node* if_true = t.graph.NewNode(t.common.IfTrue(), branch);
Node* exit = t.graph.NewNode(t.common.IfFalse(), branch);
loop->ReplaceInput(1, if_true);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, p3, t.start, exit);
Node* ret = t.graph.NewNode(t.common.Return(), p3, t.start, exit);
t.graph.SetEnd(ret);
Node* choices[] = {p1, phi, cond};
@ -745,9 +743,7 @@ void RunEdgeMatrix2(int i) {
loop2->ReplaceInput(1, if_true2);
loop1->ReplaceInput(1, exit2);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret =
t.graph.NewNode(t.common.Return(), zero, phi1, t.start, exit1);
Node* ret = t.graph.NewNode(t.common.Return(), phi1, t.start, exit1);
t.graph.SetEnd(ret);
Node* choices[] = {p1, phi1, cond1, phi2, cond2};
@ -834,8 +830,7 @@ void RunEdgeMatrix3(int c1a, int c1b, int c1c, // line break
loop2->ReplaceInput(1, exit3);
loop1->ReplaceInput(1, exit2);
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, phi1, t.start, exit1);
Node* ret = t.graph.NewNode(t.common.Return(), phi1, t.start, exit1);
t.graph.SetEnd(ret);
// Mutate the graph according to the edge choices.
@ -948,8 +943,7 @@ static void RunManyChainedLoops_i(int count) {
last = exit;
}
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
Node* ret = t.graph.NewNode(t.common.Return(), zero, t.p0, t.start, last);
Node* ret = t.graph.NewNode(t.common.Return(), t.p0, t.start, last);
t.graph.SetEnd(ret);
// Verify loops.
@ -968,7 +962,6 @@ static void RunManyNestedLoops_i(int count) {
Node* entry = t.start;
// Build loops.
Node* zero = t.graph.NewNode(t.common.Int32Constant(0));
for (int i = 0; i < count; i++) {
Node* loop = t.graph.NewNode(t.common.Loop(2), entry, t.start);
Node* phi = t.graph.NewNode(t.common.Phi(MachineRepresentation::kWord32, 2),
@ -988,7 +981,7 @@ static void RunManyNestedLoops_i(int count) {
outer->ReplaceInput(1, exit);
} else {
// outer loop.
Node* ret = t.graph.NewNode(t.common.Return(), zero, t.p0, t.start, exit);
Node* ret = t.graph.NewNode(t.common.Return(), t.p0, t.start, exit);
t.graph.SetEnd(ret);
}
outer = loop;

View File

@ -84,8 +84,8 @@ class RepresentationChangerTester : public HandleAndZoneScope,
}
Node* Return(Node* input) {
Node* n = graph()->NewNode(common()->Return(), jsgraph()->Int32Constant(0),
input, graph()->start(), graph()->start());
Node* n = graph()->NewNode(common()->Return(), input, graph()->start(),
graph()->start());
return n;
}

View File

@ -293,9 +293,7 @@ Handle<Code> WrapWithCFunction(Handle<Code> inner, CallDescriptor* desc) {
// Build the call and return nodes.
Node* call =
b.graph()->NewNode(b.common()->Call(desc), param_count + 3, args);
Node* zero = b.graph()->NewNode(b.common()->Int32Constant(0));
Node* ret =
b.graph()->NewNode(b.common()->Return(), zero, call, call, start);
Node* ret = b.graph()->NewNode(b.common()->Return(), call, call, start);
b.graph()->SetEnd(ret);
}
@ -533,9 +531,7 @@ static void TestInt32Sub(CallDescriptor* desc) {
Node* p0 = b.graph()->NewNode(b.common()->Parameter(0), start);
Node* p1 = b.graph()->NewNode(b.common()->Parameter(1), start);
Node* add = b.graph()->NewNode(b.machine()->Int32Sub(), p0, p1);
Node* zero = b.graph()->NewNode(b.common()->Int32Constant(0));
Node* ret =
b.graph()->NewNode(b.common()->Return(), zero, add, start, start);
Node* ret = b.graph()->NewNode(b.common()->Return(), add, start, start);
b.graph()->SetEnd(ret);
}

View File

@ -47,11 +47,10 @@ TEST(RunStringLengthStub) {
Node* vectorParam = graph.NewNode(common.Parameter(4), start);
Node* theCode = graph.NewNode(common.HeapConstant(code));
Node* dummyContext = graph.NewNode(common.NumberConstant(0.0));
Node* zero = graph.NewNode(common.Int32Constant(0));
Node* call =
graph.NewNode(common.Call(descriptor), theCode, receiverParam, nameParam,
slotParam, vectorParam, dummyContext, start, start);
Node* ret = graph.NewNode(common.Return(), zero, call, call, start);
Node* ret = graph.NewNode(common.Return(), call, call, start);
Node* end = graph.NewNode(common.End(1), ret);
graph.SetStart(start);
graph.SetEnd(end);

View File

@ -1729,63 +1729,5 @@ TEST(AllocateNameDictionary) {
}
}
TEST(PopAndReturnConstant) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 4;
const int kNumProgramaticParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams - kNumProgramaticParams);
// Call a function that pops |kNumProgramaticParams| parameters in addition
// to those specified by the static descriptor. |kNumProgramaticParams| is
// specified as a constant.
m.PopAndReturn(m.Int32Constant(kNumProgramaticParams),
m.SmiConstant(Smi::FromInt(1234)));
Handle<Code> code = m.GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result;
for (int test_count = 0; test_count < 100; ++test_count) {
result = ft.Call(isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(1234), isolate),
isolate->factory()->undefined_value(),
isolate->factory()->undefined_value())
.ToHandleChecked();
CHECK_EQ(1234, Handle<Smi>::cast(result)->value());
}
}
TEST(PopAndReturnVariable) {
Isolate* isolate(CcTest::InitIsolateOnce());
const int kNumParams = 4;
const int kNumProgramaticParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams - kNumProgramaticParams);
// Call a function that pops |kNumProgramaticParams| parameters in addition
// to those specified by the static descriptor. |kNumProgramaticParams| is
// passed in as a parameter to the function so that it can't be recognized as
// a constant.
m.PopAndReturn(m.SmiUntag(m.Parameter(1)), m.SmiConstant(Smi::FromInt(1234)));
Handle<Code> code = m.GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result;
for (int test_count = 0; test_count < 100; ++test_count) {
result =
ft.Call(isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(1234), isolate),
isolate->factory()->undefined_value(),
Handle<Smi>(Smi::FromInt(kNumProgramaticParams * kPointerSize),
isolate))
.ToHandleChecked();
CHECK_EQ(1234, Handle<Smi>::cast(result)->value());
}
}
} // namespace internal
} // namespace v8

View File

@ -391,9 +391,8 @@ class WasmFunctionWrapper : public HandleAndZoneScope,
graph()->start()),
graph()->NewNode(common()->Int32Constant(0)), call, effect,
graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* r = graph()->NewNode(
common()->Return(), zero,
common()->Return(),
graph()->NewNode(common()->Int32Constant(WASM_WRAPPER_RETURN_VALUE)),
effect, graph()->start());
graph()->SetEnd(graph()->NewNode(common()->End(2), r, graph()->start()));

View File

@ -65,9 +65,8 @@ TEST_F(BranchEliminationTest, NestedBranchSameTrue) {
graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
inner_phi, Int32Constant(3), outer_merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, outer_phi,
graph()->start(), outer_merge);
Node* ret = graph()->NewNode(common()->Return(), outer_phi, graph()->start(),
outer_merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -107,9 +106,8 @@ TEST_F(BranchEliminationTest, NestedBranchSameFalse) {
graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
Int32Constant(1), inner_phi, outer_merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, outer_phi,
graph()->start(), outer_merge);
Node* ret = graph()->NewNode(common()->Return(), outer_phi, graph()->start(),
outer_merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -146,9 +144,8 @@ TEST_F(BranchEliminationTest, BranchAfterDiamond) {
Node* add = graph()->NewNode(machine()->Int32Add(), phi1, phi2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, add, graph()->start(), merge2);
graph()->NewNode(common()->Return(), add, graph()->start(), merge2);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
Reduce();
@ -179,9 +176,8 @@ TEST_F(BranchEliminationTest, BranchInsideLoopSame) {
Node* inner_branch = graph()->NewNode(common()->Branch(), condition, loop);
Node* inner_if_true = graph()->NewNode(common()->IfTrue(), inner_branch);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret1 = graph()->NewNode(common()->Return(), zero, Int32Constant(2),
effect, inner_if_true);
Node* ret1 = graph()->NewNode(common()->Return(), Int32Constant(2), effect,
inner_if_true);
Node* inner_if_false = graph()->NewNode(common()->IfFalse(), inner_branch);
loop->AppendInput(zone(), inner_if_false);
@ -195,7 +191,7 @@ TEST_F(BranchEliminationTest, BranchInsideLoopSame) {
Node* outer_ephi = graph()->NewNode(common()->EffectPhi(2), effect,
graph()->start(), outer_merge);
Node* ret2 = graph()->NewNode(common()->Return(), zero, Int32Constant(1),
Node* ret2 = graph()->NewNode(common()->Return(), Int32Constant(1),
outer_ephi, outer_merge);
Node* terminate = graph()->NewNode(common()->Terminate(), effect, loop);

View File

@ -361,9 +361,7 @@ TEST_F(CommonOperatorReducerTest, ReturnWithPhiAndEffectPhiAndMerge) {
Node* ephi = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, merge);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, ephi, merge);
Node* ret = graph()->NewNode(common()->Return(), phi, ephi, merge);
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
StrictMock<MockAdvancedReducerEditor> editor;
EXPECT_CALL(editor, Replace(merge, IsDead()));

View File

@ -192,10 +192,10 @@ TEST_F(CommonOperatorTest, Return) {
const Operator* const op = common()->Return(input_count);
EXPECT_EQ(IrOpcode::kReturn, op->opcode());
EXPECT_EQ(Operator::kNoThrow, op->properties());
EXPECT_EQ(input_count + 1, op->ValueInputCount());
EXPECT_EQ(input_count, op->ValueInputCount());
EXPECT_EQ(1, op->EffectInputCount());
EXPECT_EQ(1, op->ControlInputCount());
EXPECT_EQ(3 + input_count, OperatorProperties::GetTotalInputCount(op));
EXPECT_EQ(2 + input_count, OperatorProperties::GetTotalInputCount(op));
EXPECT_EQ(0, op->ValueOutputCount());
EXPECT_EQ(0, op->EffectOutputCount());
EXPECT_EQ(1, op->ControlOutputCount());

View File

@ -60,8 +60,7 @@ TEST_F(EffectControlLinearizerTest, SimpleLoad) {
Node* load = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForHeapNumberValue()), heap_number,
graph()->start(), graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, load, graph()->start(),
Node* ret = graph()->NewNode(common()->Return(), load, graph()->start(),
graph()->start());
// Build the basic block structure.
@ -106,9 +105,8 @@ TEST_F(EffectControlLinearizerTest, DiamondLoad) {
Node* phi = graph()->NewNode(
common()->Phi(MachineRepresentation::kFloat64, 2), vtrue, vfalse, merge);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, phi, graph()->start(), merge);
graph()->NewNode(common()->Return(), phi, graph()->start(), merge);
// Build the basic block structure.
BasicBlock* start = schedule.start();
@ -208,9 +206,8 @@ TEST_F(EffectControlLinearizerTest, FloatingDiamondsControlWiring) {
Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
Node* merge2 = graph()->NewNode(common()->Merge(2), if_true2, if_false2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, call, graph()->start(),
if_success);
Node* ret =
graph()->NewNode(common()->Return(), call, graph()->start(), if_success);
// Build the basic block structure.
BasicBlock* start = schedule.start();
@ -292,9 +289,7 @@ TEST_F(EffectControlLinearizerTest, LoopLoad) {
simplified()->LoadField(AccessBuilder::ForHeapNumberValue()), heap_number,
graph()->start(), loop);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, load, effect_phi, if_true);
Node* ret = graph()->NewNode(common()->Return(), load, effect_phi, if_true);
// Build the basic block structure.
BasicBlock* start = schedule.start();

View File

@ -119,9 +119,8 @@ class EscapeAnalysisTest : public TypedGraphTest {
if (!control) {
control = control_;
}
Node* zero = graph()->NewNode(common()->Int32Constant(0));
return control_ = graph()->NewNode(common()->Return(), zero, value, effect,
control);
return control_ =
graph()->NewNode(common()->Return(), value, effect, control);
}
void EndGraph() {
@ -225,7 +224,7 @@ TEST_F(EscapeAnalysisTest, StraightNonEscape) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
}
@ -251,7 +250,7 @@ TEST_F(EscapeAnalysisTest, StraightNonEscapeNonConstStore) {
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 0));
}
@ -273,7 +272,7 @@ TEST_F(EscapeAnalysisTest, StraightEscape) {
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 0));
}
@ -301,7 +300,7 @@ TEST_F(EscapeAnalysisTest, StoreLoadEscape) {
Transformation();
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(finish1, NodeProperties::GetValueInput(result, 0));
}
@ -334,7 +333,7 @@ TEST_F(EscapeAnalysisTest, BranchNonEscape) {
Transformation();
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(replacement_phi, NodeProperties::GetValueInput(result, 0));
}
@ -366,7 +365,7 @@ TEST_F(EscapeAnalysisTest, BranchEscapeOne) {
Transformation();
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(load, NodeProperties::GetValueInput(result, 0));
}
@ -401,7 +400,7 @@ TEST_F(EscapeAnalysisTest, BranchEscapeThroughStore) {
Transformation();
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(allocation, NodeProperties::GetValueInput(result, 0));
}
@ -426,7 +425,7 @@ TEST_F(EscapeAnalysisTest, DanglingLoadOrder) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
}
@ -462,7 +461,7 @@ TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);
ASSERT_EQ(1, object_state->op()->ValueInputCount());
@ -502,7 +501,7 @@ TEST_F(EscapeAnalysisTest, DISABLED_DeoptReplacementIdentity) {
Transformation();
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 1));
ASSERT_EQ(object1, NodeProperties::GetValueInput(result, 0));
Node* object_state = NodeProperties::GetValueInput(state_values1, 0);
ASSERT_EQ(object_state->opcode(), IrOpcode::kObjectState);

View File

@ -290,13 +290,12 @@ TEST_F(AdvancedReducerTest, ReplaceWithValue_ValueUse) {
CommonOperatorBuilder common(zone());
Node* node = graph()->NewNode(&kMockOperator);
Node* start = graph()->NewNode(common.Start(1));
Node* zero = graph()->NewNode(common.Int32Constant(0));
Node* use_value = graph()->NewNode(common.Return(), zero, node, start, start);
Node* use_value = graph()->NewNode(common.Return(), node, start, start);
Node* replacement = graph()->NewNode(&kMockOperator);
GraphReducer graph_reducer(zone(), graph(), nullptr);
ReplaceWithValueReducer r(&graph_reducer);
r.ReplaceWithValue(node, replacement);
EXPECT_EQ(replacement, use_value->InputAt(1));
EXPECT_EQ(replacement, use_value->InputAt(0));
EXPECT_EQ(0, node->UseCount());
EXPECT_EQ(1, replacement->UseCount());
EXPECT_THAT(replacement->uses(), ElementsAre(use_value));

View File

@ -166,7 +166,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnFloat32Constant) {
ASSERT_EQ(InstructionOperand::CONSTANT, s[0]->OutputAt(0)->kind());
EXPECT_FLOAT_EQ(kValue, s.ToFloat32(s[0]->OutputAt(0)));
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(2U, s[1]->InputCount());
EXPECT_EQ(1U, s[1]->InputCount());
}
@ -178,7 +178,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnParameter) {
EXPECT_EQ(kArchNop, s[0]->arch_opcode());
ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(2U, s[1]->InputCount());
EXPECT_EQ(1U, s[1]->InputCount());
}
@ -192,7 +192,7 @@ TARGET_TEST_F(InstructionSelectorTest, ReturnZero) {
EXPECT_EQ(InstructionOperand::CONSTANT, s[0]->OutputAt(0)->kind());
EXPECT_EQ(0, s.ToInt32(s[0]->OutputAt(0)));
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(2U, s[1]->InputCount());
EXPECT_EQ(1U, s[1]->InputCount());
}
@ -251,7 +251,7 @@ TARGET_TEST_F(InstructionSelectorTest, FinishRegion) {
ASSERT_TRUE(s[0]->Output()->IsUnallocated());
EXPECT_EQ(kArchRet, s[1]->arch_opcode());
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[0]->Output()));
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[1]->InputAt(1)));
EXPECT_EQ(s.ToVreg(param), s.ToVreg(s[1]->InputAt(0)));
EXPECT_TRUE(s.IsReference(finish));
}

View File

@ -40,9 +40,8 @@ class Int64LoweringTest : public GraphTest {
MachineOperatorBuilder* machine() { return &machine_; }
void LowerGraph(Node* node, Signature<MachineRepresentation>* signature) {
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, node,
graph()->start(), graph()->start());
Node* ret = graph()->NewNode(common()->Return(), node, graph()->start(),
graph()->start());
NodeProperties::MergeControlToEnd(graph(), common(), ret);
Int64Lowering lowering(graph(), machine(), common(), zone(), signature);
@ -217,8 +216,7 @@ TEST_F(Int64LoweringTest, UnalignedInt64Load) {
Int32Constant(base), Int32Constant(index), \
Int64Constant(value(0)), start(), start()); \
\
Node* zero = graph()->NewNode(common()->Int32Constant(0)); \
Node* ret = graph()->NewNode(common()->Return(), zero, \
Node* ret = graph()->NewNode(common()->Return(), \
Int32Constant(return_value), store, start()); \
\
NodeProperties::MergeControlToEnd(graph(), common(), ret); \
@ -315,7 +313,7 @@ TEST_F(Int64LoweringTest, CallI64Return) {
CompareCallDescriptors(
OpParameter<const CallDescriptor*>(
graph()->end()->InputAt(1)->InputAt(1)->InputAt(0)),
graph()->end()->InputAt(1)->InputAt(0)->InputAt(0)),
wasm::ModuleEnv::GetI32WasmCallDescriptor(zone(), desc));
}
@ -349,7 +347,7 @@ TEST_F(Int64LoweringTest, CallI64Parameter) {
CompareCallDescriptors(
OpParameter<const CallDescriptor*>(
graph()->end()->InputAt(1)->InputAt(1)),
graph()->end()->InputAt(1)->InputAt(0)),
wasm::ModuleEnv::GetI32WasmCallDescriptor(zone(), desc));
}

View File

@ -90,8 +90,7 @@ class LoopPeelingTest : public GraphTest {
}
Node* InsertReturn(Node* val, Node* effect, Node* control) {
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* r = graph()->NewNode(common()->Return(), zero, val, effect, control);
Node* r = graph()->NewNode(common()->Return(), val, effect, control);
graph()->SetEnd(r);
return r;
}

View File

@ -356,10 +356,10 @@ class IsReturnMatcher final : public NodeMatcher {
bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
return (NodeMatcher::MatchAndExplain(node, listener) &&
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1),
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 0),
"value", value_matcher_, listener) &&
(!has_second_return_value_ ||
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 2),
PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1),
"value2", value2_matcher_, listener)) &&
PrintMatchAndExplain(NodeProperties::GetEffectInput(node), "effect",
effect_matcher_, listener) &&

View File

@ -96,8 +96,7 @@ TEST_F(SchedulerTest, BuildScheduleOneParameter) {
graph()->SetStart(graph()->NewNode(common()->Start(0)));
Node* p1 = graph()->NewNode(common()->Parameter(0), graph()->start());
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, p1, graph()->start(),
Node* ret = graph()->NewNode(common()->Return(), p1, graph()->start(),
graph()->start());
graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
@ -129,13 +128,12 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond1) {
Node* p0 = graph()->NewNode(common()->Parameter(0), start);
Node* d1 = CreateDiamond(graph(), common(), p0);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, d1, start, start);
Node* ret = graph()->NewNode(common()->Return(), d1, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(14);
ComputeAndVerifySchedule(13);
}
TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond1) {
@ -145,13 +143,12 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond1) {
Node* p0 = graph()->NewNode(common()->Parameter(0), start);
Node* d1 = CreateDiamond(graph(), common(), p0);
USE(d1);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, p0, start, start);
Node* ret = graph()->NewNode(common()->Return(), p0, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(5);
ComputeAndVerifySchedule(4);
}
TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
@ -165,10 +162,9 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
Node* n3 = g->NewNode(common()->IfTrue(), n2);
Node* n4 = g->NewNode(common()->IfFalse(), n2);
Node* n5 = g->NewNode(common()->Int32Constant(-100));
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* n6 = g->NewNode(common()->Return(), zero, n5, start, n4);
Node* n6 = g->NewNode(common()->Return(), n5, start, n4);
Node* n7 = g->NewNode(common()->Int32Constant(0));
Node* n8 = g->NewNode(common()->Return(), zero, n7, start, n3);
Node* n8 = g->NewNode(common()->Return(), n7, start, n3);
Node* n9 = g->NewNode(common()->End(2), n6, n8);
// Dead nodes
@ -183,7 +179,7 @@ TARGET_TEST_F(SchedulerTest, FloatingDeadDiamond2) {
g->SetEnd(n9);
ComputeAndVerifySchedule(11);
ComputeAndVerifySchedule(10);
}
TARGET_TEST_F(SchedulerTest, FloatingDiamond2) {
@ -195,13 +191,12 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond2) {
Node* d1 = CreateDiamond(graph(), common(), p0);
Node* d2 = CreateDiamond(graph(), common(), p1);
Node* add = graph()->NewNode(&kIntAdd, d1, d2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, add, start, start);
Node* ret = graph()->NewNode(common()->Return(), add, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(25);
ComputeAndVerifySchedule(24);
}
@ -215,13 +210,12 @@ TARGET_TEST_F(SchedulerTest, FloatingDiamond3) {
Node* d2 = CreateDiamond(graph(), common(), p1);
Node* add = graph()->NewNode(&kIntAdd, d1, d2);
Node* d3 = CreateDiamond(graph(), common(), add);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, d3, start, start);
Node* ret = graph()->NewNode(common()->Return(), d3, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(34);
ComputeAndVerifySchedule(33);
}
@ -254,13 +248,12 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamonds) {
fv, phi1, m);
Node* ephi1 = graph()->NewNode(common()->EffectPhi(2), start, map, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, ephi1, start);
Node* ret = graph()->NewNode(common()->Return(), phi, ephi1, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(24);
ComputeAndVerifySchedule(23);
}
@ -301,13 +294,12 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamondWithChain) {
common()->Phi(MachineRepresentation::kTagged, 2), phiA1, c, mB2);
Node* add = graph()->NewNode(&kIntAdd, phiA2, phiB2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, add, start, start);
Node* ret = graph()->NewNode(common()->Return(), add, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(37);
ComputeAndVerifySchedule(36);
}
@ -338,13 +330,12 @@ TARGET_TEST_F(SchedulerTest, NestedFloatingDiamondWithLoop) {
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
fv, ind, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(21);
ComputeAndVerifySchedule(20);
}
@ -374,13 +365,12 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond1) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, phi1); // close induction variable.
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(21);
ComputeAndVerifySchedule(20);
}
@ -411,13 +401,12 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond2) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, add); // close induction variable.
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(21);
ComputeAndVerifySchedule(20);
}
@ -461,13 +450,12 @@ TARGET_TEST_F(SchedulerTest, LoopedFloatingDiamond3) {
loop->ReplaceInput(1, t); // close loop.
ind->ReplaceInput(1, add); // close induction variable.
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, ind, start, f);
Node* ret = graph()->NewNode(common()->Return(), ind, start, f);
Node* end = graph()->NewNode(common()->End(2), ret, f);
graph()->SetEnd(end);
ComputeAndVerifySchedule(29);
ComputeAndVerifySchedule(28);
}
@ -498,13 +486,12 @@ TARGET_TEST_F(SchedulerTest, PhisPushedDownToDifferentBranches) {
Node* phi3 = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, 2), phi, phi2, m2);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi3, start, start);
Node* ret = graph()->NewNode(common()->Return(), phi3, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(25);
ComputeAndVerifySchedule(24);
}
@ -521,13 +508,12 @@ TARGET_TEST_F(SchedulerTest, BranchHintTrue) {
Node* m = graph()->NewNode(common()->Merge(2), t, f);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
tv, fv, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(14);
Schedule* schedule = ComputeAndVerifySchedule(13);
// Make sure the false block is marked as deferred.
EXPECT_FALSE(schedule->block(t)->deferred());
EXPECT_TRUE(schedule->block(f)->deferred());
@ -547,13 +533,12 @@ TARGET_TEST_F(SchedulerTest, BranchHintFalse) {
Node* m = graph()->NewNode(common()->Merge(2), t, f);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
tv, fv, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(14);
Schedule* schedule = ComputeAndVerifySchedule(13);
// Make sure the true block is marked as deferred.
EXPECT_TRUE(schedule->block(t)->deferred());
EXPECT_FALSE(schedule->block(f)->deferred());
@ -575,13 +560,12 @@ TARGET_TEST_F(SchedulerTest, CallException) {
Node* m = graph()->NewNode(common()->Merge(2), ok2, hdl);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
c2, p0, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, m);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
Schedule* schedule = ComputeAndVerifySchedule(18);
Schedule* schedule = ComputeAndVerifySchedule(17);
// Make sure the exception blocks as well as the handler are deferred.
EXPECT_TRUE(schedule->block(ex1)->deferred());
EXPECT_TRUE(schedule->block(ex2)->deferred());
@ -619,13 +603,12 @@ TARGET_TEST_F(SchedulerTest, Switch) {
Node* m = graph()->NewNode(common()->Merge(3), c0, c1, d);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 3),
v0, v1, vd, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, m);
Node* ret = graph()->NewNode(common()->Return(), phi, start, m);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(17);
ComputeAndVerifySchedule(16);
}
@ -644,13 +627,12 @@ TARGET_TEST_F(SchedulerTest, FloatingSwitch) {
Node* m = graph()->NewNode(common()->Merge(3), c0, c1, d);
Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 3),
v0, v1, vd, m);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret = graph()->NewNode(common()->Return(), zero, phi, start, start);
Node* ret = graph()->NewNode(common()->Return(), phi, start, start);
Node* end = graph()->NewNode(common()->End(1), ret);
graph()->SetEnd(end);
ComputeAndVerifySchedule(17);
ComputeAndVerifySchedule(16);
}
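The same mechanical change repeats across the scheduler tests above: each Return node is rebuilt without the extra Int32Constant(0) value input, and the count passed to ComputeAndVerifySchedule drops by one to match. A minimal sketch of the before/after shape, using the fixture helpers (graph(), common(), start) exactly as they appear in these tests; the variable names ret_before, ret_after and value are illustrative only:

    // Before the revert: Return carries an extra constant value input
    // (the Int32Constant(0) created in each test).
    Node* zero = graph()->NewNode(common()->Int32Constant(0));
    Node* ret_before =
        graph()->NewNode(common()->Return(), zero, value, start, start);

    // After the revert: Return takes only the value, effect and control inputs,
    // so the scheduled graph contains one node fewer.
    Node* ret_after = graph()->NewNode(common()->Return(), value, start, start);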


@ -39,9 +39,7 @@ TEST_F(TailCallOptimizationTest, CallCodeObject0) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_FALSE(r.Changed());
}
@ -62,9 +60,7 @@ TEST_F(TailCallOptimizationTest, CallCodeObject1) {
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* if_exception = graph()->NewNode(common()->IfException(), call, call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* end = graph()->NewNode(common()->End(1), if_exception);
graph()->SetEnd(end);
Reduction r = Reduce(ret);
@ -86,9 +82,7 @@ TEST_F(TailCallOptimizationTest, CallCodeObject2) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,
@ -110,9 +104,7 @@ TEST_F(TailCallOptimizationTest, CallJSFunction0) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_FALSE(r.Changed());
}
@ -133,9 +125,7 @@ TEST_F(TailCallOptimizationTest, CallJSFunction1) {
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* if_exception = graph()->NewNode(common()->IfException(), call, call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Node* end = graph()->NewNode(common()->End(1), if_exception);
graph()->SetEnd(end);
Reduction r = Reduce(ret);
@ -156,9 +146,7 @@ TEST_F(TailCallOptimizationTest, CallJSFunction2) {
Node* call = graph()->NewNode(common()->Call(kCallDescriptor), p0, p1,
graph()->start(), graph()->start());
Node* if_success = graph()->NewNode(common()->IfSuccess(), call);
Node* zero = graph()->NewNode(common()->Int32Constant(0));
Node* ret =
graph()->NewNode(common()->Return(), zero, call, call, if_success);
Node* ret = graph()->NewNode(common()->Return(), call, call, if_success);
Reduction r = Reduce(ret);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsTailCall(kCallDescriptor, p0, p1,