[turbofan] Move platform-independent bits of VisitTailCall to instruction-selector.cc

Review URL: https://codereview.chromium.org/1414223004
Cr-Commit-Position: refs/heads/master@{#31561}

commit f5d42d04d7
parent effe76ad25
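
The change is a hoist-the-shared-body refactoring: the tail-call selection logic that every back end (arm, arm64, ia32, mips, mips64, ppc, x64, x87) duplicated in its VisitTailCall is moved into the shared instruction-selector.cc, and each back end keeps only two platform-specific hooks, IsTailCallAddressImmediate() and EmitPrepareArguments(). The standalone sketch below illustrates that pattern only; it is not V8 code, the class and function names are made up, and V8 itself builds one back end per binary rather than dispatching through virtuals.

// Illustration of the pattern in this commit (hypothetical names, not V8):
// a shared driver plus small per-platform hooks.
#include <cstdio>
#include <vector>

class SelectorBase {
 public:
  virtual ~SelectorBase() = default;

  // Platform-independent driver, analogous to the shared VisitTailCall().
  void VisitTailCall(const std::vector<int>& args) {
    std::printf("call address is %s\n",
                IsTailCallAddressImmediate() ? "an immediate" : "in a register");
    EmitPrepareArguments(args);  // per-platform stack setup
    std::printf("emit tail-call\n");
  }

 protected:
  // The two hooks each back end still provides after the refactoring.
  virtual bool IsTailCallAddressImmediate() = 0;
  virtual void EmitPrepareArguments(const std::vector<int>& args) = 0;
};

class X64LikeSelector : public SelectorBase {
 protected:
  bool IsTailCallAddressImmediate() override { return true; }
  void EmitPrepareArguments(const std::vector<int>& args) override {
    // Push arguments in reverse, the way the ia32/x64 selectors do.
    for (auto it = args.rbegin(); it != args.rend(); ++it)
      std::printf("push %d\n", *it);
  }
};

int main() {
  X64LikeSelector selector;
  selector.VisitTailCall({1, 2, 3});
  return 0;
}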

@@ -1140,85 +1140,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  ArmOperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on ARM it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on ARM it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Push any stack arguments.
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      Emit(kArmPush, g.NoOutput(), g.UseRegister(input));
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject: {
        opcode = kArchCallCodeObject;
        break;
      }
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t const output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


namespace {

@@ -1451,110 +1451,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  Arm64OperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor = nullptr;
    if (descriptor->NeedsFrameState()) {
      frame_state_descriptor = GetFrameStateDescriptor(
          node->InputAt(static_cast<int>(descriptor->InputCount())));
    }

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on ARM64 it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Push the arguments to the stack.
    int aligned_push_count = static_cast<int>(buffer.pushed_nodes.size());
    bool pushed_count_uneven = aligned_push_count & 1;
    // TODO(dcarney): claim and poke probably take small immediates,
    // loop here or whatever.
    // Bump the stack pointer(s).
    if (aligned_push_count > 0) {
      // TODO(dcarney): it would be better to bump the csp here only
      // and emit paired stores with increment for non c frames.
      Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(aligned_push_count));
    }
    // Move arguments to the stack.
    {
      int slot = aligned_push_count - 1;
      // Emit the uneven pushes.
      if (pushed_count_uneven) {
        Node* input = buffer.pushed_nodes[slot];
        Emit(kArm64Poke, g.NoOutput(), g.UseRegister(input),
             g.TempImmediate(slot));
        slot--;
      }
      // Now all pushes can be done in pairs.
      for (; slot >= 0; slot -= 2) {
        Emit(kArm64PokePair, g.NoOutput(),
             g.UseRegister(buffer.pushed_nodes[slot]),
             g.UseRegister(buffer.pushed_nodes[slot - 1]),
             g.TempImmediate(slot));
      }
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject: {
        opcode = kArchCallCodeObject;
        break;
      }
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t const output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


namespace {

@@ -886,84 +886,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  IA32OperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.

  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Push any stack arguments.
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      // TODO(titzer): Handle pushing double parameters.
      InstructionOperand value =
          g.CanBeImmediate(input)
              ? g.UseImmediate(input)
              : IsSupported(ATOM) ? g.UseRegister(input) : g.Use(input);
      Emit(kIA32Push, g.NoOutput(), value);
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t output_count = buffer.outputs.size();
    auto* outputs = &buffer.outputs.front();
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }


namespace {

@@ -1072,6 +1072,79 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
}


void InstructionSelector::VisitTailCall(Node* node) {
  OperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.

  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, IsTailCallAddressImmediate());

    EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t output_count = buffer.outputs.size();
    auto* outputs = &buffer.outputs.front();
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())
        ->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}


void InstructionSelector::VisitGoto(BasicBlock* target) {
  // jump to the next block.
  OperandGenerator g(this);

@@ -172,6 +172,7 @@ class InstructionSelector final {
  void InitializeCallBuffer(Node* call, CallBuffer* buffer,
                            bool call_code_immediate,
                            bool call_address_immediate);
  bool IsTailCallAddressImmediate();

  FrameStateDescriptor* GetFrameStateDescriptor(Node* node);

@@ -563,86 +563,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  MipsOperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, false);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, false);
    // Possibly align stack here for functions.
    int push_count = static_cast<int>(descriptor->StackParameterCount());
    if (push_count > 0) {
      Emit(kMipsStackClaim, g.NoOutput(),
           g.TempImmediate(push_count << kPointerSizeLog2));
    }
    int slot = static_cast<int>(buffer.pushed_nodes.size()) - 1;
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      Emit(kMipsStoreToStackSlot, g.NoOutput(), g.UseRegister(input),
           g.TempImmediate(slot << kPointerSizeLog2));
      slot--;
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject: {
        opcode = kArchCallCodeObject;
        break;
      }
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t const output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


void InstructionSelector::VisitCheckedLoad(Node* node) {

@@ -750,86 +750,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  Mips64OperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, false);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, false);

    const int32_t push_count = static_cast<int32_t>(buffer.pushed_nodes.size());
    if (push_count > 0) {
      Emit(kMips64StackClaim, g.NoOutput(),
           g.TempImmediate(push_count << kPointerSizeLog2));
    }
    int slot = push_count - 1;
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      Emit(kMips64StoreToStackSlot, g.NoOutput(), g.UseRegister(input),
           g.TempImmediate(slot << kPointerSizeLog2));
      slot--;
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject: {
        opcode = kArchCallCodeObject;
        break;
      }
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t const output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


void InstructionSelector::VisitCheckedLoad(Node* node) {

@@ -1521,94 +1521,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  PPCOperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on PPC it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor = nullptr;
    if (descriptor->NeedsFrameState()) {
      frame_state_descriptor =
          GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
    }

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    // TODO(turbofan): on PPC it's probably better to use the code object in a
    // register if there are multiple uses of it. Improve constant pool and the
    // heuristics in the register allocator for where to emit constants.
    InitializeCallBuffer(node, &buffer, true, false);

    // Push any stack arguments.
    int num_slots = static_cast<int>(descriptor->StackParameterCount());
    int slot = 0;
    for (Node* input : buffer.pushed_nodes) {
      if (slot == 0) {
        Emit(kPPC_PushFrame, g.NoOutput(), g.UseRegister(input),
             g.TempImmediate(num_slots));
      } else {
        Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input),
             g.TempImmediate(slot));
      }
      ++slot;
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject: {
        opcode = kArchCallCodeObject;
        break;
      }
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t const output_count = buffer.outputs.size();
    auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }


void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {

@@ -1111,83 +1111,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  X64OperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.
  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Push any stack arguments.
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      // TODO(titzer): Handle pushing double parameters.
      InstructionOperand value =
          g.CanBeImmediate(input)
              ? g.UseImmediate(input)
              : IsSupported(ATOM) ? g.UseRegister(input) : g.Use(input);
      Emit(kX64Push, g.NoOutput(), value);
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t output_count = buffer.outputs.size();
    auto* outputs = &buffer.outputs.front();
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }


namespace {

@@ -879,84 +879,7 @@ void InstructionSelector::EmitPrepareArguments(NodeVector* arguments,
}


void InstructionSelector::VisitTailCall(Node* node) {
  X87OperandGenerator g(this);
  CallDescriptor const* descriptor = OpParameter<CallDescriptor const*>(node);
  DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kPatchableCallSite);
  DCHECK_EQ(0, descriptor->flags() & CallDescriptor::kNeedsNopAfterCall);

  // TODO(turbofan): Relax restriction for stack parameters.

  if (linkage()->GetIncomingDescriptor()->CanTailCall(node)) {
    CallBuffer buffer(zone(), descriptor, nullptr);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchTailCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the tailcall instruction.
    Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
         &buffer.instruction_args.front());
  } else {
    FrameStateDescriptor* frame_state_descriptor =
        descriptor->NeedsFrameState()
            ? GetFrameStateDescriptor(
                  node->InputAt(static_cast<int>(descriptor->InputCount())))
            : nullptr;

    CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

    // Compute InstructionOperands for inputs and outputs.
    InitializeCallBuffer(node, &buffer, true, true);

    // Push any stack arguments.
    for (Node* input : base::Reversed(buffer.pushed_nodes)) {
      // TODO(titzer): Handle pushing double parameters.
      InstructionOperand value =
          g.CanBeImmediate(input)
              ? g.UseImmediate(input)
              : IsSupported(ATOM) ? g.UseRegister(input) : g.Use(input);
      Emit(kX87Push, g.NoOutput(), value);
    }

    // Select the appropriate opcode based on the call type.
    InstructionCode opcode;
    switch (descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchCallCodeObject;
        break;
      case CallDescriptor::kCallJSFunction:
        opcode = kArchCallJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    opcode |= MiscField::encode(descriptor->flags());

    // Emit the call instruction.
    size_t output_count = buffer.outputs.size();
    auto* outputs = &buffer.outputs.front();
    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
         &buffer.instruction_args.front())->MarkAsCall();
    Emit(kArchRet, 0, nullptr, output_count, outputs);
  }
}
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }


namespace {
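
For reference, the per-platform answers to the new IsTailCallAddressImmediate() hook, exactly as they appear in the hunks above, collected into a small runnable table (plain C++, not part of the commit):

// Per-platform IsTailCallAddressImmediate() values taken from the diff above.
#include <cstdio>

int main() {
  struct Entry { const char* arch; bool address_immediate; };
  const Entry table[] = {
      {"arm", false},    {"arm64", false}, {"ia32", true}, {"mips", false},
      {"mips64", false}, {"ppc", false},   {"x64", true},  {"x87", true},
  };
  for (const Entry& e : table) {
    std::printf("%-7s tail-call address: %s\n", e.arch,
                e.address_immediate ? "immediate" : "register");
  }
  return 0;
}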