[arm64] Tidy up stack related TF opcodes

Unify PokeCSP/JSSP and ClaimCSP/JSSP, remove RestoreJSSP/CSP, and
remove UseNativeStack.

Bug: v8:6644
Change-Id: I482237a0e112f986c6155dce253749f55bd08f5f
Reviewed-on: https://chromium-review.googlesource.com/860104
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
Cr-Commit-Position: refs/heads/master@{#50531}
Author: Martyn Capewell <martyn.capewell@arm.com>
Committed: 2018-01-10 17:52:37 +00:00 by Commit Bot
parent 229a3e37f9
commit c6c2d9a3e4
8 changed files with 20 additions and 119 deletions
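For reference, a condensed sketch of the unified stack opcodes after this change, assembled from the code-generator hunks below (`i` denotes the instruction's operand converter; the FP and SIMD variants of Poke are elided here):

    case kArm64Claim: {
      int count = i.InputInt32(0);
      DCHECK_EQ(count % 2, 0);  // Keep csp 16-byte aligned.
      __ AssertCspAligned();
      if (count > 0) {
        __ Claim(count);
        frame_access_state()->IncreaseSPDelta(count);
      }
      break;
    }
    case kArm64Poke: {
      Operand operand(i.InputInt32(1) * kPointerSize);
      __ Poke(i.InputOrZeroRegister64(0), operand);
      break;
    }

Because both opcodes now always operate on csp, the SetStackPointer save/restore sequence of the old PokeCSP/PokeJSSP pair is no longer needed.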


@@ -579,17 +579,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ Call(target);
       }
       RecordCallPosition(instr);
-      // TODO(titzer): this is ugly. JSSP should be a caller-save register
-      // in this case, but it is not possible to express in the register
-      // allocator.
-      CallDescriptor::Flags flags(MiscField::decode(opcode));
-      if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Mov(jssp, csp);
-      }
-      if (flags & CallDescriptor::kRestoreCSP) {
-        __ Mov(csp, jssp);
-        __ AssertCspAligned();
-      }
       frame_access_state()->ClearSPDelta();
       break;
     }
@@ -612,17 +601,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
         __ Call(target);
       }
       RecordCallPosition(instr);
-      // TODO(titzer): this is ugly. JSSP should be a caller-save register
-      // in this case, but it is not possible to express in the register
-      // allocator.
-      CallDescriptor::Flags flags(MiscField::decode(opcode));
-      if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Mov(jssp, csp);
-      }
-      if (flags & CallDescriptor::kRestoreCSP) {
-        __ Mov(csp, jssp);
-        __ AssertCspAligned();
-      }
       frame_access_state()->ClearSPDelta();
       break;
     }
@@ -696,17 +674,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       __ Add(x10, x10, Operand(Code::kHeaderSize - kHeapObjectTag));
       __ Call(x10);
       RecordCallPosition(instr);
-      // TODO(titzer): this is ugly. JSSP should be a caller-save register
-      // in this case, but it is not possible to express in the register
-      // allocator.
-      CallDescriptor::Flags flags(MiscField::decode(opcode));
-      if (flags & CallDescriptor::kRestoreJSSP) {
-        __ Mov(jssp, csp);
-      }
-      if (flags & CallDescriptor::kRestoreCSP) {
-        __ Mov(csp, jssp);
-        __ AssertCspAligned();
-      }
       frame_access_state()->ClearSPDelta();
       break;
     }
@@ -1215,62 +1182,17 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArm64CompareAndBranch:
       // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
-    case kArm64ClaimCSP: {
+    case kArm64Claim: {
       int count = i.InputInt32(0);
       DCHECK_EQ(count % 2, 0);
-      Register prev = __ StackPointer();
-      if (prev.Is(jssp)) {
-        // TODO(titzer): make this a macro-assembler method.
-        // TODO(arm64): Storing JSSP on the stack is redundant when calling a C
-        // function, as JSSP is callee-saved (we still need to do this when
-        // calling a code object that uses the CSP as the stack pointer). See
-        // the code generation for kArchCallCodeObject vs. kArchCallCFunction
-        // (the latter does not restore CSP/JSSP).
-        // TurboAssembler::CallCFunction() (safely) drops this extra slot
-        // anyway.
-        __ SetStackPointer(csp);
-        __ Mov(csp, jssp);
-        if (count > 0) {
-          __ Claim(count);
-        }
-        __ SetStackPointer(prev);
-      } else {
       __ AssertCspAligned();
       if (count > 0) {
         __ Claim(count);
         frame_access_state()->IncreaseSPDelta(count);
       }
-      }
       break;
     }
-    case kArm64ClaimJSSP: {
-      int count = i.InputInt32(0);
-      DCHECK_EQ(count % 2, 0);
-      if (csp.Is(__ StackPointer())) {
-        // No JSSP is set up. Compute it from the CSP.
-        __ AssertCspAligned();
-        if (count > 0) {
-          int even = RoundUp(count, 2);
-          // We must also update CSP to maintain stack consistency:
-          __ Sub(csp, csp, even * kPointerSize);  // Must always be aligned.
-          __ Mov(jssp, csp);
-          __ AssertStackConsistency();
-          frame_access_state()->IncreaseSPDelta(even);
-        } else {
-          __ Mov(jssp, csp);
-        }
-      } else {
-        // JSSP is the current stack pointer, just use regular Claim().
-        __ AssertCspAligned();
-        if (count > 0) {
-          __ Claim(count);
-          frame_access_state()->IncreaseSPDelta(count);
-        }
-      }
-      break;
-    }
-    case kArm64PokeCSP:  // fall through
-    case kArm64PokeJSSP: {
-      Register prev = __ StackPointer();
-      __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
+    case kArm64Poke: {
       Operand operand(i.InputInt32(1) * kPointerSize);
       if (instr->InputAt(0)->IsSimd128Register()) {
         __ Poke(i.InputSimd128Register(0), operand);
@@ -1279,7 +1201,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       } else {
         __ Poke(i.InputOrZeroRegister64(0), operand);
       }
-      __ SetStackPointer(prev);
       break;
     }
     case kArm64PokePair: {
@@ -2347,9 +2268,7 @@ void CodeGenerator::FinishFrame(Frame* frame) {

 void CodeGenerator::AssembleConstructFrame() {
   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
-  if (descriptor->UseNativeStack()) {
-    __ AssertCspAligned();
-  }
+  __ AssertCspAligned();

   // The frame has been previously padded in CodeGenerator::FinishFrame().
   DCHECK_EQ(frame()->GetTotalFrameSlotCount() % 2, 0);
@@ -2364,7 +2283,6 @@ void CodeGenerator::AssembleConstructFrame() {
   if (frame_access_state()->has_frame()) {
     // Link the frame
     if (descriptor->IsJSFunctionCall()) {
-      DCHECK(!descriptor->UseNativeStack());
       __ Prologue();
     } else {
       __ Push(lr, fp);
@@ -2522,9 +2440,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
     __ DropArguments(pop_reg);
   }

-  if (descriptor->UseNativeStack()) {
-    __ AssertCspAligned();
-  }
+  __ AssertCspAligned();

   __ Ret();
 }


@@ -79,10 +79,8 @@ namespace compiler {
   V(Arm64TestAndBranch)       \
   V(Arm64CompareAndBranch32)  \
   V(Arm64CompareAndBranch)    \
-  V(Arm64ClaimCSP)            \
-  V(Arm64ClaimJSSP)           \
-  V(Arm64PokeCSP)             \
-  V(Arm64PokeJSSP)            \
+  V(Arm64Claim)               \
+  V(Arm64Poke)                \
   V(Arm64PokePair)            \
   V(Arm64Float32Cmp)          \
   V(Arm64Float32Add)          \
@@ -326,8 +324,6 @@ namespace compiler {
   V(Operand2_R_SXTH) /* %r0 SXTH (signed extend halfword) */ \
   V(Operand2_R_SXTW) /* %r0 SXTW (signed extend word) */

-enum ResetJSSPAfterCall { kNoResetJSSP, kResetJSSP };
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8


@@ -296,10 +296,8 @@ int InstructionScheduler::GetTargetInstructionFlags(
     case kArm64Float64Mod:  // This opcode will call a C Function which can
                             // alter CSP. TODO(arm64): Remove once JSSP is gone.
-    case kArm64ClaimCSP:
-    case kArm64ClaimJSSP:
-    case kArm64PokeCSP:
-    case kArm64PokeJSSP:
+    case kArm64Claim:
+    case kArm64Poke:
     case kArm64PokePair:
     case kArm64StrS:
     case kArm64StrD:


@@ -1691,15 +1691,13 @@ void InstructionSelector::EmitPrepareArguments(
     if (claim_count > 0) {
       // TODO(titzer): claim and poke probably take small immediates.
       // TODO(titzer): it would be better to bump the csp here only
-      //               and emit paired stores with increment for non c frames.
-      ArchOpcode claim = kArm64ClaimCSP;
-      Emit(claim, g.NoOutput(), g.TempImmediate(claim_count));
+      // and emit paired stores with increment for non c frames.
+      Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(claim_count));
     }

-    ArchOpcode poke = kArm64PokeCSP;
     if (claim_count > 0) {
       // Store padding, which might be overwritten.
-      Emit(poke, g.NoOutput(), g.UseImmediate(0),
+      Emit(kArm64Poke, g.NoOutput(), g.UseImmediate(0),
            g.TempImmediate(claim_count - 1));
     }
@@ -1708,7 +1706,6 @@ void InstructionSelector::EmitPrepareArguments(
       Node* input_node = (*arguments)[slot].node;
       // Skip any alignment holes in pushed nodes.
       if (input_node != nullptr) {
-        Emit(poke, g.NoOutput(), g.UseRegister(input_node),
+        Emit(kArm64Poke, g.NoOutput(), g.UseRegister(input_node),
              g.TempImmediate(slot));
       }
       slot--;
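For context on the selector changes above, a hypothetical emission sequence for a call with three stack arguments: the slot count is padded to an even four (the code generator's DCHECK requires an even count), a zero is poked into the top slot as padding that a real argument may overwrite, and the arguments are then poked from the highest slot down (arg0..arg2 are stand-ins for the argument nodes):

    Emit(kArm64Claim, g.NoOutput(), g.TempImmediate(4));
    Emit(kArm64Poke, g.NoOutput(), g.UseImmediate(0), g.TempImmediate(3));   // padding
    Emit(kArm64Poke, g.NoOutput(), g.UseRegister(arg2), g.TempImmediate(2));
    Emit(kArm64Poke, g.NoOutput(), g.UseRegister(arg1), g.TempImmediate(1));
    Emit(kArm64Poke, g.NoOutput(), g.UseRegister(arg0), g.TempImmediate(0));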


@@ -224,7 +224,7 @@ CallDescriptor* Linkage::GetSimplifiedCDescriptor(
   // The target for C calls is always an address (i.e. machine pointer).
   MachineType target_type = MachineType::Pointer();
   LinkageLocation target_loc = LinkageLocation::ForAnyRegister(target_type);
-  CallDescriptor::Flags flags = CallDescriptor::kUseNativeStack;
+  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
   if (set_initialize_root_flag) {
     flags |= CallDescriptor::kInitializeRootRegister;
   }


@@ -177,17 +177,12 @@ class V8_EXPORT_PRIVATE CallDescriptor final
     kNeedsFrameState = 1u << 0,
     kHasExceptionHandler = 1u << 1,
     kCanUseRoots = 1u << 2,
-    // (arm64 only) native stack should be used for arguments.
-    kUseNativeStack = 1u << 3,
-    // (arm64 only) call instruction has to restore JSSP or CSP.
-    kRestoreJSSP = 1u << 4,
-    kRestoreCSP = 1u << 5,
     // Causes the code generator to initialize the root register.
-    kInitializeRootRegister = 1u << 6,
+    kInitializeRootRegister = 1u << 3,
     // Does not ever try to allocate space on our heap.
-    kNoAllocate = 1u << 7,
+    kNoAllocate = 1u << 4,
     // Push argument count as part of function prologue.
-    kPushArgumentCount = 1u << 8
+    kPushArgumentCount = 1u << 5
   };

   typedef base::Flags<Flag> Flags;
@@ -253,7 +248,6 @@ class V8_EXPORT_PRIVATE CallDescriptor final
   Flags flags() const { return flags_; }

   bool NeedsFrameState() const { return flags() & kNeedsFrameState; }
-  bool UseNativeStack() const { return flags() & kUseNativeStack; }
   bool PushArgumentCount() const { return flags() & kPushArgumentCount; }
   bool InitializeRootRegister() const {
     return flags() & kInitializeRootRegister;
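With kUseNativeStack, kRestoreJSSP, and kRestoreCSP gone, the surviving flags are repacked into consecutive low bits. Condensed, the Flag enum above now reads (values taken from the diff; kNoFlags is assumed to be the enum's existing zero value, as its use elsewhere in this change suggests):

    enum Flag {
      kNoFlags = 0u,
      kNeedsFrameState = 1u << 0,
      kHasExceptionHandler = 1u << 1,
      kCanUseRoots = 1u << 2,
      kInitializeRootRegister = 1u << 3,  // was 1u << 6
      kNoAllocate = 1u << 4,              // was 1u << 7
      kPushArgumentCount = 1u << 5        // was 1u << 8
    };

Call sites that previously requested CallDescriptor::kUseNativeStack (the C, wasm, and test descriptors below) now pass CallDescriptor::kNoFlags.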


@@ -260,7 +260,6 @@ CallDescriptor* GetWasmCallDescriptor(Zone* zone, wasm::FunctionSig* fsig) {
                                  : MachineType::AnyTagged();
   LinkageLocation target_loc = LinkageLocation::ForAnyRegister(target_type);

-  CallDescriptor::Flags flags = CallDescriptor::kUseNativeStack;
   CallDescriptor::Kind kind = FLAG_wasm_jit_to_native
                                   ? CallDescriptor::kCallWasmFunction
                                   : CallDescriptor::kCallCodeObject;
@@ -274,7 +273,7 @@ CallDescriptor* GetWasmCallDescriptor(Zone* zone, wasm::FunctionSig* fsig) {
       compiler::Operator::kNoProperties,         // properties
       kCalleeSaveRegisters,                      // callee-saved registers
       kCalleeSaveFPRegisters,                    // callee-saved fp regs
-      flags,                                     // flags
+      CallDescriptor::kNoFlags,                  // flags
       "wasm-call",                               // debug name
       0,                                         // allocatable registers
       rets.stack_offset - params.stack_offset);  // stack_return_count


@@ -207,7 +207,7 @@ class RegisterConfig {
         compiler::Operator::kNoProperties,  // properties
         kCalleeSaveRegisters,               // callee-saved registers
         kCalleeSaveFPRegisters,             // callee-saved fp regs
-        CallDescriptor::kUseNativeStack,    // flags
+        CallDescriptor::kNoFlags,           // flags
         "c-call");
   }