Revert "[arm64] Switch jssp to csp"

This reverts commit 50baf93425.

Reason for revert: breaks arm64 nosnap debug tests:

https://build.chromium.org/p/client.v8.ports/builders/V8%20Linux%20-%20arm64%20-%20sim%20-%20nosnap%20-%20debug/builds/8418

Original change's description:
> [arm64] Switch jssp to csp
> 
> Switch stack pointer to using csp directly, making jssp redundant.
> 
> Bug: v8:6644
> Change-Id: I8e38eda50d56a25161b187c0a033608dd9f90239
> Reviewed-on: https://chromium-review.googlesource.com/860097
> Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
> Commit-Queue: Martyn Capewell <martyn.capewell@arm.com>
> Cr-Commit-Position: refs/heads/master@{#50487}

TBR=martyn.capewell@arm.com,bmeurer@chromium.org

Change-Id: I20015885e6029271ee6558509cdb92ff1a106e5f
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: v8:6644
Reviewed-on: https://chromium-review.googlesource.com/860319
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Adam Klein <adamk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50488}
Adam Klein 2018-01-10 18:38:33 +00:00 committed by Commit Bot
parent 50baf93425
commit 89348016ff
8 changed files with 82 additions and 13 deletions
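For readers skimming the hunks below: the scheme this revert restores keeps two stack pointers on arm64, the C stack pointer csp and a JavaScript stack pointer jssp, and the MacroAssembler tracks which one is currently live via SetStackPointer()/StackPointer(). The restored DCHECKs assert that expectation at each push/pop site. A minimal standalone C++ sketch of that bookkeeping (illustrative types and names, not V8's actual classes):

#include <cassert>

// Illustrative stand-ins for V8's register handles.
enum class Reg { jssp, csp };

class MacroAssemblerSketch {
 public:
  // Mirrors SetStackPointer()/StackPointer() from the hunks below.
  void SetStackPointer(Reg r) { sp_ = r; }
  Reg StackPointer() const { return sp_; }

  void PushThroughJssp() {
    // Stands in for DCHECK(jssp.Is(masm->StackPointer())).
    assert(StackPointer() == Reg::jssp);
    // ... emit stores through jssp ...
  }

 private:
  Reg sp_ = Reg::jssp;  // the revert restores jssp as the default: sp_(jssp)
};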


@@ -42,6 +42,7 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
Register result = destination();
DCHECK(result.Is64Bits());
DCHECK(jssp.Is(masm->StackPointer()));
UseScratchRegisterScope temps(masm);
Register scratch1 = temps.AcquireX();
@@ -283,6 +284,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
//
// The arguments are in reverse order, so that arg[argc-2] is actually the
// first argument to the target function and arg[0] is the last.
DCHECK(jssp.Is(__ StackPointer()));
const Register& argc_input = x0;
const Register& target_input = x1;
@@ -414,6 +416,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ Peek(target, 3 * kPointerSize);
__ LeaveExitFrame(save_doubles(), x10, x9);
DCHECK(jssp.Is(__ StackPointer()));
if (!argv_in_register()) {
// Drop the remaining stack slots and return from the stub.
__ DropArguments(x11);
@@ -450,6 +453,12 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ CallCFunction(find_handler, 3);
}
// We didn't execute a return case, so the stack frame hasn't been updated
// (except for the return address slot). However, we don't need to initialize
// jssp because the throw method will immediately overwrite it when it
// unwinds the stack.
__ SetStackPointer(jssp);
// Retrieve the handler context, SP and FP.
__ Mov(cp, Operand(pending_handler_context_address));
__ Ldr(cp, MemOperand(cp));
@@ -487,6 +496,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Output:
// x0: result.
void JSEntryStub::Generate(MacroAssembler* masm) {
DCHECK(jssp.Is(__ StackPointer()));
Register code_entry = x0;
// Enable instruction instrumentation. This only works on the simulator, and
@@ -503,6 +513,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ SetStackPointer(csp);
__ PushCalleeSavedRegisters();
__ Mov(jssp, csp);
__ SetStackPointer(jssp);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
@@ -579,6 +590,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ Bind(&invoke);
// Push new stack handler.
DCHECK(jssp.Is(__ StackPointer()));
static_assert(StackHandlerConstants::kSize == 2 * kPointerSize,
"Unexpected offset for StackHandlerConstants::kSize");
static_assert(StackHandlerConstants::kNextOffset == 0 * kPointerSize,
@@ -655,6 +667,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
"Size of entry frame is not a multiple of 16 bytes");
__ Drop(EntryFrameConstants::kFixedFrameSize / kPointerSize);
// Restore the callee-saved registers and return.
DCHECK(jssp.Is(__ StackPointer()));
__ Mov(csp, jssp);
__ SetStackPointer(csp);
__ PopCalleeSavedRegisters();
// After this point, we must not modify jssp because it is a callee-saved
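The JSEntryStub hunks above restore a symmetric handoff: on entry from C++, callee-saved registers are pushed on csp, jssp is seeded from csp, and jssp becomes the live stack pointer; on exit the stub copies jssp back into csp before popping. A hedged standalone sketch of that invariant (illustrative names, not the stub's real code):

#include <cassert>

enum class ActiveSp { kCsp, kJssp };

struct JSEntrySketch {
  ActiveSp active = ActiveSp::kCsp;

  void Enter() {
    // PushCalleeSavedRegisters(); Mov(jssp, csp); SetStackPointer(jssp);
    assert(active == ActiveSp::kCsp);
    active = ActiveSp::kJssp;
  }

  void Leave() {
    // Mov(csp, jssp); SetStackPointer(csp); PopCalleeSavedRegisters();
    assert(active == ActiveSp::kJssp);
    active = ActiveSp::kCsp;
  }
};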


@@ -1230,10 +1230,7 @@ void TurboAssembler::Push(Handle<HeapObject> handle) {
UseScratchRegisterScope temps(this);
Register tmp = temps.AcquireX();
Mov(tmp, Operand(handle));
// This is only used in test-heap.cc, for generating code that is not
// executed. Push a padding slot together with the handle here, to
// satisfy the alignment requirement.
Push(padreg, tmp);
Push(tmp);
}
void TurboAssembler::Push(Smi* smi) {
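This hunk swaps the csp-era paired push back to a single-slot push: the hardware requires csp to be 16-byte aligned at every memory access, so a lone 8-byte slot had to travel with a padding slot (Push(padreg, tmp)), whereas jssp carries no such requirement. A hedged arithmetic sketch of the difference (illustrative helper names):

#include <cassert>
#include <cstdint>

constexpr uint64_t kSlotSize = 8;

// csp must stay 16-byte aligned, so one payload slot is padded to a pair.
uint64_t PushThroughCsp(uint64_t csp) {
  csp -= 2 * kSlotSize;   // padreg + tmp
  assert(csp % 16 == 0);  // hardware-enforced alignment
  return csp;
}

// jssp has no hardware alignment rule, so Push(tmp) claims a single slot.
uint64_t PushThroughJssp(uint64_t jssp) {
  return jssp - kSlotSize;  // tmp only
}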


@@ -44,7 +44,7 @@ TurboAssembler::TurboAssembler(Isolate* isolate, void* buffer, int buffer_size,
#endif
tmp_list_(DefaultTmpList()),
fptmp_list_(DefaultFPTmpList()),
sp_(csp),
sp_(jssp),
use_real_aborts_(true) {
if (create_code_object == CodeObjectRequired::kYes) {
code_object_ =
@@ -2145,6 +2145,7 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
Add(dst_reg, dst_reg, 15);
Bic(dst_reg, dst_reg, 15);
DCHECK(jssp.Is(StackPointer()));
Register src_reg = caller_args_count_reg;
// Calculate the end of source area. +kPointerSize is for the receiver.
if (callee_args_count.is_reg()) {
@@ -2188,6 +2189,7 @@ void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
// Leave current frame.
Mov(StackPointer(), dst_reg);
SetStackPointer(jssp);
AssertStackConsistency();
}
@@ -2431,6 +2433,7 @@ void TurboAssembler::TruncateDoubleToIDelayed(Zone* zone, Register result,
// it should use.
Push(jssp, xzr); // Push xzr to maintain csp's required 16-byte alignment.
Mov(jssp, csp);
SetStackPointer(jssp);
}
// If we fell through then inline version didn't succeed - call stub instead.
@@ -2456,6 +2459,7 @@ void TurboAssembler::TruncateDoubleToIDelayed(Zone* zone, Register result,
}
void TurboAssembler::Prologue() {
DCHECK(jssp.Is(StackPointer()));
Push(lr, fp, cp, x1);
Add(fp, StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);
}
@@ -2466,6 +2470,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
Register code_reg = temps.AcquireX();
if (type == StackFrame::INTERNAL) {
DCHECK(jssp.Is(StackPointer()));
Mov(type_reg, StackFrame::TypeToMarker(type));
Mov(code_reg, Operand(CodeObject()));
Push(lr, fp, type_reg, code_reg);
@@ -2486,6 +2491,7 @@ void TurboAssembler::EnterFrame(StackFrame::Type type) {
// csp[0] : for alignment
} else {
DCHECK_EQ(type, StackFrame::CONSTRUCT);
DCHECK(jssp.Is(StackPointer()));
Mov(type_reg, StackFrame::TypeToMarker(type));
// Users of this frame type push a context pointer after the type field,
@@ -2510,6 +2516,7 @@ void TurboAssembler::LeaveFrame(StackFrame::Type type) {
AssertStackConsistency();
Pop(fp, lr);
} else {
DCHECK(jssp.Is(StackPointer()));
// Drop the execution stack down to the frame pointer and restore
// the caller frame pointer and return address.
Mov(StackPointer(), fp);
@@ -2543,6 +2550,7 @@ void MacroAssembler::ExitFrameRestoreFPRegs() {
void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
int extra_space,
StackFrame::Type frame_type) {
DCHECK(jssp.Is(StackPointer()));
DCHECK(frame_type == StackFrame::EXIT ||
frame_type == StackFrame::BUILTIN_EXIT);
@@ -2595,8 +2603,19 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, const Register& scratch,
// sp[8]: Extra space reserved for caller (if extra_space != 0).
// sp -> sp[0]: Space reserved for the return address.
// Align and synchronize the system stack pointer with jssp.
AlignAndSetCSPForFrame();
DCHECK(csp.Is(StackPointer()));
// fp[8]: CallerPC (lr)
// fp -> fp[0]: CallerFP (old fp)
// fp[-8]: STUB marker
// fp[-16]: Space reserved for SPOffset.
// fp[-24]: CodeObject()
// fp[-24 - fp_size]: Saved doubles (if save_doubles is true).
// csp[8]: Memory reserved for the caller if extra_space != 0.
// csp -> csp[0]: Space reserved for the return address.
// ExitFrame::GetStateForFramePointer expects to find the return address at
// the memory address immediately below the pointer stored in SPOffset.
// It is not safe to derive much else from SPOffset, because the size of the
@@ -2637,7 +2656,8 @@ void MacroAssembler::LeaveExitFrame(bool restore_doubles,
// fp[8]: CallerPC (lr)
// fp -> fp[0]: CallerFP (old fp)
// fp[...]: The rest of the frame.
Mov(csp, fp);
Mov(jssp, fp);
SetStackPointer(jssp);
AssertStackConsistency();
Pop(fp, lr);
}
@@ -3059,6 +3079,7 @@ void TurboAssembler::Abort(AbortReason reason) {
// simplify the CallRuntime code, make sure that jssp is the stack pointer.
// There is no risk of register corruption here because Abort doesn't return.
Register old_stack_pointer = StackPointer();
SetStackPointer(jssp);
Mov(jssp, old_stack_pointer);
// We need some scratch registers for the MacroAssembler, so make sure we have
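One detail from the PrepareForTailCall hunk above deserves spelling out: the Add/Bic pair rounds the destination address up to the next 16-byte boundary before the frame is torn down. A standalone equivalent of that arithmetic (illustrative function name):

#include <cstdint>

// Equivalent of: Add(dst_reg, dst_reg, 15); Bic(dst_reg, dst_reg, 15);
uint64_t AlignUp16(uint64_t dst) {
  dst += 15;            // Add(dst_reg, dst_reg, 15)
  return dst & ~15ull;  // Bic clears the low four bits
}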


@@ -653,6 +653,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Label* stack_overflow) {
DCHECK(masm->StackPointer().Is(jssp));
UseScratchRegisterScope temps(masm);
Register scratch = temps.AcquireX();
@@ -1008,6 +1010,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
// the frame (that is done below).
DCHECK(jssp.Is(__ StackPointer()));
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ Push(lr, fp, cp, closure);
__ Add(fp, __ StackPointer(), StandardFrameConstants::kFixedFrameSizeFromFp);
@@ -1061,6 +1064,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Do a stack check to ensure we don't go over the limit.
Label ok;
DCHECK(jssp.Is(__ StackPointer()));
__ Sub(x10, __ StackPointer(), Operand(x11));
__ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
__ B(hs, &ok);
@@ -1646,6 +1650,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
allocatable_register_count)) *
kPointerSize;
DCHECK(jssp.Is(__ StackPointer()));
// Set up frame pointer.
__ Add(fp, __ StackPointer(), frame_size);
@@ -2956,6 +2961,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Wasm code uses the csp. This builtin expects to use the jssp.
// Thus, move csp to jssp when entering this builtin (called from wasm).
DCHECK(masm->StackPointer().Is(jssp));
__ Move(jssp, csp);
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2980,6 +2989,9 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ PopDRegList(fp_regs);
__ PopXRegList(gp_regs);
}
// Move back to csp land. jssp now has the same value as when entering this
// function, but csp might have changed in the runtime call.
__ Move(csp, jssp);
// Now jump to the instructions of the returned code object.
__ Jump(x8);
}
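The InterpreterEntryTrampoline hunks earlier in this file restore jssp as the register the stack check runs against. The check itself subtracts the frame size from the stack pointer and requires the result to stay at or above the real stack limit, otherwise a runtime call handles the overflow. A hedged standalone sketch (illustrative signature, not V8's API):

#include <cstdint>

// Sub(x10, StackPointer(), x11); CompareRoot(x10, Heap::kRealStackLimitRootIndex); B(hs, &ok)
bool FrameFits(uintptr_t sp, uintptr_t frame_size, uintptr_t real_stack_limit) {
  uintptr_t lowest = sp - frame_size;  // x10
  return lowest >= real_stack_limit;   // hs: unsigned higher or same
}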


@@ -455,7 +455,12 @@ Condition FlagsConditionToCondition(FlagsCondition condition) {
} while (0)
void CodeGenerator::AssembleDeconstructFrame() {
__ Mov(csp, fp);
const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) {
__ Mov(csp, fp);
} else {
__ Mov(jssp, fp);
}
__ Pop(fp, lr);
unwinding_info_writer_.MarkFrameDeconstructed(__ pc_offset());
@@ -2323,7 +2328,11 @@ void CodeGenerator::FinishFrame(Frame* frame) {
frame->AlignFrame(16);
CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
__ SetStackPointer(csp);
if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
__ SetStackPointer(csp);
} else {
__ SetStackPointer(jssp);
}
// Save FP registers.
CPURegList saves_fp = CPURegList(CPURegister::kVRegister, kDRegSizeInBits,
@@ -2413,6 +2422,7 @@ void CodeGenerator::AssembleConstructFrame() {
__ EnterFrame(StackFrame::WASM_COMPILED);
}
DCHECK(__ StackPointer().Is(csp));
__ SetStackPointer(jssp);
__ AssertStackConsistency();
// Initialize the jssp because it is required for the runtime call.
__ Mov(jssp, csp);
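FinishFrame and AssembleDeconstructFrame above regain the csp/jssp choice: frames that call C functions or are declared to use the native stack run on csp, everything else returns to jssp. A hedged helper expressing that predicate (illustrative names, mirroring the restored descriptor checks):

enum class Sp { kCsp, kJssp };

// descriptor->IsCFunctionCall() || descriptor->UseNativeStack() selects csp.
Sp FrameStackPointer(bool is_c_function_call, bool use_native_stack) {
  return (is_c_function_call || use_native_stack) ? Sp::kCsp : Sp::kJssp;
}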


@@ -1682,21 +1682,28 @@ void InstructionSelector::EmitPrepareArguments(
Node* node) {
Arm64OperandGenerator g(this);
bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
bool to_native_stack = descriptor->UseNativeStack();
bool always_claim = to_native_stack != from_native_stack;
// `arguments` includes alignment "holes". This means that slots bigger than
// kPointerSize, e.g. Simd128, will span across multiple arguments.
int claim_count = static_cast<int>(arguments->size());
int slot = claim_count - 1;
claim_count = RoundUp(claim_count, 2);
// Bump the stack pointer(s).
if (claim_count > 0) {
if (claim_count > 0 || always_claim) {
// TODO(titzer): claim and poke probably take small immediates.
// TODO(titzer): it would be better to bump the csp here only
// and emit paired stores with increment for non c frames.
ArchOpcode claim = kArm64ClaimCSP;
ArchOpcode claim = to_native_stack ? kArm64ClaimCSP : kArm64ClaimJSSP;
// ClaimJSSP(0) or ClaimCSP(0) isn't a nop if there is a mismatch between
// CSP and JSSP.
Emit(claim, g.NoOutput(), g.TempImmediate(claim_count));
}
ArchOpcode poke = kArm64PokeCSP;
ArchOpcode poke = to_native_stack ? kArm64PokeCSP : kArm64PokeJSSP;
if (claim_count > 0) {
// Store padding, which might be overwritten.
Emit(poke, g.NoOutput(), g.UseImmediate(0),
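The key subtlety in this hunk is captured by the restored comment: ClaimJSSP(0)/ClaimCSP(0) is not a no-op when caller and callee disagree about which stack pointer is live, because the claim also brings the two pointers back in sync. A hedged sketch of the decision (illustrative signature):

// Why a zero-slot claim can still be required.
bool ShouldEmitClaim(int claim_count, bool from_native_stack,
                     bool to_native_stack) {
  bool always_claim = to_native_stack != from_native_stack;
  return claim_count > 0 || always_claim;
}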


@@ -2387,6 +2387,15 @@ void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
buffer.instruction_args.push_back(g.Label(handler));
}
bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
bool to_native_stack = descriptor->UseNativeStack();
if (from_native_stack != to_native_stack) {
// (arm64 only) Mismatch in the use of stack pointers. One or the other
// has to be restored manually by the code generator.
flags |= to_native_stack ? CallDescriptor::kRestoreJSSP
: CallDescriptor::kRestoreCSP;
}
// Select the appropriate opcode based on the call type.
InstructionCode opcode = kArchNop;
switch (descriptor->kind()) {
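The hunk above restores the machine-independent half of the same protocol: a call that crosses between jssp-land and csp-land is flagged so the arm64 code generator knows which pointer to resynchronize after the call returns. A hedged sketch of the flag choice (illustrative enum, mirroring kRestoreJSSP/kRestoreCSP):

enum RestoreFlag { kRestoreNone, kRestoreJSSP, kRestoreCSP };

RestoreFlag RestoreAfterCall(bool from_native_stack, bool to_native_stack) {
  if (from_native_stack == to_native_stack) return kRestoreNone;
  return to_native_stack ? kRestoreJSSP : kRestoreCSP;
}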


@@ -3943,8 +3943,7 @@ static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
v8::internal::CodeObjectRequired::kYes);
CodeDesc desc;
masm.Push(isolate->factory()->undefined_value());
masm.Push(isolate->factory()->undefined_value());
masm.Drop(2);
masm.Drop(1);
masm.GetCode(isolate, &desc);
Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
Handle<Code> code =