MIPS: JSEntryTrampoline: check for stack space before pushing arguments

Port 146598f44a

Original commit message:
Optimistically pushing a lot of arguments can run into the stack limit of the process, at least on operating systems where this limit is close to the limit that V8 sets for itself.

BUG=chromium:469768
LOG=y

Review URL: https://codereview.chromium.org/1066843003

Cr-Commit-Position: refs/heads/master@{#27634}
balazs.kilvady 2015-04-07 08:59:00 -07:00 committed by Commit bot
parent 2d281e71ac
commit b635967b90
2 changed files with 93 additions and 51 deletions
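In host C++ terms, the check that both files below now emit before copying the arguments can be sketched as follows; the helper name, parameters, and types are illustrative only (they are not V8 API), and the real check is generated as MIPS code by the MacroAssembler calls in the diffs:

#include <cstdint>

// Illustrative sketch, not V8 code: do argc pointer-sized argument slots
// still fit between the current stack pointer and the "real" stack limit
// that V8 reserves for itself below the OS limit?
static bool ArgumentsFitOnStack(uintptr_t sp, uintptr_t real_stack_limit,
                                uint32_t argc, uint32_t pointer_size) {
  // Space we have left. If the stack is already overflowed this becomes
  // negative, which is why the generated code uses a signed (gt) branch.
  int64_t space_left =
      static_cast<int64_t>(sp) - static_cast<int64_t>(real_stack_limit);
  int64_t needed = static_cast<int64_t>(argc) * pointer_size;
  return space_left > needed;
}

When the check fails, the trampoline does not copy the arguments at all: it pushes the callee and the (Smi-tagged) argument count and invokes the STACK_OVERFLOW builtin, which raises the stack overflow exception instead of running into the OS limit.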

src/mips/builtins-mips.cc

@@ -846,6 +846,43 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
 }
 
 
+enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
+
+
+// Clobbers a2; preserves all other registers.
+static void Generate_CheckStackOverflow(MacroAssembler* masm,
+                                        const int calleeOffset, Register argc,
+                                        IsTagged argc_is_tagged) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  Label okay;
+  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
+  // Make a2 the space we have left. The stack might already be overflowed
+  // here which will cause a2 to become negative.
+  __ Subu(a2, sp, a2);
+  // Check if the arguments will overflow the stack.
+  if (argc_is_tagged == kArgcIsSmiTagged) {
+    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
+  } else {
+    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
+    __ sll(t3, argc, kPointerSizeLog2);
+  }
+  // Signed comparison.
+  __ Branch(&okay, gt, a2, Operand(t3));
+
+  // Out of stack space.
+  __ lw(a1, MemOperand(fp, calleeOffset));
+  if (argc_is_tagged == kArgcIsUntaggedInt) {
+    __ SmiTag(argc);
+  }
+  __ Push(a1, argc);
+  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
+
+  __ bind(&okay);
+}
+
+
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -872,6 +909,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Push the function and the receiver onto the stack.
     __ Push(a1, a2);
 
+    // Check if we have enough stack space to push all arguments.
+    // The function is the first thing that was pushed above after entering
+    // the internal frame.
+    const int kFunctionOffset =
+        InternalFrameConstants::kCodeOffset - kPointerSize;
+    // Clobbers a2.
+    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+
     // Copy arguments to the stack in a loop.
     // a3: argc
     // s0: argv, i.e. points to first arg
@@ -1351,30 +1396,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 }
 
 
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        const int calleeOffset) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  Label okay;
-  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
-  // Make a2 the space we have left. The stack might already be overflowed
-  // here which will cause a2 to become negative.
-  __ Subu(a2, sp, a2);
-  // Check if the arguments will overflow the stack.
-  __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
-  // Signed comparison.
-  __ Branch(&okay, gt, a2, Operand(t3));
-
-  // Out of stack space.
-  __ lw(a1, MemOperand(fp, calleeOffset));
-  __ Push(a1, v0);
-  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
-
-  __ bind(&okay);
-}
-
-
 static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                           const int argumentsOffset,
                                           const int indexOffset,
@@ -1433,7 +1454,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
     }
 
     // Returns the result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =
@@ -1562,7 +1583,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
     __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
 
     // Returns result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =

src/mips64/builtins-mips64.cc

@@ -857,6 +857,42 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
 }
 
 
+enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
+
+
+// Clobbers a2; preserves all other registers.
+static void Generate_CheckStackOverflow(MacroAssembler* masm,
+                                        const int calleeOffset, Register argc,
+                                        IsTagged argc_is_tagged) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  Label okay;
+  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
+  // Make a2 the space we have left. The stack might already be overflowed
+  // here which will cause a2 to become negative.
+  __ dsubu(a2, sp, a2);
+  // Check if the arguments will overflow the stack.
+  if (argc_is_tagged == kArgcIsSmiTagged) {
+    __ SmiScale(a7, argc, kPointerSizeLog2);
+  } else {
+    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
+    __ dsll(a7, argc, kPointerSizeLog2);
+  }
+  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
+
+  // Out of stack space.
+  __ ld(a1, MemOperand(fp, calleeOffset));
+  if (argc_is_tagged == kArgcIsUntaggedInt) {
+    __ SmiTag(argc);
+  }
+  __ Push(a1, argc);
+  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
+
+  __ bind(&okay);
+}
+
+
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -882,6 +918,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Push the function and the receiver onto the stack.
     __ Push(a1, a2);
 
+    // Check if we have enough stack space to push all arguments.
+    // The function is the first thing that was pushed above after entering
+    // the internal frame.
+    const int kFunctionOffset =
+        InternalFrameConstants::kCodeOffset - kPointerSize;
+    // Clobbers a2.
+    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+
     // Copy arguments to the stack in a loop.
     // a3: argc
     // s0: argv, i.e. points to first arg
@@ -1359,29 +1403,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 }
 
 
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        const int calleeOffset) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  Label okay;
-  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
-  // Make a2 the space we have left. The stack might already be overflowed
-  // here which will cause a2 to become negative.
-  __ dsubu(a2, sp, a2);
-  // Check if the arguments will overflow the stack.
-  __ SmiScale(a7, v0, kPointerSizeLog2);
-  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
-
-  // Out of stack space.
-  __ ld(a1, MemOperand(fp, calleeOffset));
-  __ Push(a1, v0);
-  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
-
-  __ bind(&okay);
-}
-
-
 static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                           const int argumentsOffset,
                                           const int indexOffset,
@@ -1441,7 +1462,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
     }
 
     // Returns the result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =
@@ -1570,7 +1591,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
     __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
 
     // Returns result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =