diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index 9bdc1e1bd8..e95c42292e 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -846,6 +846,43 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
 }
 
 
+enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
+
+
+// Clobbers a2; preserves all other registers.
+static void Generate_CheckStackOverflow(MacroAssembler* masm,
+                                        const int calleeOffset, Register argc,
+                                        IsTagged argc_is_tagged) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  Label okay;
+  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
+  // Make a2 the space we have left. The stack might already be overflowed
+  // here which will cause a2 to become negative.
+  __ Subu(a2, sp, a2);
+  // Check if the arguments will overflow the stack.
+  if (argc_is_tagged == kArgcIsSmiTagged) {
+    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
+  } else {
+    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
+    __ sll(t3, argc, kPointerSizeLog2);
+  }
+  // Signed comparison.
+  __ Branch(&okay, gt, a2, Operand(t3));
+
+  // Out of stack space.
+  __ lw(a1, MemOperand(fp, calleeOffset));
+  if (argc_is_tagged == kArgcIsUntaggedInt) {
+    __ SmiTag(argc);
+  }
+  __ Push(a1, argc);
+  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
+
+  __ bind(&okay);
+}
+
+
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -872,6 +909,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Push the function and the receiver onto the stack.
     __ Push(a1, a2);
 
+    // Check if we have enough stack space to push all arguments.
+    // The function is the first thing that was pushed above after entering
+    // the internal frame.
+    const int kFunctionOffset =
+        InternalFrameConstants::kCodeOffset - kPointerSize;
+    // Clobbers a2.
+    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+
     // Copy arguments to the stack in a loop.
     // a3: argc
     // s0: argv, i.e. points to first arg
@@ -1351,30 +1396,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 }
 
 
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        const int calleeOffset) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  Label okay;
-  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
-  // Make a2 the space we have left. The stack might already be overflowed
-  // here which will cause a2 to become negative.
-  __ Subu(a2, sp, a2);
-  // Check if the arguments will overflow the stack.
-  __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
-  // Signed comparison.
-  __ Branch(&okay, gt, a2, Operand(t3));
-
-  // Out of stack space.
-  __ lw(a1, MemOperand(fp, calleeOffset));
-  __ Push(a1, v0);
-  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
-
-  __ bind(&okay);
-}
-
-
 static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                           const int argumentsOffset,
                                           const int indexOffset,
@@ -1433,7 +1454,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
     }
 
     // Returns the result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =
@@ -1562,7 +1583,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
     __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
 
     // Returns result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =
diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc
index 24d4a800e5..30a93dc5eb 100644
--- a/src/mips64/builtins-mips64.cc
+++ b/src/mips64/builtins-mips64.cc
@@ -857,6 +857,42 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
 }
 
 
+enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
+
+
+// Clobbers a2; preserves all other registers.
+static void Generate_CheckStackOverflow(MacroAssembler* masm,
+                                        const int calleeOffset, Register argc,
+                                        IsTagged argc_is_tagged) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  Label okay;
+  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
+  // Make a2 the space we have left. The stack might already be overflowed
+  // here which will cause a2 to become negative.
+  __ dsubu(a2, sp, a2);
+  // Check if the arguments will overflow the stack.
+  if (argc_is_tagged == kArgcIsSmiTagged) {
+    __ SmiScale(a7, argc, kPointerSizeLog2);
+  } else {
+    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
+    __ dsll(a7, argc, kPointerSizeLog2);
+  }
+  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
+
+  // Out of stack space.
+  __ ld(a1, MemOperand(fp, calleeOffset));
+  if (argc_is_tagged == kArgcIsUntaggedInt) {
+    __ SmiTag(argc);
+  }
+  __ Push(a1, argc);
+  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
+
+  __ bind(&okay);
+}
+
+
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -882,6 +918,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // Push the function and the receiver onto the stack.
     __ Push(a1, a2);
 
+    // Check if we have enough stack space to push all arguments.
+    // The function is the first thing that was pushed above after entering
+    // the internal frame.
+    const int kFunctionOffset =
+        InternalFrameConstants::kCodeOffset - kPointerSize;
+    // Clobbers a2.
+    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+
     // Copy arguments to the stack in a loop.
     // a3: argc
     // s0: argv, i.e. points to first arg
@@ -1359,29 +1403,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
 }
 
 
-static void Generate_CheckStackOverflow(MacroAssembler* masm,
-                                        const int calleeOffset) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  Label okay;
-  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
-  // Make a2 the space we have left. The stack might already be overflowed
-  // here which will cause a2 to become negative.
-  __ dsubu(a2, sp, a2);
-  // Check if the arguments will overflow the stack.
-  __ SmiScale(a7, v0, kPointerSizeLog2);
-  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.
-
-  // Out of stack space.
-  __ ld(a1, MemOperand(fp, calleeOffset));
-  __ Push(a1, v0);
-  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
-
-  __ bind(&okay);
-}
-
-
 static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                           const int argumentsOffset,
                                           const int indexOffset,
@@ -1441,7 +1462,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
     }
 
     // Returns the result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =
@@ -1570,7 +1591,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
     __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
 
     // Returns result in v0.
-    Generate_CheckStackOverflow(masm, kFunctionOffset);
+    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
 
     // Push current limit and index.
     const int kIndexOffset =