diff --git a/src/full-codegen/arm/full-codegen-arm.cc b/src/full-codegen/arm/full-codegen-arm.cc
index 7353e3de1b..e3a4fabf52 100644
--- a/src/full-codegen/arm/full-codegen-arm.cc
+++ b/src/full-codegen/arm/full-codegen-arm.cc
@@ -423,6 +423,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset();
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ b(pl, &ok);
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ push(r0);
+  }
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ pop(r0);
+  }
+  EmitProfilingCounterReset();
+  __ bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -436,24 +460,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ push(r0);
     __ CallRuntime(Runtime::kTraceExit);
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset();
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ b(pl, &ok);
-  __ push(r0);
-  __ Call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ pop(r0);
-  EmitProfilingCounterReset();
-  __ bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   // Make sure that the constant pool is not emitted inside of the return
   // sequence.
@@ -2754,6 +2761,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
 
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   SetCallPosition(expr);
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/full-codegen/arm64/full-codegen-arm64.cc b/src/full-codegen/arm64/full-codegen-arm64.cc
index 92a843ddbd..96fd06a766 100644
--- a/src/full-codegen/arm64/full-codegen-arm64.cc
+++ b/src/full-codegen/arm64/full-codegen-arm64.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ B(pl, &ok);
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ Push(x0);
+  }
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ Pop(x0);
+  }
+  EmitProfilingCounterReset();
+  __ Bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -430,24 +454,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ CallRuntime(Runtime::kTraceExit);
     DCHECK(x0.Is(result_register()));
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ B(pl, &ok);
-  __ Push(x0);
-  __ Call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ Pop(x0);
-  EmitProfilingCounterReset();
-  __ Bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   SetReturnPosition(literal());
   const Register& current_sp = __ StackPointer();
@@ -2556,7 +2563,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
 
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   SetCallPosition(expr);
-
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/full-codegen/full-codegen.h b/src/full-codegen/full-codegen.h
index 8e02944020..a9602e00ec 100644
--- a/src/full-codegen/full-codegen.h
+++ b/src/full-codegen/full-codegen.h
@@ -505,6 +505,7 @@ class FullCodeGenerator: public AstVisitor {
 
   // Platform-specific return sequence
   void EmitReturnSequence();
+  void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);
 
   // Platform-specific code sequences for calls
   void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
diff --git a/src/full-codegen/ia32/full-codegen-ia32.cc b/src/full-codegen/ia32/full-codegen-ia32.cc
index deed409d02..5a4e9d7fc7 100644
--- a/src/full-codegen/ia32/full-codegen-ia32.cc
+++ b/src/full-codegen/ia32/full-codegen-ia32.cc
@@ -386,6 +386,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset();
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ j(positive, &ok, Label::kNear);
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ push(eax);
+  }
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ pop(eax);
+  }
+  EmitProfilingCounterReset();
+  __ bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -398,24 +422,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ push(eax);
     __ CallRuntime(Runtime::kTraceExit);
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset();
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ j(positive, &ok, Label::kNear);
-  __ push(eax);
-  __ call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ pop(eax);
-  EmitProfilingCounterReset();
-  __ bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   SetReturnPosition(literal());
   __ leave();
@@ -2639,6 +2646,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
 
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   SetCallPosition(expr);
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/full-codegen/mips/full-codegen-mips.cc b/src/full-codegen/mips/full-codegen-mips.cc
index 7f37f7c36a..4af426cdc2 100644
--- a/src/full-codegen/mips/full-codegen-mips.cc
+++ b/src/full-codegen/mips/full-codegen-mips.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset();
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ Branch(&ok, ge, a3, Operand(zero_reg));
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ push(v0);
+  }
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ pop(v0);
+  }
+  EmitProfilingCounterReset();
+  __ bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ push(v0);
     __ CallRuntime(Runtime::kTraceExit);
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset();
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ Branch(&ok, ge, a3, Operand(zero_reg));
-  __ push(v0);
-  __ Call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ pop(v0);
-  EmitProfilingCounterReset();
-  __ bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   // Make sure that the constant pool is not emitted inside of the return
   // sequence.
@@ -2741,6 +2748,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   // Record source position of the IC call.
   SetCallPosition(expr);
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/full-codegen/mips64/full-codegen-mips64.cc b/src/full-codegen/mips64/full-codegen-mips64.cc
index 7f69151501..92f0ad8a1a 100644
--- a/src/full-codegen/mips64/full-codegen-mips64.cc
+++ b/src/full-codegen/mips64/full-codegen-mips64.cc
@@ -414,6 +414,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset();
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ Branch(&ok, ge, a3, Operand(zero_reg));
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ push(v0);
+  }
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ pop(v0);
+  }
+  EmitProfilingCounterReset();
+  __ bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -427,24 +451,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ push(v0);
     __ CallRuntime(Runtime::kTraceExit);
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset();
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ Branch(&ok, ge, a3, Operand(zero_reg));
-  __ push(v0);
-  __ Call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ pop(v0);
-  EmitProfilingCounterReset();
-  __ bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   // Make sure that the constant pool is not emitted inside of the return
   // sequence.
@@ -2747,6 +2754,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   // Record source position of the IC call.
   SetCallPosition(expr);
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/full-codegen/x64/full-codegen-x64.cc b/src/full-codegen/x64/full-codegen-x64.cc
index 4b6d634aae..d440ae8c3b 100644
--- a/src/full-codegen/x64/full-codegen-x64.cc
+++ b/src/full-codegen/x64/full-codegen-x64.cc
@@ -389,6 +389,30 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
 }
 
+void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
+    bool is_tail_call) {
+  // Pretend that the exit is a backwards jump to the entry.
+  int weight = 1;
+  if (info_->ShouldSelfOptimize()) {
+    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+  } else {
+    int distance = masm_->pc_offset();
+    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
+  }
+  EmitProfilingCounterDecrement(weight);
+  Label ok;
+  __ j(positive, &ok, Label::kNear);
+  // Don't need to save result register if we are going to do a tail call.
+  if (!is_tail_call) {
+    __ Push(rax);
+  }
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
+  if (!is_tail_call) {
+    __ Pop(rax);
+  }
+  EmitProfilingCounterReset();
+  __ bind(&ok);
+}
 
 void FullCodeGenerator::EmitReturnSequence() {
   Comment cmnt(masm_, "[ Return sequence");
@@ -400,24 +424,7 @@ void FullCodeGenerator::EmitReturnSequence() {
     __ Push(rax);
     __ CallRuntime(Runtime::kTraceExit);
   }
-  // Pretend that the exit is a backwards jump to the entry.
-  int weight = 1;
-  if (info_->ShouldSelfOptimize()) {
-    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
-  } else {
-    int distance = masm_->pc_offset();
-    weight = Min(kMaxBackEdgeWeight,
-                 Max(1, distance / kCodeSizeMultiplier));
-  }
-  EmitProfilingCounterDecrement(weight);
-  Label ok;
-  __ j(positive, &ok, Label::kNear);
-  __ Push(rax);
-  __ call(isolate()->builtins()->InterruptCheck(),
-          RelocInfo::CODE_TARGET);
-  __ Pop(rax);
-  EmitProfilingCounterReset();
-  __ bind(&ok);
+  EmitProfilingCounterHandlingForReturnSequence(false);
 
   SetReturnPosition(literal());
   __ leave();
@@ -2625,6 +2632,14 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
 
   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
   SetCallPosition(expr);
+  if (expr->tail_call_mode() == TailCallMode::kAllow) {
+    if (FLAG_trace) {
+      __ CallRuntime(Runtime::kTraceTailCall);
+    }
+    // Update profiling counters before the tail call since we will
+    // not return to this function.
+    EmitProfilingCounterHandlingForReturnSequence(true);
+  }
   Handle<Code> ic =
       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
           .code();
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index 3979be5f13..5f27a609a6 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -408,53 +408,54 @@ RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
   return isolate->heap()->undefined_value();
 }
 
+namespace {
 
-static int StackSize(Isolate* isolate) {
+int StackSize(Isolate* isolate) {
   int n = 0;
   for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
   return n;
 }
 
-
-static void PrintTransition(Isolate* isolate, Object* result) {
-  // indentation
-  {
-    const int nmax = 80;
-    int n = StackSize(isolate);
-    if (n <= nmax)
-      PrintF("%4d:%*s", n, n, "");
-    else
-      PrintF("%4d:%*s", n, nmax, "...");
-  }
-
-  if (result == NULL) {
-    JavaScriptFrame::PrintTop(isolate, stdout, true, false);
-    PrintF(" {\n");
+void PrintIndentation(Isolate* isolate) {
+  const int nmax = 80;
+  int n = StackSize(isolate);
+  if (n <= nmax) {
+    PrintF("%4d:%*s", n, n, "");
   } else {
-    // function result
-    PrintF("} -> ");
-    result->ShortPrint();
-    PrintF("\n");
+    PrintF("%4d:%*s", n, nmax, "...");
   }
 }
 
+}  // namespace
 
 RUNTIME_FUNCTION(Runtime_TraceEnter) {
   SealHandleScope shs(isolate);
-  DCHECK(args.length() == 0);
-  PrintTransition(isolate, NULL);
+  DCHECK_EQ(0, args.length());
+  PrintIndentation(isolate);
+  JavaScriptFrame::PrintTop(isolate, stdout, true, false);
+  PrintF(" {\n");
   return isolate->heap()->undefined_value();
 }
 
 
 RUNTIME_FUNCTION(Runtime_TraceExit) {
   SealHandleScope shs(isolate);
-  DCHECK(args.length() == 1);
+  DCHECK_EQ(1, args.length());
   CONVERT_ARG_CHECKED(Object, obj, 0);
-  PrintTransition(isolate, obj);
+  PrintIndentation(isolate);
+  PrintF("} -> ");
+  obj->ShortPrint();
+  PrintF("\n");
   return obj;  // return TOS
 }
 
+RUNTIME_FUNCTION(Runtime_TraceTailCall) {
+  SealHandleScope shs(isolate);
+  DCHECK_EQ(0, args.length());
+  PrintIndentation(isolate);
+  PrintF("} -> tail call ->\n");
+  return isolate->heap()->undefined_value();
+}
 
 RUNTIME_FUNCTION(Runtime_HaveSameMap) {
   SealHandleScope shs(isolate);
diff --git a/src/runtime/runtime.h b/src/runtime/runtime.h
index 14144e9d93..f4e461b85e 100644
--- a/src/runtime/runtime.h
+++ b/src/runtime/runtime.h
@@ -919,7 +919,6 @@ namespace internal {
   F(SymbolRegistry, 0, 1)          \
   F(SymbolIsPrivate, 1, 1)
 
-
 #define FOR_EACH_INTRINSIC_TEST(F) \
   F(DeoptimizeFunction, 1, 1)      \
   F(DeoptimizeNow, 0, 1)           \
@@ -947,6 +946,7 @@ namespace internal {
   F(DisassembleFunction, 1, 1)     \
   F(TraceEnter, 0, 1)              \
   F(TraceExit, 1, 1)               \
+  F(TraceTailCall, 0, 1)           \
  F(HaveSameMap, 2, 1)             \
  F(InNewSpace, 1, 1)              \
  F(HasFastSmiElements, 1, 1)      \
@@ -968,7 +968,6 @@ namespace internal {
   F(HasFixedFloat64Elements, 1, 1) \
   F(HasFixedUint8ClampedElements, 1, 1)
 
-
 #define FOR_EACH_INTRINSIC_TYPEDARRAY(F) \
   F(ArrayBufferGetByteLength, 1, 1)      \
   F(ArrayBufferSliceImpl, 4, 1)          \
diff --git a/test/mjsunit/es6/tail-call-simple.js b/test/mjsunit/es6/tail-call-simple.js
index 9443208c57..ad6f7cd78c 100644
--- a/test/mjsunit/es6/tail-call-simple.js
+++ b/test/mjsunit/es6/tail-call-simple.js
@@ -15,6 +15,8 @@
     return f(n - 1);
   }
   assertThrows(()=>{ f(1e6) });
+  %OptimizeFunctionOnNextCall(f);
+  assertThrows(()=>{ f(1e6) });
 })();
 
 
@@ -30,6 +32,8 @@
     return f(n - 1);
   }
   assertEquals("foo", f(1e6));
+  %OptimizeFunctionOnNextCall(f);
+  assertEquals("foo", f(1e6));
 })();
 
 
@@ -49,6 +53,9 @@
   }
   assertEquals("foo", f(1e6));
   assertEquals("bar", f(1e6 + 1));
+  %OptimizeFunctionOnNextCall(f);
+  assertEquals("foo", f(1e6));
+  assertEquals("bar", f(1e6 + 1));
 })();
 
 
@@ -61,9 +68,14 @@
     if (n <= 0) {
      return "foo";
    }
-    return f(n - 1);
+    return f_bound(n - 1);
   }
-  var f = f0.bind({});
+  var f_bound = f0.bind({});
+  function f(n) {
+    return f_bound(n);
+  }
+  assertEquals("foo", f(1e6));
+  %OptimizeFunctionOnNextCall(f);
   assertEquals("foo", f(1e6));
 })();
 
 
@@ -74,17 +86,22 @@
     if (n <= 0) {
      return "foo";
    }
-    return g(n - 1);
+    return g_bound(n - 1);
   }
   function g0(n){
     if (n <= 0) {
      return "bar";
    }
-    return f(n - 1);
+    return f_bound(n - 1);
   }
-  var f = f0.bind({});
-  var g = g0.bind({});
-
+  var f_bound = f0.bind({});
+  var g_bound = g0.bind({});
+  function f(n) {
+    return f_bound(n);
+  }
+  assertEquals("foo", f(1e6));
+  assertEquals("bar", f(1e6 + 1));
+  %OptimizeFunctionOnNextCall(f);
   assertEquals("foo", f(1e6));
   assertEquals("bar", f(1e6 + 1));
 })();
diff --git a/test/mjsunit/function-caller.js b/test/mjsunit/function-caller.js
index a2c54bbfd3..84f3cbed2f 100644
--- a/test/mjsunit/function-caller.js
+++ b/test/mjsunit/function-caller.js
@@ -47,7 +47,8 @@ f(null);
 eval('f(null)');
 
 // Check called from strict builtin functions.
-[null, null].sort(f);
+// [null, null].sort(f);  // Does not work because sort tail calls.
+[null].forEach(f, null);
 
 // Check called from sloppy builtin functions.
 "abel".replace(/b/g, function h() {
diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status
index 3c5f902901..85e49f3071 100644
--- a/test/mjsunit/mjsunit.status
+++ b/test/mjsunit/mjsunit.status
@@ -43,6 +43,9 @@
   # This test non-deterministically runs out of memory on Windows ia32.
   'regress/regress-crbug-160010': [SKIP],
 
+  # Issue 4698: not fully supported by Turbofan yet
+  'es6/tail-call-simple': [SKIP],
+
   # Issue 3389: deopt_every_n_garbage_collections is unsafe
   'regress/regress-2653': [SKIP],
 
diff --git a/test/mjsunit/strict-mode.js b/test/mjsunit/strict-mode.js
index 6beb9c667a..63dc9d0bda 100644
--- a/test/mjsunit/strict-mode.js
+++ b/test/mjsunit/strict-mode.js
@@ -1149,7 +1149,9 @@ function CheckArgumentsPillDescriptor(func, name) {
 
   function strict() {
     "use strict";
-    return return_my_caller();
+    // Returning result via local variable to avoid tail call optimization.
+    var res = return_my_caller();
+    return res;
   }
   assertSame(null, strict());
 
@@ -1163,7 +1165,9 @@ function CheckArgumentsPillDescriptor(func, name) {
 (function TestNonStrictFunctionCallerPill() {
   function strict(n) {
     "use strict";
-    return non_strict(n);
+    // Returning result via local variable to avoid tail call optimization.
+    var res = non_strict(n);
+    return res;
   }
 
   function recurse(n, then) {
@@ -1191,7 +1195,9 @@ function CheckArgumentsPillDescriptor(func, name) {
 (function TestNonStrictFunctionCallerDescriptorPill() {
   function strict(n) {
     "use strict";
-    return non_strict(n);
+    // Returning result via local variable to avoid tail call optimization.
+    var res = non_strict(n);
+    return res;
   }
 
   function recurse(n, then) {
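
For context, the change above can be exercised end to end with a small strict-mode script in the spirit of tail-call-simple.js. This is a sketch under assumptions, not part of the patch: it presumes a d8 shell built with this change, run with --trace (the d8 flag behind FLAG_trace, which drives the Runtime_TraceEnter/TraceExit/TraceTailCall hooks) and --allow-natives-syntax (for %OptimizeFunctionOnNextCall), plus whichever flag stages ES6 tail calls in the build (its name is not shown in this patch); the file name is hypothetical.

// tail-call-demo.js -- hypothetical demo, not part of this patch.
"use strict";  // ES6 proper tail calls apply in strict mode only.

function f(n) {
  if (n <= 0) return "foo";
  // Tail position: with this patch, the profiling counters are updated
  // via EmitProfilingCounterHandlingForReturnSequence(true) before the
  // call, and under --trace each hop prints "} -> tail call ->".
  return f(n - 1);
}

// 1e6-deep recursion, mirroring tail-call-simple.js: without tail calls
// reusing the caller's frame, this would overflow the stack.
print(f(1e6));
%OptimizeFunctionOnNextCall(f);  // Needs --allow-natives-syntax.
print(f(1e6));

Note that the optimized run goes through TurboFan, which is why the patch also skips es6/tail-call-simple in mjsunit.status until issue 4698 is resolved.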