[turbofan] Introduce JSCallForwardVarargs operator.

We turn a JSCallFunction node for

  f.apply(receiver, arguments)

into a JSCallForwardVarargs node when arguments refers to the arguments
of the outermost optimized code object (i.e. not to inlined arguments),
the apply method refers to Function.prototype.apply, and there is no
other use of arguments except in frame states.
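
For illustration, a minimal sketch of JavaScript code this applies to
(the function names below are hypothetical, not taken from this CL):

  function sum(a, b, c) { return a + b + c; }

  function forward() {
    // `arguments` refers to forward's own (non-inlined) arguments,
    // `apply` resolves to Function.prototype.apply, and `arguments` has
    // no other uses outside of frame states, so this call can become
    // JSCallForwardVarargs, which just re-pushes the incoming arguments
    // and calls `sum` without materializing an arguments object.
    return sum.apply(undefined, arguments);
  }

  forward(1, 2, 3);  // 6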

We also replace the arguments node in the graph with a marker for the
Deoptimizer, similar to Crankshaft, to make sure we don't materialize the
unused arguments object just for the sake of deoptimization. We plan to
replace this with a saner EscapeAnalysis-based solution soon.

R=jarin@chromium.org
BUG=v8:5267,v8:5726

Review-Url: https://codereview.chromium.org/2655233002
Cr-Commit-Position: refs/heads/master@{#42680}
Author: bmeurer, 2017-01-26 01:29:56 -08:00 (committed by Commit bot)
Commit: 69747e2658 (parent: 7376e12e00)
36 changed files with 705 additions and 38 deletions

@ -179,6 +179,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// r2 : start index (to support rest parameters)
// r1 : the target to call
Register registers[] = {r1, r2};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {

@ -200,6 +200,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// x1: target
// x2: start index (to support rest parameters)
Register registers[] = {x1, x2};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {

@ -2249,6 +2249,72 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- r1 : the target to call (can be any Object)
// -- r2 : start index (to support rest parameters)
// -- lr : return address.
// -- sp[0] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ldr(ip, MemOperand(r3, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &arguments_adaptor);
{
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r0, FieldMemOperand(
r0, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(r3, fp);
}
__ b(&arguments_done);
__ bind(&arguments_adaptor);
{
// Load the length from the ArgumentsAdaptorFrame.
__ ldr(r0, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
Label stack_empty, stack_done, stack_overflow;
__ SmiUntag(r0);
__ sub(r0, r0, r2, SetCC);
__ b(le, &stack_empty);
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, r0, r2, &stack_overflow);
// Forward the arguments from the caller frame.
{
Label loop;
__ add(r3, r3, Operand(kPointerSize));
__ mov(r2, r0);
__ bind(&loop);
{
__ ldr(ip, MemOperand(r3, r2, LSL, kPointerSizeLog2));
__ push(ip);
__ sub(r2, r2, Operand(1), SetCC);
__ b(ne, &loop);
}
}
}
__ b(&stack_done);
__ bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ mov(r0, Operand(0));
}
__ bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -2333,6 +2333,72 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- x1 : the target to call (can be any Object)
// -- x2 : start index (to support rest parameters)
// -- lr : return address.
// -- sp[0] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ Ldr(x3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ Ldr(x4, MemOperand(x3, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Cmp(x4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ B(eq, &arguments_adaptor);
{
__ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ Ldr(x0, FieldMemOperand(x0, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(x0, FieldMemOperand(
x0, SharedFunctionInfo::kFormalParameterCountOffset));
__ Mov(x3, fp);
}
__ B(&arguments_done);
__ Bind(&arguments_adaptor);
{
// Just load the length from ArgumentsAdaptorFrame.
__ Ldrsw(x0, UntagSmiMemOperand(
x3, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ Bind(&arguments_done);
Label stack_empty, stack_done, stack_overflow;
__ Subs(x0, x0, x2);
__ B(le, &stack_empty);
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, x0, x2, &stack_overflow);
// Forward the arguments from the caller frame.
{
Label loop;
__ Add(x3, x3, kPointerSize);
__ Mov(x2, x0);
__ bind(&loop);
{
__ Ldr(x4, MemOperand(x3, x2, LSL, kPointerSizeLog2));
__ Push(x4);
__ Subs(x2, x2, 1);
__ B(ne, &loop);
}
}
}
__ B(&stack_done);
__ Bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ Mov(x0, 0);
}
__ Bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -2,8 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/builtins/builtins.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/macro-assembler.h"
namespace v8 {
namespace internal {
@ -147,5 +148,14 @@ void Builtins::Generate_TailCall_ReceiverIsAny(MacroAssembler* masm) {
Generate_Call(masm, ConvertReceiverMode::kAny, TailCallMode::kAllow);
}
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm) {
Generate_CallForwardVarargs(masm, masm->isolate()->builtins()->Call());
}
void Builtins::Generate_CallFunctionForwardVarargs(MacroAssembler* masm) {
Generate_CallForwardVarargs(masm,
masm->isolate()->builtins()->CallFunction());
}
} // namespace internal
} // namespace v8

@ -75,6 +75,8 @@ namespace internal {
ASM(TailCall_ReceiverIsNotNullOrUndefined) \
ASM(TailCall_ReceiverIsAny) \
ASM(CallWithSpread) \
ASM(CallForwardVarargs) \
ASM(CallFunctionForwardVarargs) \
\
/* Construct */ \
/* ES6 section 9.2.2 [[Construct]] ( argumentsList, newTarget) */ \
@ -903,6 +905,8 @@ class Builtins {
static void Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
TailCallMode tail_call_mode);
static void Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code);
static void Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode,

@ -2293,6 +2293,86 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- edi : the target to call (can be any Object)
// -- ecx : start index (to support rest parameters)
// -- esp[0] : return address.
// -- esp[4] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &arguments_adaptor, Label::kNear);
{
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
__ mov(eax,
FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(ebx, ebp);
}
__ jmp(&arguments_done, Label::kNear);
__ bind(&arguments_adaptor);
{
// Just load the length from the ArgumentsAdaptorFrame.
__ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
Label stack_empty, stack_done;
__ SmiUntag(eax);
__ sub(eax, ecx);
__ j(less_equal, &stack_empty);
{
// Check for stack overflow.
{
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
Label done;
__ LoadRoot(ecx, Heap::kRealStackLimitRootIndex);
// Make ecx the space we have left. The stack might already be
// overflowed here which will cause ecx to become negative.
__ neg(ecx);
__ add(ecx, esp);
__ sar(ecx, kPointerSizeLog2);
// Check if the arguments will overflow the stack.
__ cmp(ecx, eax);
__ j(greater, &done, Label::kNear); // Signed comparison.
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&done);
}
// Forward the arguments from the caller frame.
{
Label loop;
__ mov(ecx, eax);
__ pop(edx);
__ bind(&loop);
{
__ Push(Operand(ebx, ecx, times_pointer_size, 1 * kPointerSize));
__ dec(ecx);
__ j(not_zero, &loop);
}
__ push(edx);
}
}
__ jmp(&stack_done, Label::kNear);
__ bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ Move(eax, Immediate(0));
}
__ bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -2270,6 +2270,72 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- a1 : the target to call (can be any Object)
// -- a2 : start index (to support rest parameters)
// -- ra : return address.
// -- sp[0] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Branch(&arguments_adaptor, eq, a0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
{
__ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
__ lw(a0,
FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(a3, fp);
}
__ Branch(&arguments_done);
__ bind(&arguments_adaptor);
{
// Just get the length from the ArgumentsAdaptorFrame.
__ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
Label stack_empty, stack_done, stack_overflow;
__ SmiUntag(a0);
__ Subu(a0, a0, a2);
__ Branch(&stack_empty, le, a0, Operand(zero_reg));
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
// Forward the arguments from the caller frame.
{
Label loop;
__ mov(a2, a0);
__ bind(&loop);
{
__ Lsa(at, a3, a2, kPointerSizeLog2);
__ lw(at, MemOperand(at, 1 * kPointerSize));
__ push(at);
__ Subu(a2, a2, Operand(1));
__ Branch(&loop, ne, a2, Operand(zero_reg));
}
}
}
__ Branch(&stack_done);
__ bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ li(a0, Operand(0));
}
__ bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -2301,6 +2301,72 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- a1 : the target to call (can be any Object)
// -- a2 : start index (to support rest parameters)
// -- ra : return address.
// -- sp[0] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ ld(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Branch(&arguments_adaptor, eq, a0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
{
__ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ld(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
__ lw(a0,
FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(a3, fp);
}
__ Branch(&arguments_done);
__ bind(&arguments_adaptor);
{
// Just get the length from the ArgumentsAdaptorFrame.
__ lw(a0, UntagSmiMemOperand(
a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
Label stack_empty, stack_done, stack_overflow;
__ Subu(a0, a0, a2);
__ Branch(&stack_empty, le, a0, Operand(zero_reg));
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, a0, a4, a5, &stack_overflow);
// Forward the arguments from the caller frame.
{
Label loop;
__ mov(a2, a0);
__ bind(&loop);
{
__ Dlsa(at, a3, a2, kPointerSizeLog2);
__ ld(at, MemOperand(at, 1 * kPointerSize));
__ push(at);
__ Subu(a2, a2, Operand(1));
__ Branch(&loop, ne, a2, Operand(zero_reg));
}
}
}
__ Branch(&stack_done);
__ bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ mov(a0, zero_reg);
}
__ bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -720,24 +720,23 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ jmp(rcx);
}
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Register scratch1, Register scratch2,
Label* stack_overflow) {
static void Generate_StackOverflowCheck(
MacroAssembler* masm, Register num_args, Register scratch,
Label* stack_overflow,
Label::Distance stack_overflow_distance = Label::kFar) {
// Check the stack for overflow. We are not trying to catch
// interruptions (e.g. debug break and preemption) here, so the "real stack
// limit" is checked.
__ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
__ movp(scratch2, rsp);
// Make scratch2 the space we have left. The stack might already be overflowed
// here which will cause scratch2 to become negative.
__ subp(scratch2, scratch1);
// Make scratch1 the space we need for the array when it is unrolled onto the
// stack.
__ movp(scratch1, num_args);
__ shlp(scratch1, Immediate(kPointerSizeLog2));
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ movp(scratch, rsp);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ subp(scratch, kScratchRegister);
__ sarp(scratch, Immediate(kPointerSizeLog2));
// Check if the arguments will overflow the stack.
__ cmpp(scratch2, scratch1);
__ j(less_equal, stack_overflow); // Signed comparison.
__ cmpp(scratch, num_args);
// Signed comparison.
__ j(less_equal, stack_overflow, stack_overflow_distance);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
@ -779,7 +778,7 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
__ addp(rcx, Immediate(1)); // Add one for receiver.
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, rcx, rdx, r8, &stack_overflow);
Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
// Pop return address to allow tail-call after pushing arguments.
__ PopReturnAddressTo(kScratchRegister);
@ -828,7 +827,7 @@ void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
Label stack_overflow;
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow);
Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);
// Pop return address to allow tail-call after pushing arguments.
__ PopReturnAddressTo(kScratchRegister);
@ -890,7 +889,7 @@ void Builtins::Generate_InterpreterPushArgsAndConstructArray(
__ addp(r8, Immediate(1)); // Add one for receiver.
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, r8, rdi, r9, &stack_overflow);
Generate_StackOverflowCheck(masm, r8, rdi, &stack_overflow);
// Pop return address to allow tail-call after pushing arguments.
__ PopReturnAddressTo(kScratchRegister);
@ -2147,7 +2146,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
// The registers rcx and r8 will be modified. The register rbx is only read.
Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@ -2169,7 +2168,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
EnterArgumentsAdaptorFrame(masm);
// The registers rcx and r8 will be modified. The register rbx is only read.
Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@ -2376,6 +2375,72 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
}
// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- rdi : the target to call (can be any Object)
// -- rcx : start index (to support rest parameters)
// -- rsp[0] : return address.
// -- rsp[8] : thisArgument
// -----------------------------------
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &arguments_adaptor, Label::kNear);
{
__ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
__ LoadSharedFunctionInfoSpecialField(
rax, rax, SharedFunctionInfo::kFormalParameterCountOffset);
__ movp(rbx, rbp);
}
__ jmp(&arguments_done, Label::kNear);
__ bind(&arguments_adaptor);
{
__ SmiToInteger32(
rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
Label stack_empty, stack_done, stack_overflow;
__ subl(rax, rcx);
__ j(less_equal, &stack_empty);
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
// Forward the arguments from the caller frame.
{
Label loop;
__ movl(rcx, rax);
__ Pop(r8);
__ bind(&loop);
{
StackArgumentsAccessor args(rbx, rcx, ARGUMENTS_DONT_CONTAIN_RECEIVER);
__ Push(args.GetArgumentOperand(0));
__ decl(rcx);
__ j(not_zero, &loop);
}
__ Push(r8);
}
}
__ jmp(&stack_done, Label::kNear);
__ bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&stack_empty);
{
// We just pass the receiver, which is already on the stack.
__ Set(rax, 0);
}
__ bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
namespace {
// Drops top JavaScript frame and an arguments adaptor frame below it (if

@ -385,6 +385,18 @@ Callable CodeFactory::CallFunction(Isolate* isolate, ConvertReceiverMode mode) {
CallTrampolineDescriptor(isolate));
}
// static
Callable CodeFactory::CallForwardVarargs(Isolate* isolate) {
return Callable(isolate->builtins()->CallForwardVarargs(),
CallForwardVarargsDescriptor(isolate));
}
// static
Callable CodeFactory::CallFunctionForwardVarargs(Isolate* isolate) {
return Callable(isolate->builtins()->CallFunctionForwardVarargs(),
CallForwardVarargsDescriptor(isolate));
}
// static
Callable CodeFactory::Construct(Isolate* isolate) {
return Callable(isolate->builtins()->Construct(),

@ -168,6 +168,8 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable CallWithSpread(Isolate* isolate);
static Callable CallFunction(
Isolate* isolate, ConvertReceiverMode mode = ConvertReceiverMode::kAny);
static Callable CallForwardVarargs(Isolate* isolate);
static Callable CallFunctionForwardVarargs(Isolate* isolate);
static Callable Construct(Isolate* isolate);
static Callable ConstructWithSpread(Isolate* isolate);
static Callable ConstructFunction(Isolate* isolate);

@ -703,6 +703,10 @@ void CodeGenerator::TranslateStateValueDescriptor(
TranslateStateValueDescriptor(field.desc, field.nested, translation,
iter);
}
} else if (desc->IsArguments()) {
if (translation != nullptr) {
translation->BeginArgumentsObject(0);
}
} else if (desc->IsDuplicate()) {
if (translation != nullptr) {
translation->DuplicateObject(static_cast<int>(desc->id()));

@ -1248,6 +1248,13 @@ const Operator* CommonOperatorBuilder::TypedStateValues(
TypedStateValueInfo(types, bitmask)); // parameters
}
const Operator* CommonOperatorBuilder::ArgumentsObjectState() {
return new (zone()) Operator( // --
IrOpcode::kArgumentsObjectState, Operator::kPure, // opcode
"ArgumentsObjectState", // name
0, 0, 0, 1, 0, 0); // counts
}
const Operator* CommonOperatorBuilder::ObjectState(int pointer_slots) {
return new (zone()) Operator1<int>( // --
IrOpcode::kObjectState, Operator::kPure, // opcode

@ -371,6 +371,7 @@ class V8_EXPORT_PRIVATE CommonOperatorBuilder final
const Operator* StateValues(int arguments, SparseInputMask bitmask);
const Operator* TypedStateValues(const ZoneVector<MachineType>* types,
SparseInputMask bitmask);
const Operator* ArgumentsObjectState();
const Operator* ObjectState(int pointer_slots);
const Operator* TypedObjectState(const ZoneVector<MachineType>* types);
const Operator* FrameState(BailoutId bailout_id,

@ -452,6 +452,7 @@ InstructionOperand OperandForDeopt(Isolate* isolate, OperandGenerator* g,
return g->UseImmediate(input);
}
case IrOpcode::kArgumentsObjectState:
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
UNREACHABLE();
@ -507,6 +508,10 @@ size_t InstructionSelector::AddOperandToStateValueDescriptor(
}
switch (input->opcode()) {
case IrOpcode::kArgumentsObjectState: {
values->PushArguments();
return 0;
}
case IrOpcode::kObjectState: {
UNREACHABLE();
return 0;

@ -1122,6 +1122,7 @@ std::ostream& operator<<(std::ostream& os, const Constant& constant);
class FrameStateDescriptor;
enum class StateValueKind : uint8_t {
kArguments,
kPlain,
kOptimizedOut,
kNested,
@ -1135,6 +1136,10 @@ class StateValueDescriptor {
type_(MachineType::AnyTagged()),
id_(0) {}
static StateValueDescriptor Arguments() {
return StateValueDescriptor(StateValueKind::kArguments,
MachineType::AnyTagged(), 0);
}
static StateValueDescriptor Plain(MachineType type) {
return StateValueDescriptor(StateValueKind::kPlain, type, 0);
}
@ -1151,10 +1156,11 @@ class StateValueDescriptor {
MachineType::AnyTagged(), id);
}
int IsPlain() { return kind_ == StateValueKind::kPlain; }
int IsOptimizedOut() { return kind_ == StateValueKind::kOptimizedOut; }
int IsNested() { return kind_ == StateValueKind::kNested; }
int IsDuplicate() { return kind_ == StateValueKind::kDuplicate; }
bool IsArguments() const { return kind_ == StateValueKind::kArguments; }
bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
bool IsNested() const { return kind_ == StateValueKind::kNested; }
bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
MachineType type() const { return type_; }
size_t id() const { return id_; }
@ -1223,6 +1229,7 @@ class StateValueList {
nested_.push_back(nested);
return nested;
}
void PushArguments() { fields_.push_back(StateValueDescriptor::Arguments()); }
void PushDuplicate(size_t id) {
fields_.push_back(StateValueDescriptor::Duplicate(id));
}

@ -104,18 +104,11 @@ Reduction JSCallReducer::ReduceFunctionPrototypeApply(Node* node) {
if (edge.from() == node) continue;
return NoChange();
}
// Get to the actual frame state from which to extract the arguments;
// we can only optimize this in case the {node} was already inlined into
// some other function (and same for the {arg_array}).
CreateArgumentsType type = CreateArgumentsTypeOf(arg_array->op());
// Check if the arguments can be handled in the fast case (i.e. we don't
// have aliased sloppy arguments), and compute the {start_index} for
// rest parameters.
CreateArgumentsType const type = CreateArgumentsTypeOf(arg_array->op());
Node* frame_state = NodeProperties::GetFrameStateInput(arg_array);
Node* outer_state = frame_state->InputAt(kFrameStateOuterStateInput);
if (outer_state->opcode() != IrOpcode::kFrameState) return NoChange();
FrameStateInfo outer_info = OpParameter<FrameStateInfo>(outer_state);
if (outer_info.type() == FrameStateType::kArgumentsAdaptor) {
// Need to take the parameters from the arguments adaptor.
frame_state = outer_state;
}
FrameStateInfo state_info = OpParameter<FrameStateInfo>(frame_state);
int start_index = 0;
if (type == CreateArgumentsType::kMappedArguments) {
@ -128,11 +121,43 @@ Reduction JSCallReducer::ReduceFunctionPrototypeApply(Node* node) {
if (!state_info.shared_info().ToHandle(&shared)) return NoChange();
start_index = shared->internal_formal_parameter_count();
}
// Check if we are applying to inlined arguments or to the arguments of
// the outermost function.
Node* outer_state = frame_state->InputAt(kFrameStateOuterStateInput);
if (outer_state->opcode() != IrOpcode::kFrameState) {
// TODO(jarin,bmeurer): Support the NewUnmappedArgumentsElement and
// NewRestParameterElements in the EscapeAnalysis and Deoptimizer
// instead, then we don't need this hack.
if (type != CreateArgumentsType::kRestParameter) {
// There are no other uses of the {arg_array} except in StateValues,
// so we just replace {arg_array} with a marker for the Deoptimizer
// that this refers to the arguments object.
Node* arguments = graph()->NewNode(common()->ArgumentsObjectState());
ReplaceWithValue(arg_array, arguments);
}
// Reduce {node} to a JSCallForwardVarargs operation, which just
// re-pushes the incoming arguments and calls the {target}.
node->RemoveInput(0); // Function.prototype.apply
node->RemoveInput(2); // arguments
NodeProperties::ChangeOp(node, javascript()->CallForwardVarargs(
start_index, p.tail_call_mode()));
return Changed(node);
}
// Get to the actual frame state from which to extract the arguments;
// we can only optimize this in case the {node} was already inlined into
// some other function (and same for the {arg_array}).
FrameStateInfo outer_info = OpParameter<FrameStateInfo>(outer_state);
if (outer_info.type() == FrameStateType::kArgumentsAdaptor) {
// Need to take the parameters from the arguments adaptor.
frame_state = outer_state;
}
// Remove the argArray input from the {node}.
node->RemoveInput(static_cast<int>(--arity));
// Add the actual parameters to the {node}, skipping the receiver.
// Add the actual parameters to the {node}, skipping the receiver,
// starting from {start_index}.
Node* const parameters = frame_state->InputAt(kFrameStateParametersInput);
for (int i = start_index + 1; i < state_info.parameter_count(); ++i) {
for (int i = start_index + 1; i < parameters->InputCount(); ++i) {
node->InsertInput(graph()->zone(), static_cast<int>(arity),
parameters->InputAt(i));
++arity;

@ -530,6 +530,22 @@ void JSGenericLowering::LowerJSCallConstructWithSpread(Node* node) {
NodeProperties::ChangeOp(node, common()->Call(desc));
}
void JSGenericLowering::LowerJSCallForwardVarargs(Node* node) {
CallForwardVarargsParameters p = CallForwardVarargsParametersOf(node->op());
Callable callable = CodeFactory::CallForwardVarargs(isolate());
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
if (p.tail_call_mode() == TailCallMode::kAllow) {
flags |= CallDescriptor::kSupportsTailCalls;
}
CallDescriptor* desc = Linkage::GetStubCallDescriptor(
isolate(), zone(), callable.descriptor(), 1, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
Node* start_index = jsgraph()->Uint32Constant(p.start_index());
node->InsertInput(zone(), 0, stub_code);
node->InsertInput(zone(), 2, start_index);
NodeProperties::ChangeOp(node, common()->Call(desc));
}
void JSGenericLowering::LowerJSCallFunction(Node* node) {
CallFunctionParameters const& p = CallFunctionParametersOf(node->op());
int const arg_count = static_cast<int>(p.arity() - 2);

@ -118,6 +118,17 @@ const CallFunctionParameters& CallFunctionParametersOf(const Operator* op) {
return OpParameter<CallFunctionParameters>(op);
}
std::ostream& operator<<(std::ostream& os,
CallForwardVarargsParameters const& p) {
return os << p.start_index() << ", " << p.tail_call_mode();
}
CallForwardVarargsParameters const& CallForwardVarargsParametersOf(
Operator const* op) {
DCHECK_EQ(IrOpcode::kJSCallForwardVarargs, op->opcode());
return OpParameter<CallForwardVarargsParameters>(op);
}
bool operator==(CallFunctionWithSpreadParameters const& lhs,
CallFunctionWithSpreadParameters const& rhs) {
return lhs.arity() == rhs.arity();
@ -724,6 +735,16 @@ const Operator* JSOperatorBuilder::ToBoolean(ToBooleanHints hints) {
hints); // parameter
}
const Operator* JSOperatorBuilder::CallForwardVarargs(
uint32_t start_index, TailCallMode tail_call_mode) {
CallForwardVarargsParameters parameters(start_index, tail_call_mode);
return new (zone()) Operator1<CallForwardVarargsParameters>( // --
IrOpcode::kJSCallForwardVarargs, Operator::kNoProperties, // opcode
"JSCallForwardVarargs", // name
2, 1, 1, 1, 1, 2, // counts
parameters); // parameter
}
const Operator* JSOperatorBuilder::CallFunction(
size_t arity, float frequency, VectorSlotPair const& feedback,
ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {

@ -106,6 +106,43 @@ std::ostream& operator<<(std::ostream&,
CallConstructWithSpreadParameters const& CallConstructWithSpreadParametersOf(
Operator const*);
// Defines the flags for a JavaScript call that forwards its parameters.
// This is used as a parameter by JSCallForwardVarargs operators.
class CallForwardVarargsParameters final {
public:
CallForwardVarargsParameters(uint32_t start_index,
TailCallMode tail_call_mode)
: bit_field_(StartIndexField::encode(start_index) |
TailCallModeField::encode(tail_call_mode)) {}
uint32_t start_index() const { return StartIndexField::decode(bit_field_); }
TailCallMode tail_call_mode() const {
return TailCallModeField::decode(bit_field_);
}
bool operator==(CallForwardVarargsParameters const& that) const {
return this->bit_field_ == that.bit_field_;
}
bool operator!=(CallForwardVarargsParameters const& that) const {
return !(*this == that);
}
private:
friend size_t hash_value(CallForwardVarargsParameters const& p) {
return p.bit_field_;
}
typedef BitField<uint32_t, 0, 30> StartIndexField;
typedef BitField<TailCallMode, 31, 1> TailCallModeField;
uint32_t const bit_field_;
};
std::ostream& operator<<(std::ostream&, CallForwardVarargsParameters const&);
CallForwardVarargsParameters const& CallForwardVarargsParametersOf(
Operator const*) WARN_UNUSED_RESULT;
// Defines the arity and the call flags for a JavaScript function call. This is
// used as a parameter by JSCallFunction operators.
class CallFunctionParameters final {
@ -572,6 +609,8 @@ class V8_EXPORT_PRIVATE JSOperatorBuilder final
const Operator* CreateLiteralRegExp(Handle<String> constant_pattern,
int literal_flags, int literal_index);
const Operator* CallForwardVarargs(uint32_t start_index,
TailCallMode tail_call_mode);
const Operator* CallFunction(
size_t arity, float frequency = 0.0f,
VectorSlotPair const& feedback = VectorSlotPair(),

@ -2023,6 +2023,34 @@ Reduction JSTypedLowering::ReduceJSCallConstruct(Node* node) {
return NoChange();
}
Reduction JSTypedLowering::ReduceJSCallForwardVarargs(Node* node) {
DCHECK_EQ(IrOpcode::kJSCallForwardVarargs, node->opcode());
CallForwardVarargsParameters p = CallForwardVarargsParametersOf(node->op());
Node* target = NodeProperties::GetValueInput(node, 0);
Type* target_type = NodeProperties::GetType(target);
// Check if {target} is a JSFunction.
if (target_type->Is(Type::Function())) {
// Compute flags for the call.
CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
if (p.tail_call_mode() == TailCallMode::kAllow) {
flags |= CallDescriptor::kSupportsTailCalls;
}
// Patch {node} to an indirect call via CallFunctionForwardVarargs.
Callable callable = CodeFactory::CallFunctionForwardVarargs(isolate());
node->InsertInput(graph()->zone(), 0,
jsgraph()->HeapConstant(callable.code()));
node->InsertInput(graph()->zone(), 2, jsgraph()->Constant(p.start_index()));
NodeProperties::ChangeOp(
node,
common()->Call(Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(), 1, flags)));
return Changed(node);
}
return NoChange();
}
Reduction JSTypedLowering::ReduceJSCallFunction(Node* node) {
DCHECK_EQ(IrOpcode::kJSCallFunction, node->opcode());
@ -2386,6 +2414,8 @@ Reduction JSTypedLowering::Reduce(Node* node) {
return ReduceJSConvertReceiver(node);
case IrOpcode::kJSCallConstruct:
return ReduceJSCallConstruct(node);
case IrOpcode::kJSCallForwardVarargs:
return ReduceJSCallForwardVarargs(node);
case IrOpcode::kJSCallFunction:
return ReduceJSCallFunction(node);
case IrOpcode::kJSForInNext:

@ -71,6 +71,7 @@ class V8_EXPORT_PRIVATE JSTypedLowering final
Reduction ReduceJSToObject(Node* node);
Reduction ReduceJSConvertReceiver(Node* node);
Reduction ReduceJSCallConstruct(Node* node);
Reduction ReduceJSCallForwardVarargs(Node* node);
Reduction ReduceJSCallFunction(Node* node);
Reduction ReduceJSForInNext(Node* node);
Reduction ReduceJSLoadMessage(Node* node);

@ -59,6 +59,7 @@
V(FrameState) \
V(StateValues) \
V(TypedStateValues) \
V(ArgumentsObjectState) \
V(ObjectState) \
V(TypedObjectState) \
V(Call) \
@ -160,6 +161,7 @@
#define JS_OTHER_OP_LIST(V) \
V(JSCallConstruct) \
V(JSCallConstructWithSpread) \
V(JSCallForwardVarargs) \
V(JSCallFunction) \
V(JSCallFunctionWithSpread) \
V(JSCallRuntime) \

@ -95,6 +95,7 @@ bool OperatorProperties::HasFrameStateInput(const Operator* op) {
// Call operations
case IrOpcode::kJSCallConstruct:
case IrOpcode::kJSCallConstructWithSpread:
case IrOpcode::kJSCallForwardVarargs:
case IrOpcode::kJSCallFunction:
case IrOpcode::kJSCallFunctionWithSpread:

@ -2661,6 +2661,7 @@ class RepresentationSelector {
case IrOpcode::kBeginRegion:
case IrOpcode::kProjection:
case IrOpcode::kOsrValue:
case IrOpcode::kArgumentsObjectState:
// All JavaScript operators except JSToNumber have uniform handling.
#define OPCODE_CASE(name) case IrOpcode::k##name:
JS_SIMPLE_BINOP_LIST(OPCODE_CASE)

@ -833,6 +833,10 @@ Type* Typer::Visitor::TypeTypedStateValues(Node* node) {
return Type::Internal();
}
Type* Typer::Visitor::TypeArgumentsObjectState(Node* node) {
return Type::Internal();
}
Type* Typer::Visitor::TypeObjectState(Node* node) { return Type::Internal(); }
Type* Typer::Visitor::TypeTypedObjectState(Node* node) {
@ -1575,6 +1579,9 @@ Type* Typer::Visitor::JSCallFunctionTyper(Type* fun, Typer* t) {
return Type::NonInternal();
}
Type* Typer::Visitor::TypeJSCallForwardVarargs(Node* node) {
return TypeUnaryOp(node, JSCallFunctionTyper);
}
Type* Typer::Visitor::TypeJSCallFunction(Node* node) {
// TODO(bmeurer): We could infer better types if we wouldn't ignore the

@ -490,6 +490,7 @@ void Verifier::Visitor::Check(Node* node) {
}
case IrOpcode::kStateValues:
case IrOpcode::kTypedStateValues:
case IrOpcode::kArgumentsObjectState:
case IrOpcode::kObjectState:
case IrOpcode::kTypedObjectState:
// TODO(jarin): what are the constraints on these?
@ -680,6 +681,7 @@ void Verifier::Visitor::Check(Node* node) {
// Type is Receiver.
CheckTypeIs(node, Type::Receiver());
break;
case IrOpcode::kJSCallForwardVarargs:
case IrOpcode::kJSCallFunction:
case IrOpcode::kJSCallFunctionWithSpread:
case IrOpcode::kJSCallRuntime:

@ -3692,7 +3692,6 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewSloppyArguments);
}
void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- edi : function

@ -178,6 +178,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// ecx : start index (to support rest parameters)
// edi : the target to call
Register registers[] = {edi, ecx};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {

@ -433,6 +433,15 @@ void CallTrampolineDescriptor::InitializePlatformIndependent(
machine_types);
}
void CallForwardVarargsDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
// kTarget, kStartIndex
MachineType machine_types[] = {MachineType::AnyTagged(),
MachineType::Int32()};
data->InitializePlatformIndependent(arraysize(machine_types), 0,
machine_types);
}
void ConstructStubDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) {
// kFunction, kNewTarget, kActualArgumentsCount, kAllocationSite

@ -46,6 +46,7 @@ class PlatformInterfaceDescriptor;
V(CallFunction) \
V(CallFunctionWithFeedback) \
V(CallFunctionWithFeedbackAndVector) \
V(CallForwardVarargs) \
V(CallConstruct) \
V(CallTrampoline) \
V(ConstructStub) \
@ -560,6 +561,12 @@ class CallTrampolineDescriptor : public CallInterfaceDescriptor {
CallInterfaceDescriptor)
};
class CallForwardVarargsDescriptor : public CallInterfaceDescriptor {
public:
DEFINE_PARAMETERS(kTarget, kStartIndex)
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(CallForwardVarargsDescriptor,
CallInterfaceDescriptor)
};
class ConstructStubDescriptor : public CallInterfaceDescriptor {
public:

@ -177,6 +177,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// a1: the target to call
// a2: start index (to support rest parameters)
Register registers[] = {a1, a2};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {

@ -177,6 +177,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// a1: the target to call
// a2: start index (to support rest parameters)
Register registers[] = {a1, a2};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {

@ -182,6 +182,10 @@ RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
JavaScriptFrameIterator top_it(isolate);
JavaScriptFrame* top_frame = top_it.frame();
isolate->set_context(Context::cast(top_frame->context()));
} else {
// TODO(turbofan): We currently need the native context to materialize
// the arguments object, but only to get to its map.
isolate->set_context(function->native_context());
}
// Make sure to materialize objects before causing any allocation.

@ -175,6 +175,13 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void CallForwardVarargsDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// rcx : start index (to support rest parameters)
// rdi : the target to call
Register registers[] = {rdi, rcx};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void ConstructStubDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {