PPC: [builtins] Sanitize the machinery around Construct calls.

Port 374b6ea210

Original commit message:
    There's no point in collecting feedback for super constructor calls,
    because in all (interesting) cases we can gather (better) feedback from
    other sources (i.e. via inlining or via using a LOAD_IC to get to the
    [[Prototype]] of the target).  So CallConstructStub is now only used
    for new Foo(...args) sites where we want to collect feedback in the
    baseline compiler.  The optimizing compilers, Reflect.construct and
    super constructor calls use the Construct builtin directly, which allows
    us to remove some weird code from the CallConstructStub (and opens the
    possibility for more code sharing with the CallICStub, maybe even going
    for a ConstructICStub).

    Also remove the 100% redundant HCallNew instruction, which is just a
    wrapper for the Construct builtin anyway (indirectly via the
    CallConstructStub).

    Drive-by-fix: Drop unused has_function_cache bit on Code objects.

R=bmeurer@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:4413, v8:4430
LOG=n

Review URL: https://codereview.chromium.org/1471623005

Cr-Commit-Position: refs/heads/master@{#32186}
mbrandy 2015-11-23 12:04:48 -08:00 committed by Commit bot
parent ee72c4079c
commit fe14ba45d6
7 changed files with 48 additions and 108 deletions
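
As the original message above explains, after this change only "new Foo(...args)" sites compiled by the baseline (full-codegen) compiler still go through CallConstructStub to record feedback; super constructor calls, Reflect.construct, and code produced by the optimizing compilers call the Construct builtin directly. A minimal JavaScript illustration of the three kinds of construct call sites involved (the class and function names are made up for illustration and do not refer to V8 internals):

    // Illustration only -- hypothetical names, not V8 code.
    class Base {}
    class Derived extends Base {
      constructor(...args) {
        super(...args);                // super constructor call: uses the Construct builtin directly
      }
    }
    function make(...args) {
      return new Derived(...args);     // new Foo(...args) site: baseline code records feedback here
    }
    Reflect.construct(Base, [1, 2]);   // Reflect.construct: also goes through the Construct builtin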


@@ -4060,19 +4060,6 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
}
void LCodeGen::DoCallNew(LCallNew* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->constructor()).is(r4));
DCHECK(ToRegister(instr->result()).is(r3));
__ mov(r3, Operand(instr->arity()));
// No cell in r5 for construct type feedback in optimized code
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->constructor()).is(r4));


@@ -311,13 +311,6 @@ void LInvokeFunction::PrintDataTo(StringStream* stream) {
}
void LCallNew::PrintDataTo(StringStream* stream) {
stream->Add("= ");
constructor()->PrintTo(stream);
stream->Add(" #%d / ", arity());
}
void LCallNewArray::PrintDataTo(StringStream* stream) {
stream->Add("= ");
constructor()->PrintTo(stream);
@@ -1222,14 +1215,6 @@ LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* context = UseFixed(instr->context(), cp);
LOperand* constructor = UseFixed(instr->constructor(), r4);
LCallNew* result = new (zone()) LCallNew(context, constructor);
return MarkAsCall(DefineFixed(result, r3), instr);
}
LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
LOperand* context = UseFixed(instr->context(), cp);
LOperand* constructor = UseFixed(instr->constructor(), r4);


@@ -33,7 +33,6 @@ class LCodeGen;
V(CallJSFunction) \
V(CallWithDescriptor) \
V(CallFunction) \
V(CallNew) \
V(CallNewArray) \
V(CallRuntime) \
V(CallStub) \
@@ -1825,25 +1824,6 @@ class LCallFunction final : public LTemplateInstruction<1, 2, 2> {
};
class LCallNew final : public LTemplateInstruction<1, 2, 0> {
public:
LCallNew(LOperand* context, LOperand* constructor) {
inputs_[0] = context;
inputs_[1] = constructor;
}
LOperand* context() { return inputs_[0]; }
LOperand* constructor() { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(CallNew, "call-new")
DECLARE_HYDROGEN_ACCESSOR(CallNew)
void PrintDataTo(StringStream* stream) override;
int arity() const { return hydrogen()->argument_count() - 1; }
};
class LCallNewArray final : public LTemplateInstruction<1, 2, 0> {
public:
LCallNewArray(LOperand* context, LOperand* constructor) {


@@ -3064,7 +3064,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ EmitLoadTypeFeedbackVector(r5);
__ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
CallConstructStub stub(isolate());
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
// Restore context register.
@@ -3092,20 +3092,15 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
// constructor invocation.
SetConstructCallPosition(expr, arg_count);
// Load new target into r7.
// Load new target into r6.
VisitForAccumulatorValue(super_call_ref->new_target_var());
__ mr(r7, result_register());
__ mr(r6, result_register());
// Load function and argument count into r4 and r3.
__ mov(r3, Operand(arg_count));
__ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
// Record call targets in unoptimized code.
__ EmitLoadTypeFeedbackVector(r5);
__ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
RecordJSReturnSite(expr);


@@ -1394,11 +1394,11 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
// Use undefined feedback vector
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ LoadP(r4, MemOperand(fp, kFunctionOffset));
__ LoadP(r7, MemOperand(fp, kNewTargetOffset));
__ LoadP(r6, MemOperand(fp, kNewTargetOffset));
// Call the function.
CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Call(masm->isolate()->builtins()->Construct(),
RelocInfo::CONSTRUCT_CALL);
// Leave internal frame.
}
@@ -1675,22 +1675,23 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// the JSFunction on which new was invoked initially)
// -----------------------------------
// Check if target has a [[Construct]] internal method.
// Check if target is a Smi.
Label non_constructor;
__ JumpIfSmi(r4, &non_constructor);
__ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
__ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
__ TestBit(r5, Map::kIsConstructor, r0);
__ beq(&non_constructor, cr0);
// Dispatch based on instance type.
__ CompareInstanceType(r7, r8, JS_FUNCTION_TYPE);
__ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->ConstructFunction(),
RelocInfo::CODE_TARGET, eq);
__ cmpi(r8, Operand(JS_FUNCTION_PROXY_TYPE));
__ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
eq);
// Check if target has a [[Construct]] internal method.
__ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
__ TestBit(r5, Map::kIsConstructor, r0);
__ beq(&non_constructor, cr0);
// Called Construct on an exotic Object with a [[Construct]] internal method.
{
// Overwrite the original receiver with the (original) target.

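A rough pseudo-JavaScript sketch of the dispatch order that the rewritten Builtins::Generate_Construct above implements; the helper names are invented for illustration and do not correspond to real V8 functions:

    // Sketch only: isSmi, instanceTypeOf, mapOf, throwNotAConstructor are hypothetical helpers.
    // New order: Smi check, dispatch on instance type, then the [[Construct]] bit test
    // (previously the bit test came before the instance-type dispatch).
    function Construct(target, newTarget, args) {
      if (isSmi(target)) return throwNotAConstructor(target);
      const type = instanceTypeOf(target);                  // CompareObjectType(r4, r7, r8, ...)
      if (type === 'JS_FUNCTION_TYPE') return ConstructFunction(target, newTarget, args);
      if (type === 'JS_FUNCTION_PROXY_TYPE') return ConstructProxy(target, newTarget, args);
      if (!mapOf(target).isConstructor) return throwNotAConstructor(target);
      // Exotic object with a [[Construct]] internal method: overwrite the original
      // receiver with the target and tail-call its construct stub.
    }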

@@ -2408,35 +2408,25 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
}
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
bool is_super) {
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
// r3 : number of arguments to the construct function
// r4 : the function to call
// r5 : feedback vector
// r6 : slot in feedback vector (Smi)
// r7 : new target (for IsSuperConstructorCall)
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Number-of-arguments register must be smi-tagged to call out.
__ SmiTag(r3);
if (is_super) {
__ Push(r6, r5, r4, r3, r7);
} else {
__ Push(r6, r5, r4, r3);
}
__ Push(r6, r5, r4, r3);
__ CallStub(stub);
if (is_super) {
__ Pop(r6, r5, r4, r3, r7);
} else {
__ Pop(r6, r5, r4, r3);
}
__ Pop(r6, r5, r4, r3);
__ SmiUntag(r3);
}
static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
@@ -2444,7 +2434,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// r4 : the function to call
// r5 : feedback vector
// r6 : slot in feedback vector (Smi)
// r7 : new target (for IsSuperConstructorCall)
Label initialize, done, miss, megamorphic, not_array_function;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
@@ -2518,13 +2507,13 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, is_super);
CallStubInRecordCallTarget(masm, &create_stub);
__ b(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
CallStubInRecordCallTarget(masm, &weak_cell_stub);
__ bind(&done);
}
@@ -2534,7 +2523,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
// r4 : the function to call
// r5 : feedback vector
// r6 : slot in feedback vector (Smi, for RecordCallTarget)
// r7 : new target (for IsSuperConstructorCall)
Label non_function;
// Check that the function is not a smi.
@@ -2543,34 +2531,28 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ CompareObjectType(r4, r8, r8, JS_FUNCTION_TYPE);
__ bne(&non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
GenerateRecordCallTarget(masm);
__ SmiToPtrArrayOffset(r8, r6);
__ add(r8, r5, r8);
// Put the AllocationSite from the feedback vector into r5, or undefined.
__ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize));
__ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset));
__ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
if (CpuFeatures::IsSupported(ISELECT)) {
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ isel(eq, r5, r5, r8);
} else {
Label feedback_register_initialized;
__ beq(&feedback_register_initialized);
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
__ AssertUndefinedOrAllocationSite(r5, r8);
__ SmiToPtrArrayOffset(r8, r6);
__ add(r8, r5, r8);
// Put the AllocationSite from the feedback vector into r5, or undefined.
__ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize));
__ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset));
__ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex);
if (CpuFeatures::IsSupported(ISELECT)) {
__ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
__ isel(eq, r5, r5, r8);
} else {
Label feedback_register_initialized;
__ beq(&feedback_register_initialized);
__ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
__ bind(&feedback_register_initialized);
}
__ AssertUndefinedOrAllocationSite(r5, r8);
// Pass function as new target.
if (IsSuperConstructorCall()) {
__ mr(r6, r7);
} else {
__ mr(r6, r4);
}
__ mr(r6, r4);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).


@@ -204,6 +204,16 @@ void CallTrampolineDescriptor::InitializePlatformSpecific(
}
void ConstructTrampolineDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
// r3 : number of arguments
// r4 : the target to call
// r6 : the new target
Register registers[] = {r4, r6, r3};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void RegExpConstructResultDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {r5, r4, r3};