PPC/s390: [TypeFeedbackVector] Root literal arrays in function literals slots

Port b8294aaa97

Original Commit Message:

    Literal arrays and feedback vectors for a function can be garbage
    collected if we don't have a rooted closure for the function, which
    happens often. It's expensive to come back from this (recreating
    boilerplates and gathering feedback again), and the cost is
    disproportionate if the function was inlined into optimized code.

    To guard against losing these arrays when we need them, we'll now
    create literal arrays when creating the feedback vector for the outer
    closure, and root them strongly in that vector.

R=mvstanton@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=v8:5456
LOG=N

Review-Url: https://codereview.chromium.org/2626263002
Cr-Commit-Position: refs/heads/master@{#42280}
This commit is contained in:
bjaideep 2017-01-12 09:57:01 -08:00 committed by Commit bot
parent df5417ae76
commit be781e51f5
2 changed files with 16 additions and 29 deletions

View File

@@ -1361,6 +1361,12 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register closure = r4;
Register map = r9;
Register index = r5;
// Do we have a valid feedback vector?
__ LoadP(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ LoadP(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
__ LoadP(map,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(map,
@@ -1369,7 +1375,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(2), r0);
__ blt(&gotta_call_runtime);
// Find literals.
// r10 : native context
// r5 : length / index
// r9 : optimized code map
@@ -1390,18 +1395,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ bne(&loop_bottom);
// Literals available?
__ LoadP(temp,
FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Code available?
Register entry = r7;
@@ -1411,7 +1404,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found literals and code. Get them into the closure and return.
// Found code. Get it into the closure and return.
// Store code entry in the closure.
__ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
@@ -1445,7 +1438,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(1), r0);
__ bgt(&loop_top);
// We found neither literals nor code.
// We found no code.
__ b(&gotta_call_runtime);
__ bind(&try_shared);

View File

@@ -1366,6 +1366,12 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register closure = r3;
Register map = r8;
Register index = r4;
// Do we have a valid feedback vector?
__ LoadP(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ LoadP(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
__ LoadP(map,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ LoadP(map,
@@ -1395,18 +1401,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ CmpP(temp, native_context);
__ bne(&loop_bottom, Label::kNear);
// Literals available?
__ LoadP(temp,
FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Code available?
Register entry = r6;
@@ -1416,7 +1410,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found literals and code. Get them into the closure and return.
// Found code. Get it into the closure and return.
// Store code entry in the closure.
__ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
@@ -1450,7 +1444,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ CmpSmiLiteral(index, Smi::FromInt(1), r0);
__ bgt(&loop_top);
// We found neither literals nor code.
// We found no code.
__ b(&gotta_call_runtime);
__ bind(&try_shared);