Revert of [TypeFeedbackVector] Root literal arrays in function literals slots (patchset #7 id:120001 of https://codereview.chromium.org/2620753003/ )

Reason for revert:
gc stress:
https://build.chromium.org/p/client.v8/builders/V8%20Linux%20-%20gc%20stress/builds/8105

also on mac

Original issue's description:
> [TypeFeedbackVector] Root literal arrays in function literals slots
>
> Literal arrays and feedback vectors for a function can be garbage
> collected if we don't have a rooted closure for the function, which
> happens often. It's expensive to come back from this (recreating
> boilerplates and gathering feedback again), and the cost is
> disproportionate if the function was inlined into optimized code.
>
> To guard against losing these arrays when we need them, we'll now
> create literal arrays when creating the feedback vector for the outer
> closure, and root them strongly in that vector.
>
> BUG=v8:5456
>
> Review-Url: https://codereview.chromium.org/2620753003
> Cr-Commit-Position: refs/heads/master@{#42258}
> Committed: 3188780410

TBR=bmeurer@chromium.org,mstarzinger@chromium.org,yangguo@chromium.org,mvstanton@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:5456

Review-Url: https://codereview.chromium.org/2626863004
Cr-Commit-Position: refs/heads/master@{#42260}
This commit is contained in:
machenbach 2017-01-12 02:10:56 -08:00 committed by Commit bot
parent cfc2e5e180
commit 3d9e2ea32d
24 changed files with 522 additions and 318 deletions

View File

@ -1337,19 +1337,12 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register argument_count = r0;
Register closure = r1;
Register new_target = r3;
Register map = argument_count;
Register index = r2;
// Do we have a valid feedback vector?
__ ldr(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ ldr(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
&gotta_call_runtime_no_stack);
__ push(argument_count);
__ push(new_target);
__ push(closure);
Register map = argument_count;
Register index = r2;
__ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ ldr(map,
FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
@ -1357,6 +1350,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ cmp(index, Operand(Smi::FromInt(2)));
__ b(lt, &gotta_call_runtime);
// Find literals.
// r3 : native context
// r2 : length / index
// r0 : optimized code map
@ -1376,6 +1370,20 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ b(ne, &loop_bottom);
// Literals available?
__ ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ ldr(r4, MemOperand(sp, 0));
__ str(temp, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
__ push(index);
__ RecordWriteField(r4, JSFunction::kLiteralsOffset, temp, index,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(index);
// Code available?
Register entry = r4;
@ -1385,7 +1393,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ pop(closure);
// Store code entry in the closure.
__ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
@ -1420,7 +1428,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ cmp(index, Operand(Smi::FromInt(1)));
__ b(gt, &loop_top);
// We found no code.
// We found neither literals nor code.
__ jmp(&gotta_call_runtime);
__ bind(&try_shared);

View File

@ -1347,12 +1347,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register closure = x1;
Register map = x13;
Register index = x2;
// Do we have a valid feedback vector?
__ Ldr(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ Ldr(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
__ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(map,
FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
@ -1360,6 +1354,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ Cmp(index, Operand(2));
__ B(lt, &gotta_call_runtime);
// Find literals.
// x3 : native context
// x2 : length / index
// x13 : optimized code map
@ -1379,6 +1374,17 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Cmp(temp, native_context);
__ B(ne, &loop_bottom);
// Literals available?
__ Ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ Str(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, x7,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Code available?
Register entry = x7;
@ -1388,7 +1394,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
__ RecordWriteCodeEntryField(closure, entry, x5);
@ -1417,7 +1423,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ Cmp(index, Operand(1));
__ B(gt, &loop_top);
// We found no code.
// We found neither literals nor code.
__ B(&gotta_call_runtime);
__ Bind(&try_shared);

View File

@ -120,14 +120,13 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
// Initialize the rest of the function.
Node* empty_fixed_array = HeapConstant(factory->empty_fixed_array());
Node* empty_literals_array = HeapConstant(factory->empty_literals_array());
StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
empty_fixed_array);
StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
empty_fixed_array);
Node* literals_array = LoadFixedArrayElement(
feedback_vector, slot, 0, CodeStubAssembler::SMI_PARAMETERS);
StoreObjectFieldNoWriteBarrier(result, JSFunction::kLiteralsOffset,
literals_array);
empty_literals_array);
StoreObjectFieldNoWriteBarrier(
result, JSFunction::kPrototypeOrInitialMapOffset, TheHoleConstant());
StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,

View File

@ -1025,12 +1025,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register new_target = edx;
Register argument_count = eax;
// Do we have a valid feedback vector?
__ mov(ebx, FieldOperand(closure, JSFunction::kLiteralsOffset));
__ mov(ebx, FieldOperand(ebx, LiteralsArray::kFeedbackVectorOffset));
__ cmp(ebx, masm->isolate()->factory()->undefined_value());
__ j(equal, &gotta_call_runtime_no_stack);
__ push(argument_count);
__ push(new_target);
__ push(closure);
@ -1043,6 +1037,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ cmp(index, Immediate(Smi::FromInt(2)));
__ j(less, &gotta_call_runtime);
// Find literals.
// edx : native context
// ebx : length / index
// eax : optimized code map
@ -1060,6 +1055,20 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ j(not_equal, &loop_bottom);
// Literals available?
__ mov(temp, FieldOperand(map, index, times_half_pointer_size,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ mov(ecx, Operand(esp, 0));
__ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
__ push(index);
__ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
__ pop(index);
// Code available?
Register entry = ecx;
__ mov(entry, FieldOperand(map, index, times_half_pointer_size,
@ -1067,7 +1076,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ pop(closure);
// Store code entry in the closure.
__ lea(entry, FieldOperand(entry, Code::kHeaderSize));
@ -1101,7 +1110,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ cmp(index, Immediate(Smi::FromInt(1)));
__ j(greater, &loop_top);
// We found no code.
// We found neither literals nor code.
__ jmp(&gotta_call_runtime);
__ bind(&try_shared);

View File

@ -1349,24 +1349,18 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register argument_count = a0;
Register closure = a1;
Register new_target = a3;
Register map = a0;
Register index = a2;
// Do we have a valid feedback vector?
__ lw(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ lw(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
&gotta_call_runtime_no_stack);
__ push(argument_count);
__ push(new_target);
__ push(closure);
Register map = a0;
Register index = a2;
__ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
__ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
__ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
// Find literals.
// a3 : native context
// a2 : length / index
// a0 : optimized code map
@ -1386,6 +1380,20 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
SharedFunctionInfo::kOffsetToPreviousContext));
__ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Branch(&loop_bottom, ne, temp, Operand(native_context));
// Literals available?
__ lw(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ lw(t0, MemOperand(sp, 0));
__ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
__ push(index);
__ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(index);
// Code available?
Register entry = t0;
@ -1395,7 +1403,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ pop(closure);
// Store code entry in the closure.
__ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
@ -1430,7 +1438,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
__ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
// We found no code.
// We found neither literals nor code.
__ jmp(&gotta_call_runtime);
__ bind(&try_shared);

View File

@ -1340,24 +1340,18 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register argument_count = a0;
Register closure = a1;
Register new_target = a3;
Register map = a0;
Register index = a2;
// Do we have a valid feedback vector?
__ ld(index, FieldMemOperand(closure, JSFunction::kLiteralsOffset));
__ ld(index, FieldMemOperand(index, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
&gotta_call_runtime_no_stack);
__ push(argument_count);
__ push(new_target);
__ push(closure);
Register map = a0;
Register index = a2;
__ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
__ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset));
__ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
// Find literals.
// a3 : native context
// a2 : length / index
// a0 : optimized code map
@ -1377,6 +1371,20 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
SharedFunctionInfo::kOffsetToPreviousContext));
__ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Branch(&loop_bottom, ne, temp, Operand(native_context));
// Literals available?
__ ld(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ ld(a4, MemOperand(sp, 0));
__ sd(temp, FieldMemOperand(a4, JSFunction::kLiteralsOffset));
__ push(index);
__ RecordWriteField(a4, JSFunction::kLiteralsOffset, temp, index,
kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(index);
// Code available?
Register entry = a4;
@ -1386,7 +1394,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ pop(closure);
// Store code entry in the closure.
__ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
@ -1421,7 +1429,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
__ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
// We found no code.
// We found neither literals nor code.
__ jmp(&gotta_call_runtime);
__ bind(&try_shared);

View File

@ -994,18 +994,13 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
Register closure = rdi;
Register map = r8;
Register index = r9;
// Do we have a valid feedback vector?
__ movp(rbx, FieldOperand(closure, JSFunction::kLiteralsOffset));
__ movp(rbx, FieldOperand(rbx, LiteralsArray::kFeedbackVectorOffset));
__ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
__ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
__ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
__ cmpl(index, Immediate(2));
__ j(less, &gotta_call_runtime);
// Find literals.
// r14 : native context
// r9 : length / index
// r8 : optimized code map
@ -1022,6 +1017,17 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ cmpp(temp, native_context);
__ j(not_equal, &loop_bottom);
// Literals available?
__ movp(temp, FieldOperand(map, index, times_pointer_size,
SharedFunctionInfo::kOffsetToPreviousLiterals));
__ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ JumpIfSmi(temp, &gotta_call_runtime);
// Save the literals in the closure.
__ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
__ movp(r15, index);
__ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15,
kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Code available?
Register entry = rcx;
@ -1030,7 +1036,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
// Found code. Get it into the closure and return.
// Found literals and code. Get them into the closure and return.
__ leap(entry, FieldOperand(entry, Code::kHeaderSize));
__ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
__ RecordWriteCodeEntryField(closure, entry, r15);
@ -1061,7 +1067,7 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ cmpl(index, Immediate(1));
__ j(greater, &loop_top);
// We found no code.
// We found neither literals nor code.
__ jmp(&gotta_call_runtime);
__ bind(&try_shared);

View File

@ -597,13 +597,14 @@ MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
&RuntimeCallStats::CompileGetFromOptimizedCodeMap);
Handle<SharedFunctionInfo> shared(function->shared());
DisallowHeapAllocation no_gc;
Code* code = shared->SearchOptimizedCodeMap(
CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
function->context()->native_context(), osr_ast_id);
if (code != nullptr) {
if (cached.code != nullptr) {
// Caching of optimized code enabled and optimized code found.
DCHECK(!code->marked_for_deoptimization());
if (cached.literals != nullptr) function->set_literals(cached.literals);
DCHECK(!cached.code->marked_for_deoptimization());
DCHECK(function->shared()->is_compiled());
return Handle<Code>(code);
return Handle<Code>(cached.code);
}
return MaybeHandle<Code>();
}
@ -625,9 +626,10 @@ void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
// Cache optimized context-specific code.
Handle<JSFunction> function = info->closure();
Handle<SharedFunctionInfo> shared(function->shared());
Handle<LiteralsArray> literals(function->literals());
Handle<Context> native_context(function->context()->native_context());
SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
info->osr_ast_id());
literals, info->osr_ast_id());
}
bool GetOptimizedCodeNow(CompilationJob* job) {
@ -863,8 +865,10 @@ CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) {
} else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
job->RecordOptimizedCompilationStats();
RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
info->osr_ast_id()) == nullptr) {
if (shared
->SearchOptimizedCodeMap(info->context()->native_context(),
info->osr_ast_id())
.code == nullptr) {
InsertCodeIntoOptimizedCodeMap(info);
}
if (FLAG_trace_opt) {
@ -1752,16 +1756,19 @@ void Compiler::PostInstantiation(Handle<JSFunction> function,
function->MarkForOptimization();
}
Code* code = shared->SearchOptimizedCodeMap(
CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
function->context()->native_context(), BailoutId::None());
if (code != nullptr) {
if (cached.code != nullptr) {
// Caching of optimized code enabled and optimized code found.
DCHECK(!code->marked_for_deoptimization());
DCHECK(!cached.code->marked_for_deoptimization());
DCHECK(function->shared()->is_compiled());
function->ReplaceCode(code);
function->ReplaceCode(cached.code);
}
if (shared->is_compiled()) {
if (cached.literals != nullptr) {
DCHECK(shared->is_compiled());
function->set_literals(cached.literals);
} else if (shared->is_compiled()) {
// TODO(mvstanton): pass pretenure flag to EnsureLiterals.
JSFunction::EnsureLiterals(function);
}

View File

@ -762,15 +762,12 @@ Reduction JSCreateLowering::ReduceJSCreateClosure(Node* node) {
Node* function_map = jsgraph()->HeapConstant(
handle(Map::cast(native_context()->get(function_map_index)), isolate()));
FeedbackVectorSlot slot = p.feedback().slot();
Node* literals = jsgraph()->HeapConstant(
handle(LiteralsArray::cast(p.feedback().vector()->Get(slot)), isolate()));
// Note that it is only safe to embed the raw entry point of the compile
// lazy stub into the code, because that stub is immortal and immovable.
Node* compile_entry = jsgraph()->PointerConstant(
jsgraph()->isolate()->builtins()->CompileLazy()->entry());
Node* empty_fixed_array = jsgraph()->EmptyFixedArrayConstant();
Node* empty_literals_array = jsgraph()->EmptyLiteralsArrayConstant();
Node* the_hole = jsgraph()->TheHoleConstant();
Node* undefined = jsgraph()->UndefinedConstant();
AllocationBuilder a(jsgraph(), effect, control);
@ -779,7 +776,7 @@ Reduction JSCreateLowering::ReduceJSCreateClosure(Node* node) {
a.Store(AccessBuilder::ForMap(), function_map);
a.Store(AccessBuilder::ForJSObjectProperties(), empty_fixed_array);
a.Store(AccessBuilder::ForJSObjectElements(), empty_fixed_array);
a.Store(AccessBuilder::ForJSFunctionLiterals(), literals);
a.Store(AccessBuilder::ForJSFunctionLiterals(), empty_literals_array);
a.Store(AccessBuilder::ForJSFunctionPrototypeOrInitialMap(), the_hole);
a.Store(AccessBuilder::ForJSFunctionSharedFunctionInfo(), shared);
a.Store(AccessBuilder::ForJSFunctionContext(), context);

View File

@ -66,6 +66,11 @@ Node* JSGraph::EmptyFixedArrayConstant() {
HeapConstant(factory()->empty_fixed_array()));
}
Node* JSGraph::EmptyLiteralsArrayConstant() {
return CACHED(kEmptyLiteralsArrayConstant,
HeapConstant(factory()->empty_literals_array()));
}
Node* JSGraph::EmptyStringConstant() {
return CACHED(kEmptyStringConstant, HeapConstant(factory()->empty_string()));
}

View File

@ -49,6 +49,7 @@ class V8_EXPORT_PRIVATE JSGraph : public NON_EXPORTED_BASE(ZoneObject) {
ArgvMode argv_mode = kArgvOnStack,
bool builtin_exit_frame = false);
Node* EmptyFixedArrayConstant();
Node* EmptyLiteralsArrayConstant();
Node* EmptyStringConstant();
Node* FixedArrayMapConstant();
Node* FixedDoubleArrayMapConstant();
@ -166,6 +167,7 @@ class V8_EXPORT_PRIVATE JSGraph : public NON_EXPORTED_BASE(ZoneObject) {
kCEntryStub3Constant,
kCEntryStub1WithBuiltinExitFrameConstant,
kEmptyFixedArrayConstant,
kEmptyLiteralsArrayConstant,
kEmptyStringConstant,
kFixedArrayMapConstant,
kFixedDoubleArrayMapConstant,

View File

@ -412,8 +412,9 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
static const int kSharedOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kOsrAstIdOffset = 2;
static const int kEntryLength = 3;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;
static const int kEntryLength = 4;
static const int kInitialLength = kEntryLength;
int Context::SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared,
@ -435,29 +436,38 @@ int Context::SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared,
return -1;
}
Code* Context::SearchOptimizedCodeMap(SharedFunctionInfo* shared,
BailoutId osr_ast_id) {
void Context::SearchOptimizedCodeMap(SharedFunctionInfo* shared,
BailoutId osr_ast_id, Code** pcode,
LiteralsArray** pliterals) {
DCHECK(this->IsNativeContext());
int entry = SearchOptimizedCodeMapEntry(shared, osr_ast_id);
if (entry != -1) {
FixedArray* code_map = osr_code_table();
DCHECK_LE(entry + kEntryLength, code_map->length());
WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
WeakCell* literals_cell =
WeakCell::cast(code_map->get(entry + kLiteralsOffset));
return cell->cleared() ? nullptr : Code::cast(cell->value());
*pcode = cell->cleared() ? nullptr : Code::cast(cell->value());
*pliterals = literals_cell->cleared()
? nullptr
: LiteralsArray::cast(literals_cell->value());
} else {
*pcode = nullptr;
*pliterals = nullptr;
}
return nullptr;
}
void Context::AddToOptimizedCodeMap(Handle<Context> native_context,
Handle<SharedFunctionInfo> shared,
Handle<Code> code,
Handle<LiteralsArray> literals,
BailoutId osr_ast_id) {
DCHECK(native_context->IsNativeContext());
Isolate* isolate = native_context->GetIsolate();
if (isolate->serializer_enabled()) return;
STATIC_ASSERT(kEntryLength == 3);
STATIC_ASSERT(kEntryLength == 4);
Handle<FixedArray> new_code_map;
int entry;
@ -468,9 +478,12 @@ void Context::AddToOptimizedCodeMap(Handle<Context> native_context,
Handle<FixedArray> old_code_map(native_context->osr_code_table(), isolate);
entry = native_context->SearchOptimizedCodeMapEntry(*shared, osr_ast_id);
if (entry >= 0) {
// Just set the code of the entry.
// Just set the code and literals of the entry.
Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
old_code_map->set(entry + kCachedCodeOffset, *code_cell);
Handle<WeakCell> literals_cell =
isolate->factory()->NewWeakCell(literals);
old_code_map->set(entry + kLiteralsOffset, *literals_cell);
return;
}
@ -494,10 +507,12 @@ void Context::AddToOptimizedCodeMap(Handle<Context> native_context,
}
Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
Handle<WeakCell> shared_cell = isolate->factory()->NewWeakCell(shared);
new_code_map->set(entry + kSharedOffset, *shared_cell);
new_code_map->set(entry + kCachedCodeOffset, *code_cell);
new_code_map->set(entry + kLiteralsOffset, *literals_cell);
new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));
#ifdef DEBUG
@ -508,6 +523,8 @@ void Context::AddToOptimizedCodeMap(Handle<Context> native_context,
DCHECK(cell->cleared() ||
(cell->value()->IsCode() &&
Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
DCHECK(cell->cleared() || cell->value()->IsFixedArray());
DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
}
#endif
@ -548,6 +565,8 @@ void Context::EvictFromOptimizedCodeMap(Code* optimized_code,
code_map->set(dst + kSharedOffset, code_map->get(src + kSharedOffset));
code_map->set(dst + kCachedCodeOffset,
code_map->get(src + kCachedCodeOffset));
code_map->set(dst + kLiteralsOffset,
code_map->get(src + kLiteralsOffset));
code_map->set(dst + kOsrAstIdOffset,
code_map->get(src + kOsrAstIdOffset));
}

View File

@ -567,14 +567,15 @@ class Context: public FixedArray {
// A native context keeps track of all osrd optimized functions.
inline bool OptimizedCodeMapIsCleared();
Code* SearchOptimizedCodeMap(SharedFunctionInfo* shared,
BailoutId osr_ast_id);
void SearchOptimizedCodeMap(SharedFunctionInfo* shared, BailoutId osr_ast_id,
Code** pcode, LiteralsArray** pliterals);
int SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared,
BailoutId osr_ast_id);
static void AddToOptimizedCodeMap(Handle<Context> native_context,
Handle<SharedFunctionInfo> shared,
Handle<Code> code,
Handle<LiteralsArray> literals,
BailoutId osr_ast_id);
// A native context holds a list of all functions with optimized code.

View File

@ -823,33 +823,35 @@ class LiteralFixer {
public:
static void PatchLiterals(FunctionInfoWrapper* compile_info_wrapper,
Handle<SharedFunctionInfo> shared_info,
Isolate* isolate) {
bool feedback_metadata_changed, Isolate* isolate) {
int new_literal_count = compile_info_wrapper->GetLiteralCount();
int old_literal_count = shared_info->num_literals();
// Recreate the literal array and type feedback vector.
// Since the feedback vector roots literal arrays for nested functions,
// we can't simply leave it in place because those nested literal
// array and feedback vectors may have changed structure.
Handle<FixedArray> function_instances =
CollectJSFunctions(shared_info, isolate);
Handle<TypeFeedbackMetadata> feedback_metadata(
shared_info->feedback_metadata());
if (old_literal_count == new_literal_count && !feedback_metadata_changed) {
// If literal count didn't change, simply go over all functions
// and clear literal arrays.
ClearValuesVisitor visitor;
IterateJSFunctions(shared_info, &visitor);
} else {
// When literal count changes, we have to create new array instances.
// Since we cannot create instances when iterating heap, we should first
// collect all functions and fix their literal arrays.
Handle<FixedArray> function_instances =
CollectJSFunctions(shared_info, isolate);
Handle<TypeFeedbackMetadata> feedback_metadata(
shared_info->feedback_metadata());
for (int i = 0; i < function_instances->length(); i++) {
Handle<JSFunction> fun(JSFunction::cast(function_instances->get(i)));
Handle<TypeFeedbackVector> vector =
TypeFeedbackVector::New(isolate, feedback_metadata);
Handle<LiteralsArray> new_literals =
LiteralsArray::New(isolate, vector, new_literal_count);
Handle<LiteralsArray> old_literals(fun->literals(), isolate);
fun->set_literals(*new_literals);
for (int i = 0; i < function_instances->length(); i++) {
Handle<JSFunction> fun(JSFunction::cast(function_instances->get(i)));
Handle<TypeFeedbackVector> vector =
TypeFeedbackVector::New(isolate, feedback_metadata);
Handle<LiteralsArray> new_literals =
LiteralsArray::New(isolate, vector, new_literal_count);
fun->set_literals(*new_literals);
}
// The literals are rooted in a containing feedback vector.
// Replace them there, so new closures have the correct literals.
ReplaceRoots(old_literals, new_literals);
shared_info->set_num_literals(new_literal_count);
}
shared_info->set_num_literals(new_literal_count);
}
private:
@ -870,56 +872,6 @@ class LiteralFixer {
}
}
template <typename Visitor>
static void IterateAllJSFunctions(Heap* heap, Visitor* visitor) {
HeapIterator iterator(heap);
for (HeapObject* obj = iterator.next(); obj != NULL;
obj = iterator.next()) {
if (obj->IsJSFunction()) {
JSFunction* function = JSFunction::cast(obj);
visitor->visit(function);
}
}
}
class ReplaceRootsVisitor {
public:
ReplaceRootsVisitor(Handle<LiteralsArray> old_literals,
Handle<LiteralsArray> new_literals)
: old_literals_(old_literals), new_literals_(new_literals) {}
void visit(JSFunction* fun) {
if (!fun->shared()->is_compiled()) return;
// Look in the type feedback vector for a copy of literals.
TypeFeedbackVector* vector = fun->feedback_vector();
// Note: it's important to get the feedback metadata from the
// type feedback vector, because there may be a new metadata
// object in the SharedFunctionInfo (with a different slot
// configuration).
TypeFeedbackMetadataIterator iter(vector->metadata());
while (iter.HasNext()) {
FeedbackVectorSlot slot = iter.Next();
FeedbackVectorSlotKind kind = iter.kind();
if (kind == FeedbackVectorSlotKind::CREATE_CLOSURE) {
Object* obj = vector->Get(slot);
if (obj == *old_literals_) {
vector->Set(slot, *new_literals_);
}
}
}
}
Handle<LiteralsArray> old_literals_;
Handle<LiteralsArray> new_literals_;
};
static void ReplaceRoots(Handle<LiteralsArray> old_literals,
Handle<LiteralsArray> new_literals) {
ReplaceRootsVisitor replace_visitor(old_literals, new_literals);
IterateAllJSFunctions(old_literals->GetHeap(), &replace_visitor);
}
// Finds all instances of JSFunction that refers to the provided shared_info
// and returns array with them.
static Handle<FixedArray> CollectJSFunctions(
@ -1020,6 +972,7 @@ void LiveEdit::ReplaceFunctionCode(
Handle<SharedFunctionInfo> shared_info = shared_info_wrapper.GetInfo();
Handle<SharedFunctionInfo> new_shared_info =
compile_info_wrapper.GetSharedFunctionInfo();
bool feedback_metadata_changed = false;
if (shared_info->is_compiled()) {
// Take whatever code we can get from the new shared function info. We
@ -1066,6 +1019,8 @@ void LiveEdit::ReplaceFunctionCode(
// Update the type feedback vector, if needed.
Handle<TypeFeedbackMetadata> new_feedback_metadata(
new_shared_info->feedback_metadata());
feedback_metadata_changed =
new_feedback_metadata->DiffersFrom(shared_info->feedback_metadata());
shared_info->set_feedback_metadata(*new_feedback_metadata);
}
@ -1074,7 +1029,8 @@ void LiveEdit::ReplaceFunctionCode(
shared_info->set_start_position(start_position);
shared_info->set_end_position(end_position);
LiteralFixer::PatchLiterals(&compile_info_wrapper, shared_info, isolate);
LiteralFixer::PatchLiterals(&compile_info_wrapper, shared_info,
feedback_metadata_changed, isolate);
DeoptimizeDependentFunctions(*shared_info);
isolate->compilation_cache()->Remove(shared_info);

View File

@ -551,6 +551,26 @@ void ObjectStatsCollector::RecordSharedFunctionInfoDetails(
RecordFixedArrayHelper(sfi, optimized_code_map, OPTIMIZED_CODE_MAP_SUB_TYPE,
0);
// Optimized code map should be small, so skip accounting.
int len = optimized_code_map->length();
for (int i = SharedFunctionInfo::kEntriesStart; i < len;
i += SharedFunctionInfo::kEntryLength) {
Object* slot =
optimized_code_map->get(i + SharedFunctionInfo::kLiteralsOffset);
LiteralsArray* literals = nullptr;
if (slot->IsWeakCell()) {
WeakCell* cell = WeakCell::cast(slot);
if (!cell->cleared()) {
literals = LiteralsArray::cast(cell->value());
}
} else {
literals = LiteralsArray::cast(slot);
}
if (literals != nullptr) {
RecordFixedArrayHelper(sfi, literals, LITERALS_ARRAY_SUB_TYPE, 0);
RecordFixedArrayHelper(sfi, literals->feedback_vector(),
TYPE_FEEDBACK_VECTOR_SUB_TYPE, 0);
}
}
}
}

View File

@ -3505,16 +3505,12 @@ LiteralsArray* LiteralsArray::cast(Object* object) {
return reinterpret_cast<LiteralsArray*>(object);
}
bool LiteralsArray::has_feedback_vector() const {
return !get(kVectorIndex)->IsUndefined(this->GetIsolate());
}
TypeFeedbackVector* LiteralsArray::feedback_vector() const {
if (length() == 0 || !has_feedback_vector()) {
if (length() == 0) {
return TypeFeedbackVector::cast(
this->GetIsolate()->heap()->empty_type_feedback_vector());
const_cast<FixedArray*>(FixedArray::cast(this)));
}
return TypeFeedbackVector::cast(get(kVectorIndex));
}
@ -6661,13 +6657,6 @@ void JSFunction::ReplaceCode(Code* code) {
}
}
bool JSFunction::has_literals_array() const {
SharedFunctionInfo* shared = this->shared();
return (literals() != shared->GetIsolate()->heap()->empty_literals_array() ||
(shared->feedback_metadata()->slot_count() == 0 &&
shared->num_literals() == 0));
}
Context* JSFunction::context() {
return Context::cast(READ_FIELD(this, kContextOffset));

View File

@ -11947,20 +11947,48 @@ void JSFunction::AttemptConcurrentOptimization() {
}
}
// static
Handle<LiteralsArray> SharedFunctionInfo::FindOrCreateLiterals(
Handle<SharedFunctionInfo> shared, Handle<Context> native_context) {
Isolate* isolate = shared->GetIsolate();
CodeAndLiterals result =
shared->SearchOptimizedCodeMap(*native_context, BailoutId::None());
if (result.literals != nullptr) {
DCHECK(shared->feedback_metadata()->is_empty() ||
!result.literals->feedback_vector()->is_empty());
return handle(result.literals, isolate);
}
Handle<TypeFeedbackVector> feedback_vector =
TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
Handle<LiteralsArray> literals =
LiteralsArray::New(isolate, feedback_vector, shared->num_literals());
Handle<Code> code;
if (result.code != nullptr) {
code = Handle<Code>(result.code, isolate);
}
AddToOptimizedCodeMap(shared, native_context, code, literals,
BailoutId::None());
return literals;
}
// static
void SharedFunctionInfo::AddToOptimizedCodeMap(
Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
Handle<Code> code, BailoutId osr_ast_id) {
MaybeHandle<Code> code, Handle<LiteralsArray> literals,
BailoutId osr_ast_id) {
Isolate* isolate = shared->GetIsolate();
if (isolate->serializer_enabled()) return;
DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
DCHECK(code.is_null() ||
code.ToHandleChecked()->kind() == Code::OPTIMIZED_FUNCTION);
DCHECK(native_context->IsNativeContext());
STATIC_ASSERT(kEntryLength == 2);
STATIC_ASSERT(kEntryLength == 3);
Handle<FixedArray> new_code_map;
int entry;
if (!osr_ast_id.IsNone()) {
Context::AddToOptimizedCodeMap(native_context, shared, code, osr_ast_id);
Context::AddToOptimizedCodeMap(
native_context, shared, code.ToHandleChecked(), literals, osr_ast_id);
return;
}
@ -11972,9 +12000,15 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
entry = shared->SearchOptimizedCodeMapEntry(*native_context);
if (entry >= kEntriesStart) {
// Just set the code of the entry.
Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
old_code_map->set(entry + kCachedCodeOffset, *code_cell);
// Just set the code and literals of the entry.
if (!code.is_null()) {
Handle<WeakCell> code_cell =
isolate->factory()->NewWeakCell(code.ToHandleChecked());
old_code_map->set(entry + kCachedCodeOffset, *code_cell);
}
Handle<WeakCell> literals_cell =
isolate->factory()->NewWeakCell(literals);
old_code_map->set(entry + kLiteralsOffset, *literals_cell);
return;
}
@ -12002,11 +12036,15 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
}
}
Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
Handle<WeakCell> code_cell =
code.is_null() ? isolate->factory()->empty_weak_cell()
: isolate->factory()->NewWeakCell(code.ToHandleChecked());
Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
WeakCell* context_cell = native_context->self_weak_cell();
new_code_map->set(entry + kContextOffset, context_cell);
new_code_map->set(entry + kCachedCodeOffset, *code_cell);
new_code_map->set(entry + kLiteralsOffset, *literals_cell);
#ifdef DEBUG
for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
@ -12016,6 +12054,8 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
DCHECK(cell->cleared() ||
(cell->value()->IsCode() &&
Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
DCHECK(cell->cleared() || cell->value()->IsFixedArray());
}
#endif
@ -12053,7 +12093,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
ShortPrint();
PrintF("]\n");
}
// Just clear the code.
// Just clear the code in order to continue sharing literals.
code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
SKIP_WRITE_BARRIER);
}
@ -12070,45 +12110,12 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
void JSFunction::EnsureLiterals(Handle<JSFunction> function) {
Handle<SharedFunctionInfo> shared(function->shared());
Handle<Context> native_context(function->context()->native_context());
Isolate* isolate = shared->GetIsolate();
if (!function->has_literals_array()) {
if (FLAG_trace_strong_rooted_literals) {
PrintF("EnsureLiterals: Installing literals array in %s %p\n",
shared->DebugName()->ToCString().get(),
reinterpret_cast<void*>(*function));
}
// Top level code didn't get it's literals installed.
Handle<TypeFeedbackVector> feedback_vector =
TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
Handle<LiteralsArray> new_literals =
LiteralsArray::New(isolate, feedback_vector, shared->num_literals());
function->set_literals(*new_literals);
} else if (!function->literals()->has_feedback_vector()) {
if (FLAG_trace_strong_rooted_literals) {
PrintF("EnsureLiterals: Installing feedback vector in %s %p\n",
shared->DebugName()->ToCString().get(),
reinterpret_cast<void*>(*function));
}
// If the feedback vector hasn't been installed, do that.
Handle<TypeFeedbackVector> feedback_vector = TypeFeedbackVector::New(
shared->GetIsolate(), handle(shared->feedback_metadata()));
function->literals()->set_feedback_vector(*feedback_vector);
} else {
if (FLAG_trace_strong_rooted_literals) {
PrintF("EnsureLiterals: did nothing for %s %p\n",
shared->DebugName()->ToCString().get(),
reinterpret_cast<void*>(*function));
}
if (function->literals() ==
function->GetIsolate()->heap()->empty_literals_array()) {
Handle<LiteralsArray> literals =
SharedFunctionInfo::FindOrCreateLiterals(shared, native_context);
function->set_literals(*literals);
}
// No matter what, ensure some post-conditions.
DCHECK(shared->feedback_metadata()->slot_count() != 0 ||
function->feedback_vector() ==
shared->GetIsolate()->heap()->empty_type_feedback_vector());
DCHECK(shared->num_literals() == 0 ||
function->literals() !=
shared->GetIsolate()->heap()->empty_literals_array());
}
static void GetMinInobjectSlack(Map* map, void* data) {
@ -13678,11 +13685,15 @@ void SharedFunctionInfo::ClearCodeFromOptimizedCodeMap() {
}
}
Code* SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
BailoutId osr_ast_id) {
Code* result = nullptr;
CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
Context* native_context, BailoutId osr_ast_id) {
CodeAndLiterals result = {nullptr, nullptr};
if (!osr_ast_id.IsNone()) {
return native_context->SearchOptimizedCodeMap(this, osr_ast_id);
Code* code;
LiteralsArray* literals;
native_context->SearchOptimizedCodeMap(this, osr_ast_id, &code, &literals);
result = {code, literals};
return result;
}
DCHECK(osr_ast_id.IsNone());
@ -13691,8 +13702,12 @@ Code* SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
FixedArray* code_map = optimized_code_map();
DCHECK_LE(entry + kEntryLength, code_map->length());
WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
WeakCell* literals_cell =
WeakCell::cast(code_map->get(entry + kLiteralsOffset));
result = cell->cleared() ? nullptr : Code::cast(cell->value());
result = {cell->cleared() ? nullptr : Code::cast(cell->value()),
literals_cell->cleared() ? nullptr : LiteralsArray::cast(
literals_cell->value())};
}
return result;
}

View File

@ -5016,10 +5016,8 @@ class LiteralsArray : public FixedArray {
return OffsetOfElementAt(index + kFirstLiteralIndex);
}
inline bool has_feedback_vector() const;
inline TypeFeedbackVector* feedback_vector() const;
inline void set_feedback_vector(TypeFeedbackVector* vector);
inline Object* literal(int literal_index) const;
inline void set_literal(int literal_index, Object* literal);
inline void set_literal_undefined(int literal_index);
@ -7159,6 +7157,14 @@ enum BuiltinFunctionId {
};
// Result of searching in an optimized code map of a SharedFunctionInfo. Note
// that both {code} and {literals} can be NULL to pass search result status.
struct CodeAndLiterals {
Code* code; // Cached optimized code.
LiteralsArray* literals; // Cached literals array.
};
// SharedFunctionInfo describes the JSFunction information that can be
// shared by multiple instances of the function.
class SharedFunctionInfo: public HeapObject {
@ -7189,7 +7195,11 @@ class SharedFunctionInfo: public HeapObject {
DECL_ACCESSORS(optimized_code_map, FixedArray)
// Returns entry from optimized code map for specified context and OSR entry.
Code* SearchOptimizedCodeMap(Context* native_context, BailoutId osr_ast_id);
// Note that {code == nullptr, literals == nullptr} indicates no matching
// entry has been found, whereas {code, literals == nullptr} indicates that
// code is context-independent.
CodeAndLiterals SearchOptimizedCodeMap(Context* native_context,
BailoutId osr_ast_id);
// Clear optimized code map.
void ClearOptimizedCodeMap();
@ -7211,9 +7221,12 @@ class SharedFunctionInfo: public HeapObject {
Handle<SharedFunctionInfo> shared, Handle<Context> native_context);
// Add or update entry in the optimized code map for context-dependent code.
// If {code} is not given, then an existing entry's code won't be overwritten.
static void AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,
Handle<Context> native_context,
Handle<Code> code, BailoutId osr_ast_id);
MaybeHandle<Code> code,
Handle<LiteralsArray> literals,
BailoutId osr_ast_id);
// Set up the link between shared function info and the script. The shared
// function info is added to the list on the script.
@ -7224,7 +7237,8 @@ class SharedFunctionInfo: public HeapObject {
static const int kEntriesStart = 0;
static const int kContextOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kEntryLength = 2;
static const int kLiteralsOffset = 2;
static const int kEntryLength = 3;
static const int kInitialLength = kEntriesStart + kEntryLength;
static const int kNotFound = -1;
@ -7236,6 +7250,8 @@ class SharedFunctionInfo: public HeapObject {
static const int kOffsetToPreviousCachedCode =
FixedArray::kHeaderSize +
kPointerSize * (kCachedCodeOffset - kEntryLength);
static const int kOffsetToPreviousLiterals =
FixedArray::kHeaderSize + kPointerSize * (kLiteralsOffset - kEntryLength);
// [scope_info]: Scope info.
DECL_ACCESSORS(scope_info, ScopeInfo)
@ -8204,7 +8220,6 @@ class JSFunction: public JSObject {
// access to. For API objects we store the boilerplate in the literal array.
DECL_ACCESSORS(literals, LiteralsArray)
inline bool has_literals_array() const;
static void EnsureLiterals(Handle<JSFunction> function);
inline TypeFeedbackVector* feedback_vector();

View File

@ -23,15 +23,10 @@ RUNTIME_FUNCTION(Runtime_InterpreterNewClosure) {
HandleScope scope(isolate);
DCHECK_EQ(4, args.length());
CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared, 0);
CONVERT_ARG_HANDLE_CHECKED(TypeFeedbackVector, vector, 1);
CONVERT_SMI_ARG_CHECKED(index, 2);
CONVERT_SMI_ARG_CHECKED(pretenured_flag, 3);
Handle<Context> context(isolate->context(), isolate);
FeedbackVectorSlot slot = TypeFeedbackVector::ToSlot(index);
Handle<LiteralsArray> literals(LiteralsArray::cast(vector->Get(slot)),
isolate);
return *isolate->factory()->NewFunctionFromSharedFunctionInfo(
shared, context, literals, static_cast<PretenureFlag>(pretenured_flag));
shared, context, static_cast<PretenureFlag>(pretenured_flag));
}
namespace {

View File

@ -613,15 +613,10 @@ RUNTIME_FUNCTION(Runtime_NewClosure) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared, 0);
CONVERT_ARG_HANDLE_CHECKED(TypeFeedbackVector, vector, 1);
CONVERT_SMI_ARG_CHECKED(index, 2);
Handle<Context> context(isolate->context(), isolate);
FeedbackVectorSlot slot = TypeFeedbackVector::ToSlot(index);
Handle<LiteralsArray> literals(LiteralsArray::cast(vector->Get(slot)),
isolate);
Handle<JSFunction> function =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
shared, context, literals, NOT_TENURED);
isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context,
NOT_TENURED);
return *function;
}
@ -630,17 +625,12 @@ RUNTIME_FUNCTION(Runtime_NewClosure_Tenured) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared, 0);
CONVERT_ARG_HANDLE_CHECKED(TypeFeedbackVector, vector, 1);
CONVERT_SMI_ARG_CHECKED(index, 2);
Handle<Context> context(isolate->context(), isolate);
FeedbackVectorSlot slot = TypeFeedbackVector::ToSlot(index);
Handle<LiteralsArray> literals(LiteralsArray::cast(vector->Get(slot)),
isolate);
// The caller ensures that we pretenure closures that are assigned
// directly to properties.
Handle<JSFunction> function =
isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context,
literals, TENURED);
TENURED);
return *function;
}

View File

@ -252,8 +252,10 @@ Handle<TypeFeedbackVector> TypeFeedbackVector::New(
// the empty literals array here.
array->set(index, *factory->empty_literals_array(), SKIP_WRITE_BARRIER);
} else {
Handle<FixedArray> value = factory->NewFixedArray(length);
array->set(index, *value);
// TODO(mvstanton): Create the array.
// Handle<FixedArray> value = factory->NewFixedArray(length);
// array->set(index, *value);
array->set(index, *factory->empty_literals_array(), SKIP_WRITE_BARRIER);
}
}
i += entry_size;
@ -374,10 +376,10 @@ void TypeFeedbackVector::ClearSlotsImpl(SharedFunctionInfo* shared,
break;
}
case FeedbackVectorSlotKind::CREATE_CLOSURE: {
// Clear the literals in the embedded LiteralsArray.
LiteralsArray* literals = LiteralsArray::cast(Get(slot));
for (int i = 0; i < literals->literals_count(); i++) {
literals->set_literal_undefined(i);
// Fill the array with undefined.
FixedArray* array = FixedArray::cast(Get(slot));
for (int i = 1; i < array->length(); i++) {
array->set_undefined(i);
}
break;
}

View File

@ -4296,6 +4296,10 @@ TEST(Regress513507) {
if (!code->is_optimized_code()) return;
}
Handle<TypeFeedbackVector> vector =
TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
Handle<LiteralsArray> lit =
LiteralsArray::New(isolate, vector, shared->num_literals());
Handle<Context> context(isolate->context());
// Add the new code several times to the optimized code map and also set an
@ -4304,11 +4308,212 @@ TEST(Regress513507) {
FLAG_gc_interval = 1000;
for (int i = 0; i < 10; ++i) {
BailoutId id = BailoutId(i);
SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, id);
SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
}
}
#endif // DEBUG
TEST(Regress514122) {
if (!i::FLAG_incremental_marking) return;
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
LocalContext env;
Heap* heap = isolate->heap();
HandleScope scope(isolate);
// Perfrom one initial GC to enable code flushing.
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
// Prepare function whose optimized code map we can use.
Handle<SharedFunctionInfo> shared;
{
HandleScope inner_scope(isolate);
CompileRun("function f() { return 1 }"
"f(); %OptimizeFunctionOnNextCall(f); f();");
Handle<JSFunction> f = Handle<JSFunction>::cast(
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
CompileRun("f = null");
}
// Prepare optimized code that we can use.
Handle<Code> code;
{
HandleScope inner_scope(isolate);
CompileRun("function g() { return 2 }"
"g(); %OptimizeFunctionOnNextCall(g); g();");
Handle<JSFunction> g = Handle<JSFunction>::cast(
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
if (!code->is_optimized_code()) return;
}
Handle<TypeFeedbackVector> vector =
TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
Handle<LiteralsArray> lit =
LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
Handle<Context> context(isolate->context());
// Add the code several times to the optimized code map.
for (int i = 0; i < 3; ++i) {
HandleScope inner_scope(isolate);
BailoutId id = BailoutId(i);
SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
}
shared->optimized_code_map()->Print();
// Add the code with a literals array to be evacuated.
Page* evac_page;
{
HandleScope inner_scope(isolate);
AlwaysAllocateScope always_allocate(isolate);
// Make sure literal is placed on an old-space evacuation candidate.
heap::SimulateFullSpace(heap->old_space());
// Make sure there the number of literals is > 0.
Handle<LiteralsArray> lit = LiteralsArray::New(isolate, vector, 23);
evac_page = Page::FromAddress(lit->address());
BailoutId id = BailoutId(100);
SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
}
// Heap is ready, force {lit_page} to become an evacuation candidate and
// simulate incremental marking to enqueue optimized code map.
FLAG_manual_evacuation_candidates_selection = true;
heap::ForceEvacuationCandidate(evac_page);
heap::SimulateIncrementalMarking(heap);
// No matter whether reachable or not, {boomer} is doomed.
Handle<Object> boomer(shared->optimized_code_map(), isolate);
// Add the code several times to the optimized code map. This will leave old
// copies of the optimized code map unreachable but still marked.
for (int i = 3; i < 6; ++i) {
HandleScope inner_scope(isolate);
BailoutId id = BailoutId(i);
SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
}
// Trigger a GC to flush out the bug.
CcTest::CollectGarbage(i::OLD_SPACE);
boomer->Print();
}
TEST(OptimizedCodeMapReuseEntries) {
i::FLAG_allow_natives_syntax = true;
// BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
// run this test.
if (i::FLAG_turbo) return;
CcTest::InitializeVM();
v8::Isolate* v8_isolate = CcTest::isolate();
Isolate* isolate = CcTest::i_isolate();
HandleScope scope(isolate);
// Create 3 contexts, allow the 2nd one to be disposed, and verify that
// a 4th context will re-use the weak slots in the optimized code map
// to hold data, rather than expanding the map.
v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
v8::ScriptCompiler::Source script_source(
v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
.ToLocalChecked());
v8::Local<v8::UnboundScript> indep =
v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
.ToLocalChecked();
const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
// Perfrom one initial GC to enable code flushing.
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
c1->Enter();
indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
CompileRun(toplevel);
Handle<SharedFunctionInfo> shared;
Handle<JSFunction> foo = Handle<JSFunction>::cast(
v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
CHECK(foo->shared()->is_compiled());
shared = handle(foo->shared());
c1->Exit();
{
HandleScope scope(isolate);
v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
c2->Enter();
indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
CompileRun(toplevel);
c2->Exit();
}
{
HandleScope scope(isolate);
v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
c3->Enter();
indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
CompileRun(toplevel);
c3->Exit();
// Now, collect garbage. Context c2 should have no roots to it, and it's
// entry in the optimized code map should be free for a new context.
for (int i = 0; i < 4; i++) {
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
}
Handle<FixedArray> optimized_code_map =
handle(shared->optimized_code_map());
// There should be 3 entries in the map.
CHECK_EQ(
3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
SharedFunctionInfo::kEntryLength));
// But one of them (formerly for c2) should be cleared.
int cleared_count = 0;
for (int i = SharedFunctionInfo::kEntriesStart;
i < optimized_code_map->length();
i += SharedFunctionInfo::kEntryLength) {
cleared_count +=
WeakCell::cast(
optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
->cleared()
? 1
: 0;
}
CHECK_EQ(1, cleared_count);
// Verify that a new context uses the cleared entry rather than creating a
// new
// optimized code map array.
v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
c4->Enter();
indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
CompileRun(toplevel);
c4->Exit();
CHECK_EQ(*optimized_code_map, shared->optimized_code_map());
// Now each entry is in use.
cleared_count = 0;
for (int i = SharedFunctionInfo::kEntriesStart;
i < optimized_code_map->length();
i += SharedFunctionInfo::kEntryLength) {
cleared_count +=
WeakCell::cast(
optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
->cleared()
? 1
: 0;
}
CHECK_EQ(0, cleared_count);
}
}
TEST(Regress513496) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
@ -4354,9 +4559,9 @@ TEST(Regress513496) {
}
// Lookup the optimized code and keep it alive.
Code* result = shared->SearchOptimizedCodeMap(
CodeAndLiterals result = shared->SearchOptimizedCodeMap(
isolate->context()->native_context(), BailoutId::None());
Handle<Code> optimized_code(result, isolate);
Handle<Code> optimized_code(result.code, isolate);
// Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
// though the optimized code for 'f' is reachable via the optimized code map.

View File

@ -107,9 +107,7 @@ TEST(VectorStructure) {
FeedbackVectorSlotKind::CREATE_CLOSURE));
FeedbackVectorSlot slot = helper.slot(1);
FixedArray* array = FixedArray::cast(vector->Get(slot));
CHECK_EQ(5, array->length());
CHECK_EQ(5, vector->GetParameter(slot));
CHECK_EQ(array->get(0), *factory->undefined_value());
CHECK_EQ(array, *factory->empty_literals_array());
}
}

View File

@ -1,56 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --expose-gc
// Make sure literals are strongly rooted and safe from weak-code deopts.
(function() {
function foo() {
var a = { y: 0 };
a.y = 1;
return a;
}
foo();
foo();
%OptimizeFunctionOnNextCall(foo);
foo();
gc();
assertOptimized(foo);
})();
(function() {
function hot(o) {
return o.x + o.y;
}
function mapPlus(a, y) {
return a.map(x => hot({x, y}));
}
var a = [1, 2, 3];
print(mapPlus(a, 1));
print(mapPlus(a, 2));
%OptimizeFunctionOnNextCall(hot);
print(mapPlus(a, 3));
gc(); // BOOOM!
assertOptimized(hot);
print(mapPlus(a, 4));
})();
// Verify that we can handle the creation of a new script, where the
// code is cached and the feedback vector has to be re-created.
(function() {
var sopen = "function wrapper() { ";
var s1 = "function foo() { return bar(5); } ";
var s2 = "foo(); foo(); %OptimizeFunctionOnNextCall(foo); foo(); ";
var sclose = "} wrapper(); ";
var s = sopen + s1 + s2 + sclose;
function bar(i) { return i + 3 };
for (var i = 0; i < 4; i++) {
eval(s);
}
})();