Full code shouldn't embed the type feedback vector.

Make sure to always reference it indirectly. This allows us to make the vector
native-context dependent should we wish.
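
For illustration only, a minimal standalone C++ sketch of the difference (not V8 code; Function and SharedInfo below are hypothetical stand-ins for JSFunction and SharedFunctionInfo). Embedding captures a pointer to the vector once, while the indirect load re-reads function -> shared info -> feedback vector at each use, so the vector can later be swapped (for example, made native-context dependent) without touching generated code:

#include <cstdio>
#include <vector>

// Hypothetical stand-ins for V8's SharedFunctionInfo and JSFunction.
struct SharedInfo {
  std::vector<int>* feedback_vector;  // the type feedback vector
};
struct Function {
  SharedInfo* shared;
};

// Old style: the vector is embedded, i.e. a pointer into it is captured up
// front, so a later swap of the function's vector is never observed.
int* EmbeddedSlot(std::vector<int>& vec, int index) { return &vec[index]; }

// New style: dereference function -> shared -> feedback_vector at use time,
// mirroring the EmitLoadTypeFeedbackVector load chain added in this CL.
int* IndirectSlot(Function* fn, int index) {
  return &(*fn->shared->feedback_vector)[index];
}

int main() {
  std::vector<int> a(4, 0), b(4, 7);
  SharedInfo shared{&a};
  Function fn{&shared};

  int* embedded = EmbeddedSlot(*shared.feedback_vector, 1);  // pointer into a
  shared.feedback_vector = &b;  // swap the vector, e.g. per native context

  // Prints "embedded slot sees 0, indirect slot sees 7".
  printf("embedded slot sees %d, indirect slot sees %d\n", *embedded,
         *IndirectSlot(&fn, 1));
  return 0;
}

The per-architecture MacroAssembler::EmitLoadTypeFeedbackVector helpers below implement the same chain of loads with real machine instructions.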

R=ishell@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1364373003

Cr-Commit-Position: refs/heads/master@{#30940}
Authored by mvstanton on 2015-09-25 06:56:24 -07:00; committed by Commit bot
parent ca5780690d
commit c90c60ba26
28 changed files with 156 additions and 139 deletions

View File

@@ -2507,15 +2507,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-  __ ldr(vector, FieldMemOperand(vector,
-                                 JSFunction::kSharedFunctionInfoOffset));
-  __ ldr(vector, FieldMemOperand(vector,
-                                 SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r1 - function
   // r3 - slot id
@@ -4241,21 +4232,21 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, r2);
+  __ EmitLoadTypeFeedbackVector(r2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
@@ -4472,14 +4463,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }

View File

@@ -1043,6 +1043,14 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  ldr(vector,
+      FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // r0-r3: preserved

View File

@@ -1409,6 +1409,9 @@ class MacroAssembler: public Assembler {
     DecodeField<Field>(reg, reg);
   }
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type,
                   bool load_constant_pool_pointer_reg = false);

View File

@@ -2893,15 +2893,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-  __ Ldr(vector, FieldMemOperand(vector,
-                                 JSFunction::kSharedFunctionInfoOffset));
-  __ Ldr(vector, FieldMemOperand(vector,
-                                 SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // x1 - function
   // x3 - slot id
@@ -4377,21 +4368,21 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, x2);
+  __ EmitLoadTypeFeedbackVector(x2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
@@ -4608,14 +4599,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }

View File

@@ -2780,6 +2780,14 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  Ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  Ldr(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  Ldr(vector,
+      FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // Out-of-line constant pool not implemented on arm64.

View File

@@ -1591,6 +1591,9 @@ class MacroAssembler : public Assembler {
   // ---------------------------------------------------------------------------
   // Frames.
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type);
   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);

View File

@@ -1124,9 +1124,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Label non_proxy;
   __ bind(&fixed_array);
-  __ Move(r1, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(r1);
   __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  int vector_index = SmiFromSlot(slot)->value();
   __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));
   __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
@@ -3185,7 +3185,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ Move(r2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(r2);
   __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -3225,7 +3225,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ Move(r2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(r2);
   __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -1123,9 +1123,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // We got a fixed array in register x0. Iterate through that.
   __ Bind(&fixed_array);
-  __ LoadObject(x1, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(x1);
   __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  int vector_index = SmiFromSlot(slot)->value();
   __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
   __ Mov(x1, Smi::FromInt(1));  // Smi indicates slow check.
@@ -2893,7 +2893,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Peek(x1, arg_count * kXRegSize);
   // Record call targets in unoptimized code.
-  __ LoadObject(x2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(x2);
   __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -2933,7 +2933,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ Peek(x1, arg_count * kXRegSize);
   // Record call targets in unoptimized code.
-  __ LoadObject(x2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(x2);
   __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -91,28 +91,6 @@ unsigned FullCodeGenerator::EmitBackEdgeTable() {
 }
-void FullCodeGenerator::EnsureSlotContainsAllocationSite(
-    FeedbackVectorSlot slot) {
-  Handle<TypeFeedbackVector> vector = FeedbackVector();
-  if (!vector->Get(slot)->IsAllocationSite()) {
-    Handle<AllocationSite> allocation_site =
-        isolate()->factory()->NewAllocationSite();
-    vector->Set(slot, *allocation_site);
-  }
-}
-void FullCodeGenerator::EnsureSlotContainsAllocationSite(
-    FeedbackVectorICSlot slot) {
-  Handle<TypeFeedbackVector> vector = FeedbackVector();
-  if (!vector->Get(slot)->IsAllocationSite()) {
-    Handle<AllocationSite> allocation_site =
-        isolate()->factory()->NewAllocationSite();
-    vector->Set(slot, *allocation_site);
-  }
-}
 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
   // Fill in the deoptimization information.
   DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());

View File

@@ -420,22 +420,16 @@ class FullCodeGenerator: public AstVisitor {
   void PrepareForBailout(Expression* node, State state);
   void PrepareForBailoutForId(BailoutId id, State state);
-  // Feedback slot support. The feedback vector will be cleared during gc and
-  // collected by the type-feedback oracle.
-  Handle<TypeFeedbackVector> FeedbackVector() const {
-    return info_->feedback_vector();
-  }
-  void EnsureSlotContainsAllocationSite(FeedbackVectorSlot slot);
-  void EnsureSlotContainsAllocationSite(FeedbackVectorICSlot slot);
   // Returns a smi for the index into the FixedArray that backs the feedback
   // vector
   Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
-    return Smi::FromInt(FeedbackVector()->GetIndex(slot));
+    return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
+        literal()->feedback_vector_spec(), slot));
   }
   Smi* SmiFromSlot(FeedbackVectorICSlot slot) const {
-    return Smi::FromInt(FeedbackVector()->GetIndex(slot));
+    return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
+        literal()->feedback_vector_spec(), slot));
   }
   // Record a call's return site offset, used to rebuild the frame if the
@@ -702,7 +696,7 @@ class FullCodeGenerator: public AstVisitor {
   bool is_native() { return info_->is_native(); }
   LanguageMode language_mode() { return literal()->language_mode(); }
   bool has_simple_parameters() { return info_->has_simple_parameters(); }
-  FunctionLiteral* literal() { return info_->literal(); }
+  FunctionLiteral* literal() const { return info_->literal(); }
   Scope* scope() { return scope_; }
   static Register result_register();

View File

@@ -1058,8 +1058,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ bind(&fixed_array);
   // No need for a write barrier, we are storing a Smi in the feedback vector.
-  __ LoadHeapObject(ebx, FeedbackVector());
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  __ EmitLoadTypeFeedbackVector(ebx);
+  int vector_index = SmiFromSlot(slot)->value();
   __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
          Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
@@ -3074,7 +3074,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ mov(edi, Operand(esp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ LoadHeapObject(ebx, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(ebx);
   __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -3114,7 +3114,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ mov(edi, Operand(esp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ LoadHeapObject(ebx, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(ebx);
   __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -1124,9 +1124,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Label non_proxy;
   __ bind(&fixed_array);
-  __ li(a1, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a1);
   __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  int vector_index = SmiFromSlot(slot)->value();
   __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
   __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
@@ -3177,7 +3177,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ li(a2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a2);
   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -3217,7 +3217,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ li(a2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a2);
   __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -1122,9 +1122,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   Label non_proxy;
   __ bind(&fixed_array);
-  __ li(a1, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a1);
   __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  int vector_index = SmiFromSlot(slot)->value();
   __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
   __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
@@ -3179,7 +3179,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ li(a2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a2);
   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -3219,7 +3219,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ li(a2, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(a2);
   __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -1084,8 +1084,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   __ bind(&fixed_array);
   // No need for a write barrier, we are storing a Smi in the feedback vector.
-  __ Move(rbx, FeedbackVector());
-  int vector_index = FeedbackVector()->GetIndex(slot);
+  __ EmitLoadTypeFeedbackVector(rbx);
+  int vector_index = SmiFromSlot(slot)->value();
   __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(vector_index)),
           TypeFeedbackVector::MegamorphicSentinel(isolate()));
   __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
@@ -3066,7 +3066,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
   // Record call targets in unoptimized code, but not in the snapshot.
-  __ Move(rbx, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(rbx);
   __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
   CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
@@ -3106,7 +3106,7 @@ void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
   // Record call targets in unoptimized code.
-  __ Move(rbx, FeedbackVector());
+  __ EmitLoadTypeFeedbackVector(rbx);
   __ Move(rdx, SmiFromSlot(expr->CallFeedbackSlot()));
   CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);

View File

@@ -2176,14 +2176,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-  __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
-  __ mov(vector, FieldOperand(vector,
-                              SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // edi - function
   // edx - slot id
@@ -4311,14 +4303,14 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
@@ -4542,14 +4534,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
@@ -4902,7 +4894,7 @@ void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, ebx);
+  __ EmitLoadTypeFeedbackVector(ebx);
   CallICStub stub(isolate(), state());
   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
 }

View File

@@ -869,6 +869,13 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // Out-of-line constant pool not implemented on ia32.

View File

@@ -954,6 +954,9 @@ class MacroAssembler: public Assembler {
     return SafepointRegisterStackIndex(reg.code());
   }
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type);
   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);

View File

@@ -2632,15 +2632,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-  __ lw(vector, FieldMemOperand(vector,
-                                JSFunction::kSharedFunctionInfoOffset));
-  __ lw(vector, FieldMemOperand(vector,
-                                SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // a1 - function
   // a3 - slot id
@@ -4458,21 +4449,21 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, a2);
+  __ EmitLoadTypeFeedbackVector(a2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
@@ -4693,14 +4684,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }

View File

@@ -4825,6 +4825,14 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  lw(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  lw(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  lw(vector,
+     FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // Out-of-line constant pool not implemented on mips.

View File

@@ -1587,6 +1587,9 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
   void StubPrologue();
   void Prologue(bool code_pre_aging);
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type);
   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);

View File

@@ -2709,15 +2709,6 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ ld(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-  __ ld(vector, FieldMemOperand(vector,
-                                JSFunction::kSharedFunctionInfoOffset));
-  __ ld(vector, FieldMemOperand(vector,
-                                SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // a1 - function
   // a3 - slot id
@@ -4491,21 +4482,21 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, a2);
+  __ EmitLoadTypeFeedbackVector(a2);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }
@@ -4724,14 +4715,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }

View File

@@ -4956,6 +4956,14 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  ld(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  ld(vector, FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  ld(vector,
+     FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // Out-of-line constant pool not implemented on mips64.

View File

@@ -1671,6 +1671,9 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
   void StubPrologue();
   void Prologue(bool code_pre_aging);
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type);
   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);

View File

@@ -125,6 +125,34 @@ Handle<TypeFeedbackVector> TypeFeedbackVector::Allocate(Isolate* isolate,
 }
+template int TypeFeedbackVector::GetIndexFromSpec(const ZoneFeedbackVectorSpec*,
+                                                   FeedbackVectorICSlot);
+template int TypeFeedbackVector::GetIndexFromSpec(const ZoneFeedbackVectorSpec*,
+                                                   FeedbackVectorSlot);
+// static
+template <typename Spec>
+int TypeFeedbackVector::GetIndexFromSpec(const Spec* spec,
+                                         FeedbackVectorSlot slot) {
+  const int ic_slot_count = spec->ic_slots();
+  const int index_count = VectorICComputer::word_count(ic_slot_count);
+  return kReservedIndexCount + index_count + slot.ToInt();
+}
+// static
+template <typename Spec>
+int TypeFeedbackVector::GetIndexFromSpec(const Spec* spec,
+                                         FeedbackVectorICSlot slot) {
+  const int slot_count = spec->slots();
+  const int ic_slot_count = spec->ic_slots();
+  const int index_count = VectorICComputer::word_count(ic_slot_count);
+  return kReservedIndexCount + index_count + slot_count +
+         slot.ToInt() * elements_per_ic_slot();
+}
 // static
 int TypeFeedbackVector::PushAppliedArgumentsIndex() {
   const int index_count = VectorICComputer::word_count(1);

View File

@@ -113,6 +113,11 @@ class TypeFeedbackVector : public FixedArray {
   inline int GetIndex(FeedbackVectorSlot slot) const;
   inline int GetIndex(FeedbackVectorICSlot slot) const;
+  template <typename Spec>
+  static int GetIndexFromSpec(const Spec* spec, FeedbackVectorSlot slot);
+  template <typename Spec>
+  static int GetIndexFromSpec(const Spec* spec, FeedbackVectorICSlot slot);
   // Conversion from an integer index to either a slot or an ic slot. The caller
   // should know what kind she expects.
   inline FeedbackVectorSlot ToSlot(int index) const;

View File

@@ -2023,14 +2023,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-  __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
-  __ movp(vector, FieldOperand(vector,
-                               SharedFunctionInfo::kFeedbackVectorOffset));
-}
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // rdi - function
   // rdx - slot id
@@ -4227,14 +4219,14 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
@@ -4436,14 +4428,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }
@@ -4630,7 +4622,7 @@ void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, rbx);
+  __ EmitLoadTypeFeedbackVector(rbx);
   CallICStub stub(isolate(), state());
   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
 }

View File

@@ -3634,6 +3634,13 @@ void MacroAssembler::Prologue(bool code_pre_aging) {
 }
+void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
+  movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+  movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
+  movp(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
+}
 void MacroAssembler::EnterFrame(StackFrame::Type type,
                                 bool load_constant_pool_pointer_reg) {
   // Out-of-line constant pool not implemented on x64.

View File

@@ -1426,6 +1426,9 @@ class MacroAssembler: public Assembler {
     return SafepointRegisterStackIndex(reg.code());
   }
+  // Load the type feedback vector from a JavaScript frame.
+  void EmitLoadTypeFeedbackVector(Register vector);
   // Activation support.
   void EnterFrame(StackFrame::Type type);
   void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);