Revert "Second attempt at introducing a premonomorphic state in the call"

This reverts commits r19463 and r19457 (including the MIPS port). There was
a SunSpider performance issue and, on reflection, we can achieve the
necessary result in a new way.

TBR=verwaest@chromium.org

Review URL: https://codereview.chromium.org/172383003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19488 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mvstanton@chromium.org 2014-02-19 13:55:25 +00:00
parent 139134acc2
commit 73b679cbee
9 changed files with 160 additions and 206 deletions
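
For context, the reverted patch had inserted a premonomorphic step into the call feedback state machine; with this revert a slot again moves straight from uninitialized to monomorphic on the first recorded call. Below is a minimal, self-contained sketch of the two transition tables for ordinary (non-Array) callees; the enum and function names are illustrative, not V8's.

// Illustrative sketch only, not V8 code. Sentinels stored in the feedback
// slot (see the objects-inl.cc hunk below):
//   uninitialized -> the hole, megamorphic -> undefined,
//   premonomorphic -> null (removed again by this revert),
//   monomorphic -> the callee itself (or an AllocationSite for Array()).
enum class CallFeedback { kUninitialized, kPremonomorphic, kMonomorphic, kMegamorphic };

// Transition recorded on each call by the reverted patch (r19457/r19463).
CallFeedback NextStateWithPremonomorphic(CallFeedback s, bool same_callee) {
  switch (s) {
    case CallFeedback::kUninitialized:  return CallFeedback::kPremonomorphic;
    case CallFeedback::kPremonomorphic: return CallFeedback::kMonomorphic;
    case CallFeedback::kMonomorphic:
      return same_callee ? s : CallFeedback::kMegamorphic;
    case CallFeedback::kMegamorphic:    return s;
  }
  return s;  // unreachable; keeps compilers quiet
}

// Transition recorded on each call after this revert.
CallFeedback NextState(CallFeedback s, bool same_callee) {
  switch (s) {
    case CallFeedback::kUninitialized: return CallFeedback::kMonomorphic;
    case CallFeedback::kMonomorphic:
      return same_callee ? s : CallFeedback::kMegamorphic;
    default: return CallFeedback::kMegamorphic;  // megamorphic is a sink state
  }
}

The per-architecture hunks below express exactly this change in each port's MacroAssembler.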

View File

@@ -3268,17 +3268,12 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// x1 : the function to call
// x2 : feedback vector
// x3 : slot in feedback vector (smi)
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
Label initialize, done, miss, megamorphic, not_array_function;
ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->undefined_value());
Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex;
// Load the cache state.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
@@ -3288,44 +3283,43 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// function without changing the state.
__ Cmp(x4, x1);
__ B(eq, &done);
__ JumpIfRoot(x4, kMegamorphicRootIndex, &done);
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(x5);
__ Cmp(x1, x5);
__ B(eq, &check_array);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in x4.
__ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset));
__ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &miss);
// Non-array cache: Check the cache state.
__ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_non_array);
__ JumpIfNotRoot(x4, kUninitializedRootIndex, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
__ LoadRoot(x10, kPremonomorphicRootIndex);
__ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
// Make sure the function is the Array() function
__ LoadArrayFunction(x4);
__ Cmp(x1, x4);
__ B(ne, &megamorphic);
__ B(&done);
// Array cache: Check the cache state to see if we're in a monomorphic
// state where the state object is an AllocationSite object.
__ Bind(&check_array);
__ Ldr(x5, FieldMemOperand(x4, AllocationSite::kMapOffset));
__ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, &done);
__ Bind(&miss);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ JumpIfRoot(x4, kUninitializedRootIndex, &initialize_array);
__ JumpIfRoot(x4, kPremonomorphicRootIndex, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ Bind(&megamorphic);
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
__ LoadRoot(x10, kMegamorphicRootIndex);
__ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
__ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
__ B(&done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ Bind(&initialize_array);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ Bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(x4);
__ Cmp(x1, x4);
__ B(ne, &not_array_function);
// The target function is the Array constructor.
// Create an AllocationSite if we don't already have it; store it in the slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateAllocationSiteStub create_stub;
@@ -3341,8 +3335,9 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ B(&done);
// Non-array cache: Premonomorphic -> monomorphic.
__ Bind(&initialize_non_array);
__ Bind(&not_array_function);
// An uninitialized cache is patched with the function.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
// TODO(all): Does the value need to be left in x4? If not, FieldMemOperand
// could be used to avoid this add.
@@ -3355,6 +3350,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ Pop(x1, x2, x4);
// TODO(all): Are x4, x2 and x1 outputs? This isn't clear.
__ Bind(&done);
}
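
The four other ports below restore the same slot-update logic; only the register names differ. As a reading aid, here is a hedged, self-contained C++ model of the restored control flow. `Slot`, `Function`, `AllocationSite`, and `IsArrayFunction` are stand-ins for V8's types, not its API; the hole and undefined sentinels become the `Uninitialized` and `Megamorphic` alternatives.

#include <variant>

// Self-contained model of the restored GenerateRecordCallTarget logic; all
// type names here are stand-ins chosen for illustration only.
struct Function { int id; };            // an opaque callee
struct AllocationSite { int kind; };    // tracks the ElementsKind for Array()
struct Uninitialized {};                // the hole in the real feedback slot
struct Megamorphic {};                  // undefined in the real feedback slot
using Slot = std::variant<Uninitialized, Megamorphic, const Function*, AllocationSite>;

// Stand-in for LoadArrayFunction + compare; id 0 plays the Array() function.
bool IsArrayFunction(const Function* f) { return f->id == 0; }

void RecordCallTarget(Slot& slot, const Function* callee) {
  // A monomorphic cache hit or an already megamorphic state: do nothing.
  if (auto* f = std::get_if<const Function*>(&slot)) {
    if (*f == callee) return;
  }
  if (std::holds_alternative<Megamorphic>(slot)) return;

  if (!std::holds_alternative<Uninitialized>(slot)) {
    // Another function or an AllocationSite is cached. The AllocationSite
    // still counts as a hit when the callee is the Array() function; any
    // other monomorphic miss goes megamorphic (undefined, so the real code
    // needs no write barrier).
    if (std::holds_alternative<AllocationSite>(slot) && IsArrayFunction(callee)) return;
    slot = Megamorphic{};
    return;
  }

  // Uninitialized: record the callee. Array() gets an AllocationSite so the
  // ElementsKind of the constructed array can be tracked; anything else is
  // stored directly.
  if (IsArrayFunction(callee)) {
    slot = AllocationSite{0};
  } else {
    slot = callee;
  }
}

Note that, matching the stubs, a slot that already holds an AllocationSite never falls back to caching a plain function; it either stays monomorphic for Array() or goes megamorphic.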

View File

@@ -3012,17 +3012,12 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// r1 : the function to call
// r2 : Feedback vector
// r3 : slot in feedback vector (Smi)
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
Label initialize, done, miss, megamorphic, not_array_function;
ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->undefined_value());
Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex;
// Load the cache state into r4.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
@@ -3032,50 +3027,45 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// function without changing the state.
__ cmp(r4, r1);
__ b(eq, &done);
__ CompareRoot(r4, kMegamorphicRootIndex);
__ b(eq, &done);
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(r5);
__ cmp(r1, r5);
__ b(eq, &check_array);
// Non-array cache: Check the cache state.
__ CompareRoot(r4, kPremonomorphicRootIndex);
__ b(eq, &initialize_non_array);
__ CompareRoot(r4, kUninitializedRootIndex);
__ b(ne, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, kPremonomorphicRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
// Array cache: Check the cache state to see if we're in a monomorphic
// state where the state object is an AllocationSite object.
__ bind(&check_array);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in r4.
__ ldr(r5, FieldMemOperand(r4, 0));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(eq, &done);
__ b(ne, &miss);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ CompareRoot(r4, kUninitializedRootIndex);
__ b(eq, &initialize_array);
__ CompareRoot(r4, kPremonomorphicRootIndex);
__ b(eq, &initialize_array);
// Make sure the function is the Array() function
__ LoadArrayFunction(r4);
__ cmp(r1, r4);
__ b(ne, &megamorphic);
__ jmp(&done);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ bind(&miss);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(eq, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, kMegamorphicRootIndex);
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(r4);
__ cmp(r1, r4);
__ b(ne, &not_array_function);
// The target function is the Array constructor.
// Create an AllocationSite if we don't already have it; store it in the slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -3091,8 +3081,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ b(&done);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ bind(&not_array_function);
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ str(r1, MemOperand(r4, 0));

View File

@@ -2330,7 +2330,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// edx : slot in feedback vector (Smi)
// edi : the function to call
Isolate* isolate = masm->isolate();
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
Label initialize, done, miss, megamorphic, not_array_function;
// Load the cache state into ecx.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
@@ -2343,53 +2343,48 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
__ j(equal, &done, Label::kFar);
// Load the global or builtins object from the current context and check
// if we're dealing with the Array function or not.
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in ecx.
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(not_equal, &miss);
// Load the global or builtins object from the current context
__ LoadGlobalContext(ecx);
// Make sure the function is the Array() function
__ cmp(edi, Operand(ecx,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
__ j(equal, &check_array);
// Non-array cache: Reload the cache state and check it.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
__ cmp(ecx, Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ j(equal, &initialize_non_array);
__ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
__ j(not_equal, &megamorphic);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ jmp(&done, Label::kFar);
// Array cache: Reload the cache state and check to see if we're in a
// monomorphic state where the state object is an AllocationSite object.
__ bind(&check_array);
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(equal, &done, Label::kFar);
__ bind(&miss);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
__ j(equal, &initialize_array);
__ cmp(ecx, Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ j(equal, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
__ jmp(&done, Label::kFar);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
__ LoadGlobalContext(ecx);
// Make sure the function is the Array() function
__ cmp(edi, Operand(ecx,
Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
__ j(not_equal, &not_array_function);
// The target function is the Array constructor.
// Create an AllocationSite if we don't already have it; store it in the slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2411,11 +2406,11 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ jmp(&done);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ bind(&not_array_function);
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
edi);
// We won't need edx or ebx anymore, just save edi
__ push(edi);
__ push(ebx);
__ push(edx);

View File

@@ -3159,17 +3159,12 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// a1 : the function to call
// a2 : Feedback vector
// a3 : slot in feedback vector (Smi)
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
Label initialize, done, miss, megamorphic, not_array_function;
ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->undefined_value());
Heap::RootListIndex kMegamorphicRootIndex = Heap::kUndefinedValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
Heap::RootListIndex kUninitializedRootIndex = Heap::kTheHoleValueRootIndex;
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
Heap::RootListIndex kPremonomorphicRootIndex = Heap::kNullValueRootIndex;
// Load the cache state into t0.
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
@@ -3179,51 +3174,44 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// A monomorphic cache hit or an already megamorphic state: invoke the
// function without changing the state.
__ Branch(&done, eq, t0, Operand(a1));
__ LoadRoot(at, kMegamorphicRootIndex);
__ Branch(&done, eq, t0, Operand(at));
// Check if we're dealing with the Array function or not.
__ LoadArrayFunction(t1);
__ Branch(&check_array, eq, a1, Operand(t1));
// Non-array cache: Check the cache state.
__ LoadRoot(at, kPremonomorphicRootIndex);
__ Branch(&initialize_non_array, eq, t0, Operand(at));
__ LoadRoot(at, kUninitializedRootIndex);
__ Branch(&megamorphic, ne, t0, Operand(at));
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, at);
__ LoadRoot(at, kPremonomorphicRootIndex);
__ Branch(USE_DELAY_SLOT, &done);
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); // In delay slot.
// Array cache: Check the cache state to see if we're in a monomorphic
// state where the state object is an AllocationSite object.
__ bind(&check_array);
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in t0.
__ lw(t1, FieldMemOperand(t0, 0));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&done, eq, t1, Operand(at));
__ Branch(&miss, ne, t1, Operand(at));
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ LoadRoot(at, kUninitializedRootIndex);
__ Branch(&initialize_array, eq, t0, Operand(at));
__ LoadRoot(at, kPremonomorphicRootIndex);
__ Branch(&initialize_array, eq, t0, Operand(at));
// Make sure the function is the Array() function
__ LoadArrayFunction(t0);
__ Branch(&megamorphic, ne, a1, Operand(t0));
__ jmp(&done);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ bind(&miss);
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&initialize, eq, t0, Operand(at));
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, Operand(t0));
__ LoadRoot(at, kMegamorphicRootIndex);
__ Branch(USE_DELAY_SLOT, &done);
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); // In delay slot.
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
__ jmp(&done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(t0);
__ Branch(&not_array_function, ne, a1, Operand(t0));
// The target function is the Array constructor.
// Create an AllocationSite if we don't already have it; store it in the slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
const RegList kSavedRegs =
@@ -3244,8 +3232,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
}
__ Branch(&done);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ bind(&not_array_function);
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, Operand(t0));
__ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));

View File

@@ -6543,11 +6543,6 @@ Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
}
Handle<Object> TypeFeedbackInfo::PremonomorphicSentinel(Isolate* isolate) {
return isolate->factory()->null_value();
}
Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
return isolate->factory()->undefined_value();
}
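
With the premonomorphic sentinel gone, TypeFeedbackInfo is back to two sentinels. The following restatement of the post-revert mapping is a reading aid rather than the exact diff; the UninitializedSentinel body is elided above, but the ASSERT_EQ checks in the stub hunks pin it to the hole value.

// Post-revert sentinel mapping, restated from this hunk and the stub ASSERTs.
Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
  return isolate->factory()->the_hole_value();   // slot has never been written
}
Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
  return isolate->factory()->undefined_value();  // too many distinct targets
}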

View File

@@ -8177,9 +8177,6 @@ class TypeFeedbackInfo: public Struct {
// The object that indicates an uninitialized cache.
static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
// The object that indicates a cache in pre-monomorphic state.
static inline Handle<Object> PremonomorphicSentinel(Isolate* isolate);
// The object that indicates a megamorphic state.
static inline Handle<Object> MegamorphicSentinel(Isolate* isolate);

View File

@@ -14841,7 +14841,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConstructor) {
Handle<AllocationSite> site;
if (!type_info.is_null() &&
*type_info != isolate->heap()->null_value() &&
*type_info != isolate->heap()->undefined_value()) {
site = Handle<AllocationSite>::cast(type_info);
ASSERT(!site->SitePointsToLiteral());
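
The dropped null_value() comparison in Runtime_ArrayConstructor was only needed while null served as the premonomorphic sentinel. A sketch of the post-revert shape of the check, reassembled from the surviving lines of the hunk above:

// With null no longer used as a feedback sentinel, one comparison against
// undefined is enough before casting to AllocationSite.
Handle<AllocationSite> site;
if (!type_info.is_null() &&
    *type_info != isolate->heap()->undefined_value()) {
  site = Handle<AllocationSite>::cast(type_info);
  ASSERT(!site->SitePointsToLiteral());
}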

View File

@@ -2169,7 +2169,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// rdx : slot in feedback vector (Smi)
// rdi : the function to call
Isolate* isolate = masm->isolate();
Label check_array, initialize_array, initialize_non_array, megamorphic, done;
Label initialize, done, miss, megamorphic, not_array_function,
done_no_smi_convert;
// Load the cache state into rcx.
__ SmiToInteger32(rdx, rdx);
@@ -2183,49 +2184,44 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
__ j(equal, &done);
// Check if we're dealing with the Array function or not.
// If we came here, we need to see if we are the array function.
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in rcx.
Handle<Map> allocation_site_map =
masm->isolate()->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ j(not_equal, &miss);
// Make sure the function is the Array() function
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
__ j(equal, &check_array);
// Non-array cache: Reload the cache state and check it.
__ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
__ Cmp(rcx, TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ j(equal, &initialize_non_array);
__ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
__ j(not_equal, &megamorphic);
__ jmp(&done);
// Non-array cache: Uninitialized -> premonomorphic. The sentinel is an
// immortal immovable object (null) so no write-barrier is needed.
__ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ jmp(&done, Label::kFar);
__ bind(&miss);
// Array cache: Reload the cache state and check to see if we're in a
// monomorphic state where the state object is an AllocationSite object.
__ bind(&check_array);
__ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
__ Cmp(FieldOperand(rcx, 0), allocation_site_map);
__ j(equal, &done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
// A monomorphic miss (i.e., here the cache is not uninitialized) goes
// megamorphic.
__ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
__ j(equal, &initialize_array);
__ Cmp(rcx, TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ j(equal, &initialize_array);
// Both caches: Monomorphic -> megamorphic. The sentinel is an
// immortal immovable object (undefined) so no write-barrier is needed.
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);
__ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
TypeFeedbackInfo::MegamorphicSentinel(isolate));
__ jmp(&done);
// Array cache: Uninitialized or premonomorphic -> monomorphic.
__ bind(&initialize_array);
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
// Make sure the function is the Array() function
__ LoadArrayFunction(rcx);
__ cmpq(rdi, rcx);
__ j(not_equal, &not_array_function);
// The target function is the Array constructor.
// Create an AllocationSite if we don't already have it; store it in the slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2246,13 +2242,13 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ pop(rax);
__ SmiToInteger32(rax, rax);
}
Label done_no_smi_convert;
__ jmp(&done_no_smi_convert);
// Non-array cache: Premonomorphic -> monomorphic.
__ bind(&initialize_non_array);
__ bind(&not_array_function);
__ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
rdi);
// We won't need rdx or rbx anymore, just save rdi
__ push(rdi);
__ push(rbx);
__ push(rdx);

View File

@@ -2848,9 +2848,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
// originating from two different native contexts.
CcTest::global()->Set(v8_str("fun1"), fun1);
CcTest::global()->Set(v8_str("fun2"), fun2);
CompileRun("function f(a, b) { a(); b(); }"
"f(fun1, fun2);" // Run twice to skip premonomorphic state.
"f(fun1, fun2)");
CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
Handle<JSFunction> f =
v8::Utils::OpenHandle(