Revert "Add a premonomorphic state to the call target cache."

This reverts commit r19402

R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/169713002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19412 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: mvstanton@chromium.org
Date: 2014-02-17 14:22:18 +00:00
Parent: eaab533fd8
Commit: 8bcdbc354f
10 changed files with 8 additions and 109 deletions
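This revert puts the call target cache back to three states: uninitialized (the hole sentinel), monomorphic (a cached JSFunction), and megamorphic (the undefined sentinel). The intermediate premonomorphic (null) state added in r19402 is removed again, so a call site becomes monomorphic on its first recorded call and a monomorphic miss goes straight to megamorphic. Below is a minimal standalone C++ sketch of that restored state machine; it is illustrative only, not V8 code, and the names are made up for the example.

// Minimal sketch (not V8 code) of the restored call target cache states:
// uninitialized -> monomorphic on the first recorded call, monomorphic
// miss -> megamorphic, with no premonomorphic step in between.
#include <cassert>
#include <cstdio>

enum class Sentinel { kUninitialized /* the_hole */, kMegamorphic /* undefined */ };

struct Slot {
  bool is_sentinel = true;
  Sentinel sentinel = Sentinel::kUninitialized;
  const void* function = nullptr;  // stands in for a cached JSFunction
};

void RecordCallTarget(Slot* slot, const void* callee) {
  if (!slot->is_sentinel) {
    if (slot->function == callee) return;  // monomorphic hit, nothing to do
    slot->is_sentinel = true;              // monomorphic miss: go megamorphic
    slot->sentinel = Sentinel::kMegamorphic;
    return;
  }
  if (slot->sentinel == Sentinel::kUninitialized) {
    slot->is_sentinel = false;             // first call: go monomorphic
    slot->function = callee;
  }
  // Already megamorphic: stay megamorphic.
}

int main() {
  Slot slot;
  int f = 0, g = 0;
  RecordCallTarget(&slot, &f);
  assert(!slot.is_sentinel && slot.function == &f);  // monomorphic after one call
  RecordCallTarget(&slot, &g);
  assert(slot.is_sentinel && slot.sentinel == Sentinel::kMegamorphic);
  std::puts("ok");
  return 0;
}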


@@ -3195,8 +3195,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
masm->isolate()->heap()->undefined_value());
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
// Load the cache state.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
@@ -3224,22 +3222,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
Label not_uninitialized;
__ JumpIfNotRoot(x4, Heap::kTheHoleValueRootIndex, &not_uninitialized);
// PremonomorphicSentinel is an immortal immovable object (null) so no
// write-barrier is needed.
__ Add(x4, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
__ LoadRoot(x10, Heap::kNullValueRootIndex);
__ Str(x10, FieldMemOperand(x4, FixedArray::kHeaderSize));
__ B(&done);
// If the cache isn't uninitialized, it is either premonomorphic or
// monomorphic. If it is premonomorphic, we initialize it thus making
// it monomorphic. Otherwise, we go megamorphic.
__ Bind(&not_uninitialized);
__ JumpIfRoot(x4, Heap::kNullValueRootIndex, &initialize);
__ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ Bind(&megamorphic);


@@ -3017,8 +3017,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
masm->isolate()->heap()->undefined_value());
ASSERT_EQ(*TypeFeedbackInfo::UninitializedSentinel(masm->isolate()),
masm->isolate()->heap()->the_hole_value());
ASSERT_EQ(*TypeFeedbackInfo::PremonomorphicSentinel(masm->isolate()),
masm->isolate()->heap()->null_value());
// Load the cache state into r4.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
@@ -3045,26 +3043,10 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ bind(&miss);
// A monomorphic miss (i.e, here the cache is not uninitialized or
// pre-monomorphic) goes megamorphic.
Label not_uninitialized;
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(ne, &not_uninitialized);
// PremonomorphicSentinel is an immortal immovable object (null) so no
// write-barrier is needed.
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ jmp(&done);
// If the cache isn't uninitialized, it is either premonomorphic or
// monomorphic. If it is premonomorphic, we initialize it thus making
// it monomorphic. Otherwise, we go megamorphic.
__ bind(&not_uninitialized);
__ CompareRoot(r4, Heap::kNullValueRootIndex);
__ b(eq, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);


@@ -2362,26 +2362,10 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ bind(&miss);
// A monomorphic miss (i.e, here the cache is not uninitialized or
// pre-monomorphic) goes megamorphic.
Label not_uninitialized;
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
__ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
__ j(not_equal, &not_uninitialized);
// PremonomorphicSentinel is an immortal immovable object (null) so no
// write-barrier is needed.
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ jmp(&done, Label::kFar);
// If the cache isn't uninitialized, it is either premonomorphic or
// monomorphic. If it is premonomorphic, we initialize it thus making
// it monomorphic. Otherwise, we go megamorphic.
__ bind(&not_uninitialized);
__ cmp(ecx, Immediate(TypeFeedbackInfo::PremonomorphicSentinel(isolate)));
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);


@@ -6537,11 +6537,6 @@ Handle<Object> TypeFeedbackInfo::UninitializedSentinel(Isolate* isolate) {
}
Handle<Object> TypeFeedbackInfo::PremonomorphicSentinel(Isolate* isolate) {
return isolate->factory()->null_value();
}
Handle<Object> TypeFeedbackInfo::MegamorphicSentinel(Isolate* isolate) {
return isolate->factory()->undefined_value();
}
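After the revert only two sentinel values remain for this cache: the hole marks an uninitialized slot and undefined marks a megamorphic one; anything else stored in the slot is concrete feedback such as a JSFunction or an AllocationSite. A hypothetical classifier, not part of the V8 tree, that reads a slot under that assumption could look like this:

// Hypothetical helper (not in the V8 tree): with PremonomorphicSentinel gone,
// a feedback slot holds the uninitialized sentinel (the hole), the megamorphic
// sentinel (undefined), or a concrete feedback object.
enum class CacheState { kUninitialized, kMonomorphic, kMegamorphic };

struct Sentinels {
  const void* the_hole;         // TypeFeedbackInfo::UninitializedSentinel
  const void* undefined_value;  // TypeFeedbackInfo::MegamorphicSentinel
};

CacheState ClassifySlot(const Sentinels& s, const void* slot_value) {
  if (slot_value == s.the_hole) return CacheState::kUninitialized;
  if (slot_value == s.undefined_value) return CacheState::kMegamorphic;
  return CacheState::kMonomorphic;  // e.g. a JSFunction or an AllocationSite
}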


@@ -8172,9 +8172,6 @@ class TypeFeedbackInfo: public Struct {
// The object that indicates an uninitialized cache.
static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
// The object that indicates a cache in pre-monomorphic state.
static inline Handle<Object> PremonomorphicSentinel(Isolate* isolate);
// The object that indicates a megamorphic state.
static inline Handle<Object> MegamorphicSentinel(Isolate* isolate);


@@ -14762,7 +14762,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConstructor) {
Handle<AllocationSite> site;
if (!type_info.is_null() &&
*type_info != isolate->heap()->null_value() &&
*type_info != isolate->heap()->undefined_value()) {
site = Handle<AllocationSite>::cast(type_info);
ASSERT(!site->SitePointsToLiteral());
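Runtime_ArrayConstructor reads the same feedback slot looking for an AllocationSite, so with the null (premonomorphic) sentinel gone it only needs to skip the undefined (megamorphic) sentinel before casting, which is what the restored condition above expresses. A rough sketch of that guard, using stand-in types rather than the real V8 handle classes:

// Rough sketch of the restored Runtime_ArrayConstructor guard; Heap and the
// raw pointers are stand-ins, not the real V8 types. The slot is usable as an
// AllocationSite only if it is present and not the megamorphic sentinel.
struct Heap { const void* undefined_value; };

bool HasUsableAllocationSite(const Heap& heap, const void* type_info) {
  return type_info != nullptr && type_info != heap.undefined_value;
}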


@@ -2201,25 +2201,10 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ bind(&miss);
// A monomorphic miss (i.e, here the cache is not uninitialized or
// pre-monomorphic) goes megamorphic.
Label not_uninitialized;
// A monomorphic miss (i.e, here the cache is not uninitialized) goes
// megamorphic.
__ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
__ j(not_equal, &not_uninitialized);
// PremonomorphicSentinel is an immortal immovable object (null) so no
// write-barrier is needed.
__ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ jmp(&done);
// If the cache isn't uninitialized, it is either premonomorphic or
// monomorphic. If it is premonomorphic, we initialize it thus making
// it monomorphic. Otherwise, we go megamorphic.
__ bind(&not_uninitialized);
__ Cmp(rcx, TypeFeedbackInfo::PremonomorphicSentinel(isolate));
__ j(equal, &initialize);
// MegamorphicSentinel is an immortal immovable object (undefined) so no
// write-barrier is needed.
__ bind(&megamorphic);


@@ -2848,9 +2848,7 @@ TEST(IncrementalMarkingClearsTypeFeedbackInfo) {
// originating from two different native contexts.
CcTest::global()->Set(v8_str("fun1"), fun1);
CcTest::global()->Set(v8_str("fun2"), fun2);
CompileRun("function f(a, b) { a(); b(); }"
"f(fun1, fun2);" // Run twice to skip premonomorphic state.
"f(fun1, fun2)");
CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
Handle<JSFunction> f =
v8::Utils::OpenHandle(


@@ -128,7 +128,6 @@ if (support_smi_only_arrays) {
}
// Case: [1,2,3] as allocation site
get_standard_literal(); // Skip premonomorphic state.
obj = fastliteralcase(get_standard_literal(), 1);
assertKind(elements_kind.fast_smi_only, obj);
obj = fastliteralcase(get_standard_literal(), 1.5);
@@ -170,7 +169,6 @@ if (support_smi_only_arrays) {
return literal;
}
fastliteralcase_smifast(1); // Skip premonomorphic state.
obj = fastliteralcase_smifast(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = fastliteralcase_smifast("carter");
@@ -185,7 +183,6 @@ if (support_smi_only_arrays) {
return literal;
}
fastliteralcase_smiholey(5, 1); // Skip premonomorphic state.
obj = fastliteralcase_smiholey(5, 1);
assertKind(elements_kind.fast_smi_only, obj);
assertHoley(obj);
@@ -200,7 +197,6 @@ if (support_smi_only_arrays) {
}
// Case: new Array() as allocation site, smi->double
newarraycase_smidouble(1); // Skip premonomorphic state.
obj = newarraycase_smidouble(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_smidouble(1.5);
@@ -215,7 +211,6 @@ if (support_smi_only_arrays) {
}
// Case: new Array() as allocation site, smi->fast
newarraycase_smiobj(1); // Skip premonomorphic state.
obj = newarraycase_smiobj(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_smiobj("gloria");
@@ -230,7 +225,6 @@ if (support_smi_only_arrays) {
}
// Case: new Array(length) as allocation site
newarraycase_length_smidouble(1); // Skip premonomorphic state.
obj = newarraycase_length_smidouble(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_length_smidouble(1.5);
@@ -254,7 +248,6 @@ if (support_smi_only_arrays) {
}
// Case: new Array(<length>) as allocation site, smi->fast
newarraycase_length_smiobj(1); // Skip premonomorphic state.
obj = newarraycase_length_smiobj(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_length_smiobj("gloria");
@@ -268,7 +261,6 @@ if (support_smi_only_arrays) {
return a;
}
newarraycase_list_smidouble(1); // Skip premonomorphic state.
obj = newarraycase_list_smidouble(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_list_smidouble(1.5);
@@ -282,7 +274,6 @@ if (support_smi_only_arrays) {
return a;
}
newarraycase_list_smiobj(1); // Skip premonomorphic state.
obj = newarraycase_list_smiobj(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = newarraycase_list_smiobj("coates");
@@ -302,7 +293,6 @@ if (support_smi_only_arrays) {
return a;
}
foo(0); foo(1); // Skip premonomorphic state.
for (i = 0; i < 2; i++) {
a = foo(i);
b = foo(i);
@@ -323,7 +313,6 @@ if (support_smi_only_arrays) {
return a;
}
newarraycase_onearg(5, 3.5); // Skip premonomorphic state.
obj = newarraycase_onearg(5, 3.5);
assertKind(elements_kind.fast_double, obj);
obj = newarraycase_onearg(10, 5);
@@ -399,7 +388,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_nested_literal(); // Skip premonomorphic state.
obj = get_nested_literal();
assertKind(elements_kind.fast, obj);
obj[0][0] = 3.5;
@@ -415,7 +403,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_deep_nested_literal(); // Skip premonomorphic state.
obj = get_deep_nested_literal();
assertKind(elements_kind.fast_smi_only, obj[1][0]);
obj[0][0] = 3.5;
@@ -441,7 +428,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_object_literal(); // Skip premonomorphic state.
obj = get_object_literal();
assertKind(elements_kind.fast_smi_only, obj.array);
obj.array[1] = 3.5;
@@ -457,7 +443,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_nested_object_literal(); // Skip premonomorphic state.
obj = get_nested_object_literal();
assertKind(elements_kind.fast, obj.array);
assertKind(elements_kind.fast_smi_only, obj.array[1]);
@@ -477,7 +462,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_nested_literal(); // Skip premonomorphic state.
obj = get_nested_literal();
assertKind(elements_kind.fast, obj);
obj[0][0] = 3.5;
@@ -493,7 +477,6 @@ if (support_smi_only_arrays) {
return literal;
}
get_deep_nested_literal(); // Skip premonomorphic state.
obj = get_deep_nested_literal();
assertKind(elements_kind.fast_smi_only, obj[1][0]);
obj[0][0] = 3.5;


@@ -89,7 +89,6 @@ if (support_smi_only_arrays) {
return new t(len);
}
bar(Array, 10); // Skip premonomorphic state.
a = bar(Array, 10);
a[0] = 3.5;
b = bar(Array, 1);
@@ -109,8 +108,6 @@ if (support_smi_only_arrays) {
function bar0(t) {
return new t();
}
bar0(Array); // Skip premonomorphic state.
a = bar0(Array);
a[0] = 3.5;
b = bar0(Array);
@@ -142,8 +139,6 @@ if (support_smi_only_arrays) {
function bar(len) {
return new Array(len);
}
bar(10); // Skip premonomorphic state.
a = bar(10);
a[0] = "a string";
a = bar(10);
@@ -195,8 +190,6 @@ if (support_smi_only_arrays) {
function bar() {
return new Array();
}
bar(); // Skip premonomorphic state.
a = bar();
bar();
%OptimizeFunctionOnNextCall(bar);