diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h index 9cc6be2ae8..5bc1355d6d 100644 --- a/src/arm/code-stubs-arm.h +++ b/src/arm/code-stubs-arm.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2010 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc index 8749eea5b2..731c4719e7 100644 --- a/src/arm/ic-arm.cc +++ b/src/arm/ic-arm.cc @@ -868,7 +868,10 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { } -void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { +Object* KeyedLoadIC_Miss(Arguments args); + + +void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { // ---------- S t a t e -------------- // -- lr : return address // -- r0 : key @@ -880,11 +883,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { __ Push(r1, r0); - // Perform tail call to the entry. - ExternalReference ref = force_generic - ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate) - : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); - + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); __ TailCallExternalReference(ref, 2, 1); } @@ -1075,7 +1075,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) { char_at_generator.GenerateSlow(masm, call_helper); __ bind(&miss); - GenerateMiss(masm, false); + GenerateMiss(masm); } @@ -1115,11 +1115,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { 1); __ bind(&slow); - GenerateMiss(masm, false); + GenerateMiss(masm); } -void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { +void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { // ---------- S t a t e -------------- // -- r0 : value // -- r1 : key @@ -1130,29 +1130,8 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { // Push receiver, key and value for runtime call. __ Push(r2, r1, r0); - ExternalReference ref = force_generic - ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric), - masm->isolate()) - : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); - __ TailCallExternalReference(ref, 3, 1); -} - - -void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { - // ---------- S t a t e -------------- - // -- r0 : value - // -- r1 : key - // -- r2 : receiver - // -- lr : return address - // ----------------------------------- - - // Push receiver, key and value for runtime call. - __ Push(r2, r1, r0); - - // The slow case calls into the runtime to complete the store without causing - // an IC miss that would otherwise cause a transition to the generic stub. 
ExternalReference ref = - ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); + ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 885dd5a0c7..340ac379aa 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -1681,23 +1681,6 @@ void MacroAssembler::CheckMap(Register obj, } -void MacroAssembler::DispatchMap(Register obj, - Register scratch, - Handle map, - Handle success, - SmiCheckType smi_check_type) { - Label fail; - if (smi_check_type == DO_SMI_CHECK) { - JumpIfSmi(obj, &fail); - } - ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); - mov(ip, Operand(map)); - cmp(scratch, ip); - Jump(success, RelocInfo::CODE_TARGET, eq); - bind(&fail); -} - - void MacroAssembler::TryGetFunctionPrototype(Register function, Register result, Register scratch, diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index 8996537d3d..9be4ca093d 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -578,7 +578,6 @@ class MacroAssembler: public Assembler { Label* fail, SmiCheckType smi_check_type); - void CheckMap(Register obj, Register scratch, Heap::RootListIndex index, @@ -586,16 +585,6 @@ class MacroAssembler: public Assembler { SmiCheckType smi_check_type); - // Check if the map of an object is equal to a specified map and branch to a - // specified target if equal. Skip the smi check if not required (object is - // known to be a heap object) - void DispatchMap(Register obj, - Register scratch, - Handle map, - Handle success, - SmiCheckType smi_check_type); - - // Compare the object in a register to a value from the root list. // Uses the ip register as scratch. void CompareRoot(Register obj, Heap::RootListIndex index); diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc index 21b77e1bcb..eca3e453c0 100644 --- a/src/arm/stub-cache-arm.cc +++ b/src/arm/stub-cache-arm.cc @@ -3091,56 +3091,52 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { } -MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) { - // ----------- S t a t e ------------- - // -- lr : return address - // -- r0 : key - // -- r1 : receiver - // ----------------------------------- - MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(r1, - r2, - Handle(receiver_map), - Handle(stub), - DO_SMI_CHECK); - - Handle ic = isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(ic, RelocInfo::CODE_TARGET); - - // Return the generated code. - return GetCode(NORMAL, NULL); -} - - -MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { // ----------- S t a t e ------------- // -- lr : return address // -- r0 : key // -- r1 : receiver // ----------------------------------- Label miss; - __ JumpIfSmi(r1, &miss); - int receiver_count = receiver_maps->length(); + // Check that the receiver isn't a smi. + __ tst(r1, Operand(kSmiTagMask)); + __ b(eq, &miss); + + // Check that the map matches. 
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); - for (int current = 0; current < receiver_count; ++current) { - Handle map(receiver_maps->at(current)); - Handle code(handler_ics->at(current)); - __ mov(ip, Operand(map)); - __ cmp(r2, ip); - __ Jump(code, RelocInfo::CODE_TARGET, eq); - } + __ cmp(r2, Operand(Handle(receiver->map()))); + __ b(ne, &miss); + + // Check that the key is a smi. + __ tst(r0, Operand(kSmiTagMask)); + __ b(ne, &miss); + + // Get the elements array. + __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset)); + __ AssertFastElements(r2); + + // Check that the key is within bounds. + __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); + __ cmp(r0, Operand(r3)); + __ b(hs, &miss); + + // Load the result and make sure it's not the hole. + __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + __ ldr(r4, + MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); + __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); + __ cmp(r4, ip); + __ b(eq, &miss); + __ mov(r0, r4); + __ Ret(); __ bind(&miss); - Handle miss_ic = isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(miss_ic, RelocInfo::CODE_TARGET, al); + GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return GetCode(NORMAL, NULL, MEGAMORPHIC); + return GetCode(NORMAL, NULL); } @@ -3182,27 +3178,69 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, } -MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement( - Map* receiver_map) { +MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( + JSObject* receiver) { // ----------- S t a t e ------------- // -- r0 : value // -- r1 : key // -- r2 : receiver // -- lr : return address // -- r3 : scratch + // -- r4 : scratch (elements) // ----------------------------------- - bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; - MaybeObject* maybe_stub = - KeyedStoreFastElementStub(is_js_array).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(r2, - r3, - Handle(receiver_map), - Handle(stub), - DO_SMI_CHECK); + Label miss; - Handle ic = isolate()->builtins()->KeyedStoreIC_Miss(); + Register value_reg = r0; + Register key_reg = r1; + Register receiver_reg = r2; + Register scratch = r3; + Register elements_reg = r4; + + // Check that the receiver isn't a smi. + __ tst(receiver_reg, Operand(kSmiTagMask)); + __ b(eq, &miss); + + // Check that the map matches. + __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); + __ cmp(scratch, Operand(Handle(receiver->map()))); + __ b(ne, &miss); + + // Check that the key is a smi. + __ tst(key_reg, Operand(kSmiTagMask)); + __ b(ne, &miss); + + // Get the elements array and make sure it is a fast element array, not 'cow'. + __ ldr(elements_reg, + FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); + __ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset)); + __ cmp(scratch, Operand(Handle(factory()->fixed_array_map()))); + __ b(ne, &miss); + + // Check that the key is within bounds. + if (receiver->IsJSArray()) { + __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); + } else { + __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); + } + // Compare smis. 
+ __ cmp(key_reg, scratch); + __ b(hs, &miss); + + __ add(scratch, + elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); + __ str(value_reg, + MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); + __ RecordWrite(scratch, + Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), + receiver_reg , elements_reg); + + // value_reg (r0) is preserved. + // Done. + __ Ret(); + + __ bind(&miss); + Handle ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss(); __ Jump(ic, RelocInfo::CODE_TARGET); // Return the generated code. @@ -3210,38 +3248,6 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement( } -MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics) { - // ----------- S t a t e ------------- - // -- r0 : value - // -- r1 : key - // -- r2 : receiver - // -- lr : return address - // -- r3 : scratch - // ----------------------------------- - Label miss; - __ JumpIfSmi(r2, &miss); - - int receiver_count = receiver_maps->length(); - __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); - for (int current = 0; current < receiver_count; ++current) { - Handle map(receiver_maps->at(current)); - Handle code(handler_ics->at(current)); - __ mov(ip, Operand(map)); - __ cmp(r3, ip); - __ Jump(code, RelocInfo::CODE_TARGET, eq); - } - - __ bind(&miss); - Handle miss_ic = isolate()->builtins()->KeyedStoreIC_Miss(); - __ Jump(miss_ic, RelocInfo::CODE_TARGET, al); - - // Return the generated code. - return GetCode(NORMAL, NULL, MEGAMORPHIC); -} - - MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { // ----------- S t a t e ------------- // -- r0 : argc @@ -3386,60 +3392,6 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { } -MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad( - JSObject*receiver, ExternalArrayType array_type) { - // ----------- S t a t e ------------- - // -- lr : return address - // -- r0 : key - // -- r1 : receiver - // ----------------------------------- - MaybeObject* maybe_stub = - KeyedLoadExternalArrayStub(array_type).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(r1, - r2, - Handle(receiver->map()), - Handle(stub), - DO_SMI_CHECK); - - Handle ic = isolate()->builtins()->KeyedLoadIC_Miss(); - __ Jump(ic, RelocInfo::CODE_TARGET); - - // Return the generated code. 
- return GetCode(); -} - - -MaybeObject* ExternalArrayStoreStubCompiler::CompileStore( - JSObject* receiver, ExternalArrayType array_type) { - // ----------- S t a t e ------------- - // -- r0 : value - // -- r1 : name - // -- r2 : receiver - // -- lr : return address - // ----------------------------------- - MaybeObject* maybe_stub = - KeyedStoreExternalArrayStub(array_type).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(r2, - r3, - Handle(receiver->map()), - Handle(stub), - DONT_DO_SMI_CHECK); - - Handle ic = isolate()->builtins()->KeyedStoreIC_Miss(); - __ Jump(ic, RelocInfo::CODE_TARGET); - - return GetCode(); -} - - -#undef __ -#define __ ACCESS_MASM(masm) - - static bool IsElementTypeSigned(ExternalArrayType array_type) { switch (array_type) { case kExternalByteArray: @@ -3459,24 +3411,30 @@ static bool IsElementTypeSigned(ExternalArrayType array_type) { } -void KeyedLoadStubCompiler::GenerateLoadExternalArray( - MacroAssembler* masm, - ExternalArrayType array_type) { +MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub( + JSObject* receiver_object, + ExternalArrayType array_type, + Code::Flags flags) { // ---------- S t a t e -------------- // -- lr : return address // -- r0 : key // -- r1 : receiver // ----------------------------------- - Label miss_force_generic, slow, failed_allocation; + Label slow, failed_allocation; Register key = r0; Register receiver = r1; - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. + // Check that the object isn't a smi + __ JumpIfSmi(receiver, &slow); // Check that the key is a smi. - __ JumpIfNotSmi(key, &miss_force_generic); + __ JumpIfNotSmi(key, &slow); + + // Make sure that we've got the right map. + __ ldr(r2, FieldMemOperand(receiver, HeapObject::kMapOffset)); + __ cmp(r2, Operand(Handle(receiver_object->map()))); + __ b(ne, &slow); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); // r3: elements array @@ -3485,7 +3443,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); __ cmp(ip, Operand(key, ASR, kSmiTagSize)); // Unsigned comparison catches both negative and too-large values. - __ b(lo, &miss_force_generic); + __ b(lo, &slow); __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); // r3: base pointer of external storage @@ -3577,7 +3535,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( __ Ret(); } else { WriteInt32ToHeapNumberStub stub(value, r0, r3); - __ TailCallStub(&stub); + MaybeObject* stub_code = masm()->TryTailCallStub(&stub); + if (stub_code->IsFailure()) return stub_code; } } else if (array_type == kExternalUnsignedIntArray) { // The test is different for unsigned int values. Since we need @@ -3622,12 +3581,12 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( __ bind(&box_int_0); // Integer does not have leading zeros. - GenerateUInt2Double(masm, hiword, loword, r4, 0); + GenerateUInt2Double(masm(), hiword, loword, r4, 0); __ b(&done); __ bind(&box_int_1); // Integer has one leading zero. - GenerateUInt2Double(masm, hiword, loword, r4, 1); + GenerateUInt2Double(masm(), hiword, loword, r4, 1); __ bind(&done); @@ -3749,7 +3708,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( // Slow case, key and receiver still in r0 and r1. 
__ bind(&slow); __ IncrementCounter( - masm->isolate()->counters()->keyed_load_external_array_slow(), + masm()->isolate()->counters()->keyed_load_external_array_slow(), 1, r2, r3); // ---------- S t a t e -------------- @@ -3762,23 +3721,21 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); - __ bind(&miss_force_generic); - Code* stub = masm->isolate()->builtins()->builtin( - Builtins::kKeyedLoadIC_MissForceGeneric); - __ Jump(Handle(stub), RelocInfo::CODE_TARGET); + return GetCode(flags); } -void KeyedStoreStubCompiler::GenerateStoreExternalArray( - MacroAssembler* masm, - ExternalArrayType array_type) { +MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub( + JSObject* receiver_object, + ExternalArrayType array_type, + Code::Flags flags) { // ---------- S t a t e -------------- // -- r0 : value // -- r1 : key // -- r2 : receiver // -- lr : return address // ----------------------------------- - Label slow, check_heap_number, miss_force_generic; + Label slow, check_heap_number; // Register usage. Register value = r0; @@ -3786,20 +3743,25 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( Register receiver = r2; // r3 mostly holds the elements array or the destination external array. - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. + // Check that the object isn't a smi. + __ JumpIfSmi(receiver, &slow); + + // Make sure that we've got the right map. + __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset)); + __ cmp(r3, Operand(Handle(receiver_object->map()))); + __ b(ne, &slow); __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); // Check that the key is a smi. - __ JumpIfNotSmi(key, &miss_force_generic); + __ JumpIfNotSmi(key, &slow); // Check that the index is in range __ SmiUntag(r4, key); __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); __ cmp(r4, ip); // Unsigned comparison catches both negative and too-large values. - __ b(hs, &miss_force_generic); + __ b(hs, &slow); // Handle both smis and HeapNumbers in the fast path. Go to the // runtime for all other kinds of values. @@ -3837,7 +3799,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( break; case kExternalFloatArray: // Perform int-to-float conversion and store to memory. - StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9); + StoreIntAsFloat(masm(), r3, r4, r5, r6, r7, r9); break; case kExternalDoubleArray: __ add(r3, r3, Operand(r4, LSL, 3)); @@ -3849,7 +3811,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( destination = FloatingPointHelper::kCoreRegisters; } FloatingPointHelper::ConvertIntToDouble( - masm, r5, destination, + masm(), r5, destination, d0, r6, r7, // These are: double_dst, dst1, dst2. r4, s2); // These are: scratch2, single_scratch. if (destination == FloatingPointHelper::kVFPRegisters) { @@ -4076,137 +4038,28 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( } } - // Slow case, key and receiver still in r0 and r1. + // Slow case: call runtime. __ bind(&slow); - __ IncrementCounter( - masm->isolate()->counters()->keyed_load_external_array_slow(), - 1, r2, r3); + // Entry registers are intact. 
// ---------- S t a t e -------------- + // -- r0 : value + // -- r1 : key + // -- r2 : receiver // -- lr : return address - // -- r0 : key - // -- r1 : receiver - // ----------------------------------- - Handle slow_ic = - masm->isolate()->builtins()->KeyedStoreIC_Slow(); - __ Jump(slow_ic, RelocInfo::CODE_TARGET); - - // Miss case, call the runtime. - __ bind(&miss_force_generic); - - // ---------- S t a t e -------------- - // -- lr : return address - // -- r0 : key - // -- r1 : receiver // ----------------------------------- - Handle miss_ic = - masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); - __ Jump(miss_ic, RelocInfo::CODE_TARGET); -} + // Push receiver, key and value for runtime call. + __ Push(r2, r1, r0); + __ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes + __ mov(r0, Operand(Smi::FromInt( + Code::ExtractExtraICStateFromFlags(flags) & kStrictMode))); + __ Push(r1, r0); -void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- lr : return address - // -- r0 : key - // -- r1 : receiver - // ----------------------------------- - Label miss_force_generic; + __ TailCallRuntime(Runtime::kSetProperty, 5, 1); - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. - - // Check that the key is a smi. - __ JumpIfNotSmi(r0, &miss_force_generic); - - // Get the elements array. - __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset)); - __ AssertFastElements(r2); - - // Check that the key is within bounds. - __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); - __ cmp(r0, Operand(r3)); - __ b(hs, &miss_force_generic); - - // Load the result and make sure it's not the hole. - __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); - __ ldr(r4, - MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); - __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); - __ cmp(r4, ip); - __ b(eq, &miss_force_generic); - __ mov(r0, r4); - __ Ret(); - - __ bind(&miss_force_generic); - Code* stub = masm->isolate()->builtins()->builtin( - Builtins::kKeyedLoadIC_MissForceGeneric); - __ Jump(Handle(stub), RelocInfo::CODE_TARGET); -} - - -void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, - bool is_js_array) { - // ----------- S t a t e ------------- - // -- r0 : value - // -- r1 : key - // -- r2 : receiver - // -- lr : return address - // -- r3 : scratch - // -- r4 : scratch (elements) - // ----------------------------------- - Label miss_force_generic; - - Register value_reg = r0; - Register key_reg = r1; - Register receiver_reg = r2; - Register scratch = r3; - Register elements_reg = r4; - - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. - - // Check that the key is a smi. - __ JumpIfNotSmi(r0, &miss_force_generic); - - // Get the elements array and make sure it is a fast element array, not 'cow'. - __ ldr(elements_reg, - FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); - __ CheckMap(elements_reg, - scratch, - Heap::kFixedArrayMapRootIndex, - &miss_force_generic, - DONT_DO_SMI_CHECK); - - // Check that the key is within bounds. - if (is_js_array) { - __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); - } else { - __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); - } - // Compare smis. 
- __ cmp(key_reg, scratch); - __ b(hs, &miss_force_generic); - - __ add(scratch, - elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); - ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); - __ str(value_reg, - MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize)); - __ RecordWrite(scratch, - Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize), - receiver_reg , elements_reg); - - // value_reg (r0) is preserved. - // Done. - __ Ret(); - - __ bind(&miss_force_generic); - Handle ic = - masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); - __ Jump(ic, RelocInfo::CODE_TARGET); + return GetCode(flags); } diff --git a/src/builtins.cc b/src/builtins.cc index c398aa30fb..f473559638 100644 --- a/src/builtins.cc +++ b/src/builtins.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -1341,18 +1341,8 @@ static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) { } -static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) { - KeyedLoadIC::GenerateRuntimeGetProperty(masm); -} - - static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) { - KeyedLoadIC::GenerateMiss(masm, false); -} - - -static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) { - KeyedLoadIC::GenerateMiss(masm, true); + KeyedLoadIC::GenerateMiss(masm); } @@ -1441,17 +1431,7 @@ static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) { static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) { - KeyedStoreIC::GenerateMiss(masm, false); -} - - -static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) { - KeyedStoreIC::GenerateMiss(masm, true); -} - - -static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) { - KeyedStoreIC::GenerateSlow(masm); + KeyedStoreIC::GenerateMiss(masm); } diff --git a/src/builtins.h b/src/builtins.h index b01f10b274..a84eb311f8 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2010 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,122 +67,115 @@ enum BuiltinExtraArguments { // Define list of builtins implemented in assembly. 
-#define BUILTIN_LIST_A(V) \ - V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSConstructCall, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(LazyCompile, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(LazyRecompile, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(NotifyOSR, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - \ - V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_MissForceGeneric, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_Slow, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedStoreIC_MissForceGeneric, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedStoreIC_Slow, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - \ - V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - \ - V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \ - Code::kNoExtraICState) \ - V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \ - Code::kNoExtraICState) \ - V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \ - kStrictMode) \ - V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \ - kStrictMode) \ - V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \ - kStrictMode) \ - V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \ - kStrictMode) \ - V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \ - kStrictMode) \ - \ - V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \ - 
Code::kNoExtraICState) \ - \ - V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \ - kStrictMode) \ - V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \ - kStrictMode) \ - \ - /* Uses KeyedLoadIC_Initialize; must be after in list. */ \ - V(FunctionCall, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(FunctionApply, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - \ - V(ArrayCode, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - \ - V(StringConstructCode, BUILTIN, UNINITIALIZED, \ - Code::kNoExtraICState) \ - \ - V(OnStackReplacement, BUILTIN, UNINITIALIZED, \ +#define BUILTIN_LIST_A(V) \ + V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSConstructCall, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(LazyCompile, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(LazyRecompile, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(NotifyOSR, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + \ + V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + \ + V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + \ + V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \ + Code::kNoExtraICState) \ + V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + \ + V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \ + Code::kNoExtraICState) \ + V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \ + kStrictMode) \ + V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \ + kStrictMode) \ + 
V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \ + kStrictMode) \ + V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \ + kStrictMode) \ + V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \ + kStrictMode) \ + \ + V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \ + Code::kNoExtraICState) \ + \ + V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \ + kStrictMode) \ + V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \ + kStrictMode) \ + \ + /* Uses KeyedLoadIC_Initialize; must be after in list. */ \ + V(FunctionCall, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(FunctionApply, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + \ + V(ArrayCode, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + \ + V(StringConstructCode, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ + \ + V(OnStackReplacement, BUILTIN, UNINITIALIZED, \ Code::kNoExtraICState) diff --git a/src/code-stubs.cc b/src/code-stubs.cc index d12def85ef..7f08d22dc9 100644 --- a/src/code-stubs.cc +++ b/src/code-stubs.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -29,7 +29,6 @@ #include "bootstrapper.h" #include "code-stubs.h" -#include "stub-cache.h" #include "factory.h" #include "gdb-jit.h" #include "macro-assembler.h" @@ -244,24 +243,4 @@ const char* InstanceofStub::GetName() { } -void KeyedLoadFastElementStub::Generate(MacroAssembler* masm) { - KeyedLoadStubCompiler::GenerateLoadFastElement(masm); -} - - -void KeyedStoreFastElementStub::Generate(MacroAssembler* masm) { - KeyedStoreStubCompiler::GenerateStoreFastElement(masm, is_js_array_); -} - - -void KeyedLoadExternalArrayStub::Generate(MacroAssembler* masm) { - KeyedLoadStubCompiler::GenerateLoadExternalArray(masm, array_type_); -} - - -void KeyedStoreExternalArrayStub::Generate(MacroAssembler* masm) { - KeyedStoreStubCompiler::GenerateStoreExternalArray(masm, array_type_); -} - - } } // namespace v8::internal diff --git a/src/code-stubs.h b/src/code-stubs.h index 9074793ffd..f16a8db825 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -66,10 +66,6 @@ namespace internal { V(NumberToString) \ V(CEntry) \ V(JSEntry) \ - V(KeyedLoadFastElement) \ - V(KeyedStoreFastElement) \ - V(KeyedLoadExternalArray) \ - V(KeyedStoreExternalArray) \ V(DebuggerStatement) \ V(StringDictionaryNegativeLookup) @@ -926,86 +922,6 @@ class AllowStubCallsScope { DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope); }; -#ifdef DEBUG -#define DECLARE_ARRAY_STUB_PRINT(name) void Print() { PrintF(#name); } -#else -#define DECLARE_ARRAY_STUB_PRINT(name) -#endif - - -class KeyedLoadFastElementStub : public CodeStub { - public: - explicit KeyedLoadFastElementStub() { - } - - Major MajorKey() { return KeyedLoadFastElement; } - int MinorKey() { return 0; } - - void Generate(MacroAssembler* masm); - - const char* GetName() { return "KeyedLoadFastElementStub"; } - - DECLARE_ARRAY_STUB_PRINT(KeyedLoadFastElementStub) -}; - - -class KeyedStoreFastElementStub : public CodeStub { - public: - explicit KeyedStoreFastElementStub(bool is_js_array) - : is_js_array_(is_js_array) { } - - Major MajorKey() { return KeyedStoreFastElement; } - int 
MinorKey() { return is_js_array_ ? 1 : 0; } - - void Generate(MacroAssembler* masm); - - const char* GetName() { return "KeyedStoreFastElementStub"; } - - DECLARE_ARRAY_STUB_PRINT(KeyedStoreFastElementStub) - - private: - bool is_js_array_; -}; - - -class KeyedLoadExternalArrayStub : public CodeStub { - public: - explicit KeyedLoadExternalArrayStub(ExternalArrayType array_type) - : array_type_(array_type) { } - - Major MajorKey() { return KeyedLoadExternalArray; } - int MinorKey() { return array_type_; } - - void Generate(MacroAssembler* masm); - - const char* GetName() { return "KeyedLoadExternalArrayStub"; } - - DECLARE_ARRAY_STUB_PRINT(KeyedLoadExternalArrayStub) - - protected: - ExternalArrayType array_type_; -}; - - -class KeyedStoreExternalArrayStub : public CodeStub { - public: - explicit KeyedStoreExternalArrayStub(ExternalArrayType array_type) - : array_type_(array_type) { } - - Major MajorKey() { return KeyedStoreExternalArray; } - int MinorKey() { return array_type_; } - - void Generate(MacroAssembler* masm); - - const char* GetName() { return "KeyedStoreExternalArrayStub"; } - - DECLARE_ARRAY_STUB_PRINT(KeyedStoreExternalArrayStub) - - protected: - ExternalArrayType array_type_; -}; - - } } // namespace v8::internal #endif // V8_CODE_STUBS_H_ diff --git a/src/flag-definitions.h b/src/flag-definitions.h index a85f5fe8f7..aa1d2745ad 100644 --- a/src/flag-definitions.h +++ b/src/flag-definitions.h @@ -287,9 +287,10 @@ DEFINE_bool(native_code_counters, false, DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(never_compact, false, "Never perform compaction on full GC - testing only") -DEFINE_bool(cleanup_code_caches_at_gc, true, - "Flush inline caches prior to mark compact collection and " - "flush code caches in maps during mark compact cycle.") +DEFINE_bool(cleanup_ics_at_gc, true, + "Flush inline caches prior to mark compact collection.") +DEFINE_bool(cleanup_caches_in_maps_at_gc, true, + "Flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator " "(0, the default, means to use system random).") diff --git a/src/heap.h b/src/heap.h index 3365192832..6c29a0afc2 100644 --- a/src/heap.h +++ b/src/heap.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2010 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -178,14 +178,8 @@ inline Heap* _inline_get_heap_(); V(value_of_symbol, "valueOf") \ V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \ V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \ - V(KeyedLoadSpecializedMonomorphic_symbol, \ - "KeyedLoadSpecializedMonomorphic") \ - V(KeyedLoadSpecializedPolymorphic_symbol, \ - "KeyedLoadSpecializedPolymorphic") \ - V(KeyedStoreSpecializedMonomorphic_symbol, \ - "KeyedStoreSpecializedMonomorphic") \ - V(KeyedStoreSpecializedPolymorphic_symbol, \ - "KeyedStoreSpecializedPolymorphic") \ + V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized") \ + V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized") \ V(stack_overflow_symbol, "kStackOverflowBoilerplate") \ V(illegal_access_symbol, "illegal access") \ V(out_of_memory_symbol, "out-of-memory") \ @@ -213,7 +207,32 @@ inline Heap* _inline_get_heap_(); V(global_eval_symbol, "GlobalEval") \ V(identity_hash_symbol, "v8::IdentityHash") \ V(closure_symbol, "(closure)") \ - V(use_strict, "use strict") + V(use_strict, "use strict") \ + V(KeyedLoadExternalByteArray_symbol, "KeyedLoadExternalByteArray") \ + V(KeyedLoadExternalUnsignedByteArray_symbol, \ + "KeyedLoadExternalUnsignedByteArray") \ + V(KeyedLoadExternalShortArray_symbol, \ + "KeyedLoadExternalShortArray") \ + V(KeyedLoadExternalUnsignedShortArray_symbol, \ + "KeyedLoadExternalUnsignedShortArray") \ + V(KeyedLoadExternalIntArray_symbol, "KeyedLoadExternalIntArray") \ + V(KeyedLoadExternalUnsignedIntArray_symbol, \ + "KeyedLoadExternalUnsignedIntArray") \ + V(KeyedLoadExternalFloatArray_symbol, "KeyedLoadExternalFloatArray") \ + V(KeyedLoadExternalDoubleArray_symbol, "KeyedLoadExternalDoubleArray") \ + V(KeyedLoadExternalPixelArray_symbol, "KeyedLoadExternalPixelArray") \ + V(KeyedStoreExternalByteArray_symbol, "KeyedStoreExternalByteArray") \ + V(KeyedStoreExternalUnsignedByteArray_symbol, \ + "KeyedStoreExternalUnsignedByteArray") \ + V(KeyedStoreExternalShortArray_symbol, "KeyedStoreExternalShortArray") \ + V(KeyedStoreExternalUnsignedShortArray_symbol, \ + "KeyedStoreExternalUnsignedShortArray") \ + V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \ + V(KeyedStoreExternalUnsignedIntArray_symbol, \ + "KeyedStoreExternalUnsignedIntArray") \ + V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \ + V(KeyedStoreExternalDoubleArray_symbol, "KeyedStoreExternalDoubleArray") \ + V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray") // Forward declarations. 
class GCTracer; diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc index 6952823767..bde3b7e1a1 100644 --- a/src/ia32/ic-ia32.cc +++ b/src/ia32/ic-ia32.cc @@ -655,7 +655,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) { char_at_generator.GenerateSlow(masm, call_helper); __ bind(&miss); - GenerateMiss(masm, false); + GenerateMiss(masm); } @@ -698,7 +698,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { __ TailCallExternalReference(ref, 2, 1); __ bind(&slow); - GenerateMiss(masm, false); + GenerateMiss(masm); } @@ -1222,7 +1222,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { } -void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { +void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver @@ -1237,10 +1237,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { __ push(ebx); // return address // Perform tail call to the entry. - ExternalReference ref = force_generic - ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), - masm->isolate()) - : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1432,7 +1430,7 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, } -void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { +void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : key @@ -1447,35 +1445,12 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) { __ push(ebx); // Do tail-call to runtime routine. - ExternalReference ref = force_generic - ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric), - masm->isolate()) - : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } -void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - - __ pop(ebx); - __ push(edx); - __ push(ecx); - __ push(eax); - __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes - __ push(ebx); // return address - - // Do tail-call to runtime routine. 
- ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); - __ TailCallExternalReference(ref, 4, 1); -} - - #undef __ diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index f38e2eefa6..7b2f6cf9d7 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -286,21 +286,6 @@ void MacroAssembler::CheckMap(Register obj, } -void MacroAssembler::DispatchMap(Register obj, - Handle map, - Handle success, - SmiCheckType smi_check_type) { - Label fail; - if (smi_check_type == DONT_DO_SMI_CHECK) { - JumpIfSmi(obj, &fail); - } - cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map)); - j(equal, success); - - bind(&fail); -} - - Condition MacroAssembler::IsObjectStringType(Register heap_object, Register map, Register instance_type) { diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index 837e981018..f48ed1361e 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -45,7 +45,6 @@ enum AllocationFlags { RESULT_CONTAINS_TOP = 1 << 1 }; - // Convenience for platform-independent signatures. We do not normally // distinguish memory operands from other operands on ia32. typedef Operand MemOperand; @@ -207,22 +206,14 @@ class MacroAssembler: public Assembler { // Compare instance type for map. void CmpInstanceType(Register map, InstanceType type); - // Check if the map of an object is equal to a specified map and branch to - // label if not. Skip the smi check if not required (object is known to be a - // heap object) + // Check if the map of an object is equal to a specified map and + // branch to label if not. Skip the smi check if not required + // (object is known to be a heap object) void CheckMap(Register obj, Handle map, Label* fail, SmiCheckType smi_check_type); - // Check if the map of an object is equal to a specified map and branch to a - // specified target if equal. Skip the smi check if not required (object is - // known to be a heap object) - void DispatchMap(Register obj, - Handle map, - Handle success, - SmiCheckType smi_check_type); - // Check if the object in register heap_object is a string. Afterwards the // register map contains the object map and the register instance_type // contains the instance_type. The registers map and instance_type can be the diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc index efaefabbdf..3379ff00fd 100644 --- a/src/ia32/stub-cache-ia32.cc +++ b/src/ia32/stub-cache-ia32.cc @@ -713,14 +713,6 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { } -void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) { - Code* code = masm->isolate()->builtins()->builtin( - Builtins::kKeyedLoadIC_MissForceGeneric); - Handle ic(code); - __ jmp(ic, RelocInfo::CODE_TARGET); -} - - // Both name_reg and receiver_reg are preserved on jumps to miss_label, // but may be destroyed if store is successful. 
void StubCompiler::GenerateStoreField(MacroAssembler* masm, @@ -2662,35 +2654,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, } -MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement( - Map* receiver_map) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; - MaybeObject* maybe_stub = - KeyedStoreFastElementStub(is_js_array).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(edx, - Handle(receiver_map), - Handle(stub), - DO_SMI_CHECK); - - Handle ic = isolate()->builtins()->KeyedStoreIC_Miss(); - __ jmp(ic, RelocInfo::CODE_TARGET); - - // Return the generated code. - return GetCode(NORMAL, NULL); -} - - -MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics) { +MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( + JSObject* receiver) { // ----------- S t a t e ------------- // -- eax : value // -- ecx : key @@ -2698,22 +2663,51 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic( // -- esp[0] : return address // ----------------------------------- Label miss; - __ JumpIfSmi(edx, &miss); - Register map_reg = ebx; - __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset)); - int receiver_count = receiver_maps->length(); - for (int current = 0; current < receiver_count; ++current) { - Handle map(receiver_maps->at(current)); - __ cmp(map_reg, map); - __ j(equal, Handle(handler_ics->at(current))); + // Check that the receiver isn't a smi. + __ test(edx, Immediate(kSmiTagMask)); + __ j(zero, &miss); + + // Check that the map matches. + __ cmp(FieldOperand(edx, HeapObject::kMapOffset), + Immediate(Handle(receiver->map()))); + __ j(not_equal, &miss); + + // Check that the key is a smi. + __ test(ecx, Immediate(kSmiTagMask)); + __ j(not_zero, &miss); + + // Get the elements array and make sure it is a fast element array, not 'cow'. + __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); + __ cmp(FieldOperand(edi, HeapObject::kMapOffset), + Immediate(factory()->fixed_array_map())); + __ j(not_equal, &miss); + + // Check that the key is within bounds. + if (receiver->IsJSArray()) { + __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis. + __ j(above_equal, &miss); + } else { + __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // Compare smis. + __ j(above_equal, &miss); } + + // Do the store and update the write barrier. Make sure to preserve + // the value in register eax. + __ mov(edx, Operand(eax)); + __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax); + __ RecordWrite(edi, 0, edx, ecx); + + // Done. + __ ret(0); + + // Handle store cache miss. __ bind(&miss); - Handle miss_ic = isolate()->builtins()->KeyedStoreIC_Miss(); - __ jmp(miss_ic, RelocInfo::CODE_TARGET); + Handle ic = isolate()->builtins()->KeyedStoreIC_Miss(); + __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. 
- return GetCode(NORMAL, NULL, MEGAMORPHIC); + return GetCode(NORMAL, NULL); } @@ -3126,52 +3120,48 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { } -MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) { - // ----------- S t a t e ------------- - // -- eax : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(edx, - Handle(receiver_map), - Handle(stub), - DO_SMI_CHECK); - - GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); - - // Return the generated code. - return GetCode(NORMAL, NULL); -} - - -MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics) { +MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- Label miss; - __ JumpIfSmi(edx, &miss); - Register map_reg = ebx; - __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset)); - int receiver_count = receiver_maps->length(); - for (int current = 0; current < receiver_count; ++current) { - Handle map(receiver_maps->at(current)); - __ cmp(map_reg, map); - __ j(equal, Handle(handler_ics->at(current))); - } + // Check that the receiver isn't a smi. + __ test(edx, Immediate(kSmiTagMask)); + __ j(zero, &miss); + + // Check that the map matches. + __ cmp(FieldOperand(edx, HeapObject::kMapOffset), + Immediate(Handle(receiver->map()))); + __ j(not_equal, &miss); + + // Check that the key is a smi. + __ test(eax, Immediate(kSmiTagMask)); + __ j(not_zero, &miss); + + // Get the elements array. + __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset)); + __ AssertFastElements(ecx); + + // Check that the key is within bounds. + __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset)); + __ j(above_equal, &miss); + + // Load the result and make sure it's not the hole. + __ mov(ebx, Operand(ecx, eax, times_2, + FixedArray::kHeaderSize - kHeapObjectTag)); + __ cmp(ebx, factory()->the_hole_value()); + __ j(equal, &miss); + __ mov(eax, ebx); + __ ret(0); __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); // Return the generated code. - return GetCode(NORMAL, NULL, MEGAMORPHIC); + return GetCode(NORMAL, NULL); } @@ -3314,82 +3304,36 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { } -MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad( - JSObject*receiver, ExternalArrayType array_type) { +MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub( + JSObject*receiver, ExternalArrayType array_type, Code::Flags flags) { // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- - MaybeObject* maybe_stub = - KeyedLoadExternalArrayStub(array_type).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(edx, - Handle(receiver->map()), - Handle(stub), - DO_SMI_CHECK); + Label slow, failed_allocation; - Handle ic = isolate()->builtins()->KeyedLoadIC_Miss(); - __ jmp(ic, RelocInfo::CODE_TARGET); - - // Return the generated code. 
- return GetCode(); -} - - -MaybeObject* ExternalArrayStoreStubCompiler::CompileStore( - JSObject* receiver, ExternalArrayType array_type) { - // ----------- S t a t e ------------- - // -- eax : value - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - MaybeObject* maybe_stub = - KeyedStoreExternalArrayStub(array_type).TryGetCode(); - Code* stub; - if (!maybe_stub->To(&stub)) return maybe_stub; - __ DispatchMap(edx, - Handle(receiver->map()), - Handle(stub), - DO_SMI_CHECK); - - Handle ic = isolate()->builtins()->KeyedStoreIC_Miss(); - __ jmp(ic, RelocInfo::CODE_TARGET); - - return GetCode(); -} - - -#undef __ -#define __ ACCESS_MASM(masm) - - -void KeyedLoadStubCompiler::GenerateLoadExternalArray( - MacroAssembler* masm, - ExternalArrayType array_type) { - // ----------- S t a t e ------------- - // -- eax : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - Label miss_force_generic, failed_allocation, slow; - - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. + // Check that the object isn't a smi. + __ test(edx, Immediate(kSmiTagMask)); + __ j(zero, &slow); // Check that the key is a smi. __ test(eax, Immediate(kSmiTagMask)); - __ j(not_zero, &miss_force_generic); + __ j(not_zero, &slow); + // Check that the map matches. + __ CheckMap(edx, Handle(receiver->map()), &slow, DO_SMI_CHECK); + __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); + + // eax: key, known to be a smi. + // edx: receiver, known to be a JSObject. + // ebx: elements object, known to be an external array. // Check that the index is in range. __ mov(ecx, eax); __ SmiUntag(ecx); // Untag the index. - __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset)); // Unsigned comparison catches both negative and too-large values. - __ j(above_equal, &miss_force_generic); + __ j(above_equal, &slow); __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset)); // ebx: base pointer of external storage switch (array_type) { @@ -3496,48 +3440,47 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray( // Slow case: Jump to runtime. __ bind(&slow); - Counters* counters = masm->isolate()->counters(); + Counters* counters = isolate()->counters(); __ IncrementCounter(counters->keyed_load_external_array_slow(), 1); - // ----------- S t a t e ------------- // -- eax : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- - Handle ic = masm->isolate()->builtins()->KeyedLoadIC_Slow(); - __ jmp(ic, RelocInfo::CODE_TARGET); + __ pop(ebx); + __ push(edx); // receiver + __ push(eax); // name + __ push(ebx); // return address - // ----------- S t a t e ------------- - // -- eax : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- + // Perform tail call to the entry. + __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); - // Miss case: Jump to runtime. - __ bind(&miss_force_generic); - Handle miss_ic = - masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); - __ jmp(miss_ic, RelocInfo::CODE_TARGET); + // Return the generated code. 
+ return GetCode(flags); } -void KeyedStoreStubCompiler::GenerateStoreExternalArray( - MacroAssembler* masm, - ExternalArrayType array_type) { +MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub( + JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) { // ----------- S t a t e ------------- - // -- eax : key + // -- eax : value + // -- ecx : key // -- edx : receiver // -- esp[0] : return address // ----------------------------------- - Label miss_force_generic, slow, check_heap_number; + Label slow, check_heap_number; - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. + // Check that the object isn't a smi. + __ test(edx, Immediate(kSmiTagMask)); + __ j(zero, &slow); + + // Check that the map matches. + __ CheckMap(edx, Handle(receiver->map()), &slow, DO_SMI_CHECK); // Check that the key is a smi. __ test(ecx, Immediate(kSmiTagMask)); - __ j(not_zero, &miss_force_generic); + __ j(not_zero, &slow); // Check that the index is in range. __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); @@ -3616,7 +3559,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( // edi: elements array // ebx: untagged index __ cmp(FieldOperand(eax, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->heap_number_map())); + Immediate(factory()->heap_number_map())); __ j(not_equal, &slow); // The WebGL specification leaves the behavior of storing NaN and @@ -3711,9 +3654,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( // Slow case: call runtime. __ bind(&slow); - Counters* counters = masm->isolate()->counters(); - __ IncrementCounter(counters->keyed_store_external_array_slow(), 1); - // ----------- S t a t e ------------- // -- eax : value // -- ecx : key @@ -3721,109 +3661,19 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray( // -- esp[0] : return address // ----------------------------------- - Handle ic = masm->isolate()->builtins()->KeyedStoreIC_Slow(); - __ jmp(ic, RelocInfo::CODE_TARGET); + __ pop(ebx); + __ push(edx); + __ push(ecx); + __ push(eax); + __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes + __ push(Immediate(Smi::FromInt( + Code::ExtractExtraICStateFromFlags(flags) & kStrictMode))); + __ push(ebx); // return address - // ----------- S t a t e ------------- - // -- eax : value - // -- ecx : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- + // Do tail-call to runtime routine. + __ TailCallRuntime(Runtime::kSetProperty, 5, 1); - __ bind(&miss_force_generic); - Handle miss_ic = - masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); - __ jmp(miss_ic, RelocInfo::CODE_TARGET); -} - - - - -void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- eax : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - Label miss_force_generic; - - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. - - // Check that the key is a smi. - __ test(eax, Immediate(kSmiTagMask)); - __ j(not_zero, &miss_force_generic); - - // Get the elements array. - __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset)); - __ AssertFastElements(ecx); - - // Check that the key is within bounds. 
- __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset)); - __ j(above_equal, &miss_force_generic); - - // Load the result and make sure it's not the hole. - __ mov(ebx, Operand(ecx, eax, times_2, - FixedArray::kHeaderSize - kHeapObjectTag)); - __ cmp(ebx, masm->isolate()->factory()->the_hole_value()); - __ j(equal, &miss_force_generic); - __ mov(eax, ebx); - __ ret(0); - - __ bind(&miss_force_generic); - Handle miss_ic = - masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); - __ jmp(miss_ic, RelocInfo::CODE_TARGET); -} - - -void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, - bool is_js_array) { - // ----------- S t a t e ------------- - // -- eax : key - // -- edx : receiver - // -- esp[0] : return address - // ----------------------------------- - Label miss_force_generic; - - // This stub is meant to be tail-jumped to, the receiver must already - // have been verified by the caller to not be a smi. - - // Check that the key is a smi. - __ test(ecx, Immediate(kSmiTagMask)); - __ j(not_zero, &miss_force_generic); - - // Get the elements array and make sure it is a fast element array, not 'cow'. - __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); - __ cmp(FieldOperand(edi, HeapObject::kMapOffset), - Immediate(masm->isolate()->factory()->fixed_array_map())); - __ j(not_equal, &miss_force_generic); - - if (is_js_array) { - // Check that the key is within bounds. - __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis. - __ j(above_equal, &miss_force_generic); - } else { - // Check that the key is within bounds. - __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis. - __ j(above_equal, &miss_force_generic); - } - - // Do the store and update the write barrier. Make sure to preserve - // the value in register eax. - __ mov(edx, Operand(eax)); - __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax); - __ RecordWrite(edi, 0, edx, ecx); - - // Done. - __ ret(0); - - // Handle store cache miss, replacing the ic with the generic stub. - __ bind(&miss_force_generic); - Handle ic_force_generic = - masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); - __ jmp(ic_force_generic, RelocInfo::CODE_TARGET); + return GetCode(flags); } diff --git a/src/ic.cc b/src/ic.cc index 8506c0ec0a..3f5326b002 100644 --- a/src/ic.cc +++ b/src/ic.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2009 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -67,33 +67,7 @@ void IC::TraceIC(const char* type, State new_state = StateFrom(new_target, HEAP->undefined_value(), HEAP->undefined_value()); - PrintF("[%s in ", type); - StackFrameIterator it; - while (it.frame()->fp() != this->fp()) it.Advance(); - StackFrame* raw_frame = it.frame(); - if (raw_frame->is_internal()) { - Isolate* isolate = new_target->GetIsolate(); - Code* apply_builtin = isolate->builtins()->builtin( - Builtins::kFunctionApply); - if (raw_frame->unchecked_code() == apply_builtin) { - PrintF("apply from "); - it.Advance(); - raw_frame = it.frame(); - } - } - if (raw_frame->is_java_script()) { - JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame); - Code* js_code = frame->unchecked_code(); - // Find the function on the stack and both the active code for the - // function and the original code. 
- JSFunction* function = JSFunction::cast(frame->function()); - function->PrintName(); - int code_offset = address() - js_code->instruction_start(); - PrintF("+%d", code_offset); - } else { - PrintF(""); - } - PrintF(" (%c->%c)%s", + PrintF("[%s (%c->%c)%s", type, TransitionMarkFromState(old_state), TransitionMarkFromState(new_state), extra_info); @@ -300,9 +274,11 @@ void IC::Clear(Address address) { switch (target->kind()) { case Code::LOAD_IC: return LoadIC::Clear(address, target); case Code::KEYED_LOAD_IC: + case Code::KEYED_EXTERNAL_ARRAY_LOAD_IC: return KeyedLoadIC::Clear(address, target); case Code::STORE_IC: return StoreIC::Clear(address, target); case Code::KEYED_STORE_IC: + case Code::KEYED_EXTERNAL_ARRAY_STORE_IC: return KeyedStoreIC::Clear(address, target); case Code::CALL_IC: return CallIC::Clear(address, target); case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target); @@ -1056,49 +1032,9 @@ void LoadIC::UpdateCaches(LookupResult* lookup, } -String* KeyedLoadIC::GetStubNameForCache(IC::State ic_state) { - if (ic_state == MONOMORPHIC) { - return isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol(); - } else { - ASSERT(ic_state == MEGAMORPHIC); - return isolate()->heap()->KeyedLoadSpecializedPolymorphic_symbol(); - } -} - - -MaybeObject* KeyedLoadIC::GetFastElementStubWithoutMapCheck( - bool is_js_array) { - return KeyedLoadFastElementStub().TryGetCode(); -} - - -MaybeObject* KeyedLoadIC::GetExternalArrayStubWithoutMapCheck( - ExternalArrayType array_type) { - return KeyedLoadExternalArrayStub(array_type).TryGetCode(); -} - - -MaybeObject* KeyedLoadIC::ConstructMegamorphicStub( - MapList* receiver_maps, - CodeList* targets, - StrictModeFlag strict_mode) { - Object* object; - KeyedLoadStubCompiler compiler; - MaybeObject* maybe_code = compiler.CompileLoadMegamorphic(receiver_maps, - targets); - if (!maybe_code->ToObject(&object)) return maybe_code; - isolate()->counters()->keyed_load_polymorphic_stubs()->Increment(); - PROFILE(isolate(), CodeCreateEvent( - Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG, - Code::cast(object), 0)); - return object; -} - - MaybeObject* KeyedLoadIC::Load(State state, Handle object, - Handle key, - bool force_generic_stub) { + Handle key) { // Check for values that can be converted into a symbol. // TODO(1295): Remove this code. HandleScope scope(isolate()); @@ -1224,31 +1160,34 @@ MaybeObject* KeyedLoadIC::Load(State state, if (use_ic) { Code* stub = generic_stub(); - if (!force_generic_stub) { + if (state == UNINITIALIZED) { if (object->IsString() && key->IsNumber()) { - if (state == UNINITIALIZED) { - stub = string_stub(); - } + stub = string_stub(); } else if (object->IsJSObject()) { - JSObject* receiver = JSObject::cast(*object); - if (receiver->HasIndexedInterceptor()) { + Handle receiver = Handle::cast(object); + if (receiver->HasExternalArrayElements()) { + MaybeObject* probe = + isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray( + *receiver, false, kNonStrictMode); + stub = probe->IsFailure() ? + NULL : Code::cast(probe->ToObjectUnchecked()); + } else if (receiver->HasIndexedInterceptor()) { stub = indexed_interceptor_stub(); - } else if (key->IsSmi()) { - MaybeObject* maybe_stub = ComputeStub(receiver, - false, - kNonStrictMode, - stub); - stub = maybe_stub->IsFailure() ? - NULL : Code::cast(maybe_stub->ToObjectUnchecked()); + } else if (key->IsSmi() && + receiver->map()->has_fast_elements()) { + MaybeObject* probe = + isolate()->stub_cache()->ComputeKeyedLoadSpecialized(*receiver); + stub = probe->IsFailure() ? 
+ NULL : Code::cast(probe->ToObjectUnchecked()); } } } if (stub != NULL) set_target(stub); - } #ifdef DEBUG - TraceIC("KeyedLoadIC", key, state, target()); + TraceIC("KeyedLoadIC", key, state, target()); #endif // DEBUG + } // Get the property. return Runtime::GetObjectProperty(isolate(), object, key); @@ -1545,256 +1484,11 @@ void StoreIC::UpdateCaches(LookupResult* lookup, } -static bool AddOneReceiverMapIfMissing(MapList* receiver_maps, - Map* new_receiver_map) { - for (int current = 0; current < receiver_maps->length(); ++current) { - if (receiver_maps->at(current) == new_receiver_map) { - return false; - } - } - receiver_maps->Add(new_receiver_map); - return true; -} - - -void KeyedIC::GetReceiverMapsForStub(Code* stub, MapList* result) { - ASSERT(stub->is_inline_cache_stub()); - if (stub == string_stub()) { - return result->Add(isolate()->heap()->string_map()); - } else if (stub->is_keyed_load_stub() || stub->is_keyed_store_stub()) { - if (stub->ic_state() == MONOMORPHIC) { - result->Add(Map::cast(stub->FindFirstMap())); - } else { - ASSERT(stub->ic_state() == MEGAMORPHIC); - AssertNoAllocation no_allocation; - int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); - for (RelocIterator it(stub, mask); !it.done(); it.next()) { - RelocInfo* info = it.rinfo(); - Object* object = info->target_object(); - ASSERT(object->IsMap()); - result->Add(Map::cast(object)); - } - } - } -} - - -MaybeObject* KeyedIC::ComputeStub(JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode, - Code* generic_stub) { - State ic_state = target()->ic_state(); - Code* monomorphic_stub; - // Always compute the MONOMORPHIC stub, even if the MEGAMORPHIC stub ends up - // being used. This is necessary because the megamorphic stub needs to have - // access to more information than what is stored in the receiver map in some - // cases (external arrays need the array type from the MONOMORPHIC stub). - MaybeObject* maybe_stub = ComputeMonomorphicStub(receiver, - is_store, - strict_mode, - generic_stub); - if (!maybe_stub->To(&monomorphic_stub)) return maybe_stub; - - if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) { - return monomorphic_stub; - } - ASSERT(target() != generic_stub); - - // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS - // via megamorphic stubs, since they don't have a map in their relocation info - // and so the stubs can't be harvested for the object needed for a map check. - if (target()->type() != NORMAL) { - return generic_stub; - } - - // Determine the list of receiver maps that this call site has seen, - // adding the map that was just encountered. - MapList target_receiver_maps; - GetReceiverMapsForStub(target(), &target_receiver_maps); - if (!AddOneReceiverMapIfMissing(&target_receiver_maps, receiver->map())) { - // If the miss wasn't due to an unseen map, a MEGAMORPHIC stub - // won't help, use the generic stub. - return generic_stub; - } - - // TODO(1385): Currently MEGAMORPHIC stubs are cached in the receiver map stub - // cache, but that can put receiver types together from unrelated call sites - // into the same stub--they always handle the union of all receiver maps seen - // at all call sites involving the receiver map. This is only an - // approximation: ideally, there would be a global cache that mapped sets of - // receiver maps to MEGAMORPHIC stubs. The complexity of the MEGAMORPHIC stub - // computation also leads to direct manipulation of the stub cache from the IC - // code, which the global cache solution would avoid. 
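// Editor's note (not part of the patch): the removed KeyedIC::ComputeStub code
// above builds a MEGAMORPHIC stub that dispatches on the receiver map to at
// most kMaxKeyedPolymorphism per-map handler stubs, falling back to the
// generic stub otherwise.  The following is a simplified, illustrative sketch
// of that dispatch idea in plain C++; FakeMap, Handler and
// PolymorphicDispatcher are invented names, not V8 APIs.

struct FakeMap {};                    // stands in for v8::internal::Map
typedef int (*Handler)(int key);      // stands in for a compiled handler stub

class PolymorphicDispatcher {
 public:
  static const int kMaxKeyedPolymorphism = 4;

  explicit PolymorphicDispatcher(Handler generic)
      : count_(0), generic_(generic) {}

  // Returns false once the call site has seen more maps than the limit; the
  // caller would then switch to the generic stub for good.
  bool AddHandler(const FakeMap* map, Handler handler) {
    for (int i = 0; i < count_; ++i) {
      if (maps_[i] == map) return true;  // map already known, nothing to add
    }
    if (count_ == kMaxKeyedPolymorphism) return false;
    maps_[count_] = map;
    handlers_[count_] = handler;
    ++count_;
    return true;
  }

  // The dispatch a MEGAMORPHIC stub performs: linear map check, then jump to
  // the matching handler, otherwise fall through to the generic stub.
  int Load(const FakeMap* receiver_map, int key) const {
    for (int i = 0; i < count_; ++i) {
      if (maps_[i] == receiver_map) return handlers_[i](key);
    }
    return generic_(key);
  }

 private:
  int count_;
  const FakeMap* maps_[kMaxKeyedPolymorphism];
  Handler handlers_[kMaxKeyedPolymorphism];
  Handler generic_;
};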
- Code::Kind kind = this->kind(); - Code::Flags flags = Code::ComputeFlags(kind, - NOT_IN_LOOP, - MEGAMORPHIC, - strict_mode); - String* megamorphic_name = GetStubNameForCache(MEGAMORPHIC); - Object* maybe_cached_stub = receiver->map()->FindInCodeCache(megamorphic_name, - flags); - - // Create a set of all receiver maps that have been seen at the IC call site - // and those seen by the MEGAMORPHIC cached stub, if that's the stub that's - // been selected. - MapList receiver_maps; - if (!maybe_cached_stub->IsUndefined()) { - GetReceiverMapsForStub(Code::cast(maybe_cached_stub), &receiver_maps); - } - bool added_map = false; - for (int i = 0; i < target_receiver_maps.length(); ++i) { - if (AddOneReceiverMapIfMissing(&receiver_maps, - target_receiver_maps.at(i))) { - added_map = true; - } - } - ASSERT(receiver_maps.length() > 0); - - // If the maximum number of receiver maps has been exceeded, use the Generic - // version of the IC. - if (receiver_maps.length() > KeyedIC::kMaxKeyedPolymorphism) { - return generic_stub; - } - - // If no maps have been seen at the call site that aren't in the cached - // stub, then use it. - if (!added_map) { - ASSERT(!maybe_cached_stub->IsUndefined()); - ASSERT(maybe_cached_stub->IsCode()); - return Code::cast(maybe_cached_stub); - } - - // Lookup all of the receiver maps in the cache, they should all already - // have MONOMORPHIC stubs. - CodeList handler_ics(KeyedIC::kMaxKeyedPolymorphism); - for (int current = 0; current < receiver_maps.length(); ++current) { - Map* receiver_map(receiver_maps.at(current)); - MaybeObject* maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck( - receiver_map, - strict_mode, - generic_stub); - Code* cached_stub; - if (!maybe_cached_stub->To(&cached_stub)) { - return maybe_cached_stub; - } - handler_ics.Add(cached_stub); - } - - Code* stub; - // Build the MEGAMORPHIC stub. - maybe_stub = ConstructMegamorphicStub(&receiver_maps, - &handler_ics, - strict_mode); - if (!maybe_stub->To(&stub)) return maybe_stub; - - MaybeObject* maybe_update = receiver->UpdateMapCodeCache( - megamorphic_name, - stub); - if (maybe_update->IsFailure()) return maybe_update; - return stub; -} - - -MaybeObject* KeyedIC::ComputeMonomorphicStubWithoutMapCheck( - Map* receiver_map, - StrictModeFlag strict_mode, - Code* generic_stub) { - if ((receiver_map->instance_type() & kNotStringTag) == 0) { - ASSERT(string_stub() != NULL); - return string_stub(); - } else if (receiver_map->has_external_array_elements()) { - // Determine the array type from the default MONOMORPHIC already generated - // stub. There is no other way to determine the type of the external array - // directly from the receiver type. 
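// Editor's note (not part of the patch): the comment above explains that the
// external array type can only be recovered from the MONOMORPHIC stub that
// was generated earlier, which is why Code carries an external_array_type()
// byte.  A minimal sketch of that "stash a tag in the compiled artifact"
// idea, using invented names (ExampleArrayType, CompiledStub), under the
// assumption that the tag is written exactly once when the stub is compiled:

enum ExampleArrayType {
  kExampleByteArray,
  kExampleShortArray,
  kExampleIntArray,
  kExampleFloatArray
};

struct CompiledStub {
  // ... generated machine code would live here ...
  unsigned char external_array_type;  // written once at compile time
};

inline ExampleArrayType TypeOfStub(const CompiledStub& stub) {
  // Later compilations (e.g. building a megamorphic stub) read the tag back
  // instead of trying to re-derive it from the receiver map.
  return static_cast<ExampleArrayType>(stub.external_array_type);
}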
- Code::Kind kind = this->kind(); - Code::Flags flags = Code::ComputeMonomorphicFlags(kind, - NORMAL, - strict_mode); - String* monomorphic_name = GetStubNameForCache(MONOMORPHIC); - Object* maybe_default_stub = receiver_map->FindInCodeCache(monomorphic_name, - flags); - if (maybe_default_stub->IsUndefined()) { - return generic_stub; - } - Code* default_stub = Code::cast(maybe_default_stub); - return GetExternalArrayStubWithoutMapCheck( - default_stub->external_array_type()); - } else if (receiver_map->has_fast_elements()) { - bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; - return GetFastElementStubWithoutMapCheck(is_js_array); - } else { - return generic_stub; - } -} - - -MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode, - Code* generic_stub) { - Code* result = NULL; - if (receiver->HasExternalArrayElements()) { - MaybeObject* maybe_stub = - isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray( - receiver, is_store, strict_mode); - if (!maybe_stub->To(&result)) return maybe_stub; - } else if (receiver->map()->has_fast_elements()) { - MaybeObject* maybe_stub = - isolate()->stub_cache()->ComputeKeyedLoadOrStoreFastElement( - receiver, is_store, strict_mode); - if (!maybe_stub->To(&result)) return maybe_stub; - } else { - result = generic_stub; - } - return result; -} - - -String* KeyedStoreIC::GetStubNameForCache(IC::State ic_state) { - if (ic_state == MONOMORPHIC) { - return isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol(); - } else { - ASSERT(ic_state == MEGAMORPHIC); - return isolate()->heap()->KeyedStoreSpecializedPolymorphic_symbol(); - } -} - - -MaybeObject* KeyedStoreIC::GetFastElementStubWithoutMapCheck( - bool is_js_array) { - return KeyedStoreFastElementStub(is_js_array).TryGetCode(); -} - - -MaybeObject* KeyedStoreIC::GetExternalArrayStubWithoutMapCheck( - ExternalArrayType array_type) { - return KeyedStoreExternalArrayStub(array_type).TryGetCode(); -} - - -MaybeObject* KeyedStoreIC::ConstructMegamorphicStub( - MapList* receiver_maps, - CodeList* targets, - StrictModeFlag strict_mode) { - Object* object; - KeyedStoreStubCompiler compiler(strict_mode); - MaybeObject* maybe_code = compiler.CompileStoreMegamorphic(receiver_maps, - targets); - if (!maybe_code->ToObject(&object)) return maybe_code; - isolate()->counters()->keyed_store_polymorphic_stubs()->Increment(); - PROFILE(isolate(), CodeCreateEvent( - Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG, - Code::cast(object), 0)); - return object; -} - - MaybeObject* KeyedStoreIC::Store(State state, StrictModeFlag strict_mode, Handle object, Handle key, - Handle value, - bool force_generic) { + Handle value) { if (key->IsSymbol()) { Handle name = Handle::cast(key); @@ -1836,27 +1530,29 @@ MaybeObject* KeyedStoreIC::Store(State state, ASSERT(!(use_ic && object->IsJSGlobalProxy())); if (use_ic) { - Code* stub = (strict_mode == kStrictMode) - ? generic_stub_strict() - : generic_stub(); - if (!force_generic) { - if (object->IsJSObject() && key->IsSmi()) { - JSObject* receiver = JSObject::cast(*object); - MaybeObject* maybe_stub = ComputeStub(receiver, - true, - strict_mode, - stub); - stub = maybe_stub->IsFailure() ? - NULL : Code::cast(maybe_stub->ToObjectUnchecked()); + Code* stub = + (strict_mode == kStrictMode) ? 
generic_stub_strict() : generic_stub(); + if (state == UNINITIALIZED) { + if (object->IsJSObject()) { + Handle receiver = Handle::cast(object); + if (receiver->HasExternalArrayElements()) { + MaybeObject* probe = + isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray( + *receiver, true, strict_mode); + stub = probe->IsFailure() ? + NULL : Code::cast(probe->ToObjectUnchecked()); + } else if (key->IsSmi() && receiver->map()->has_fast_elements()) { + MaybeObject* probe = + isolate()->stub_cache()->ComputeKeyedStoreSpecialized( + *receiver, strict_mode); + stub = probe->IsFailure() ? + NULL : Code::cast(probe->ToObjectUnchecked()); + } } } if (stub != NULL) set_target(stub); } -#ifdef DEBUG - TraceIC("KeyedStoreIC", key, state, target()); -#endif - // Set the property. return Runtime::SetObjectProperty( isolate(), object , key, value, NONE, strict_mode); @@ -2025,16 +1721,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) { ASSERT(args.length() == 2); KeyedLoadIC ic(isolate); IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); - return ic.Load(state, args.at(0), args.at(1), false); -} - - -RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) { - NoHandleAllocation na; - ASSERT(args.length() == 2); - KeyedLoadIC ic(isolate); - IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); - return ic.Load(state, args.at(0), args.at(1), true); + return ic.Load(state, args.at(0), args.at(1)); } @@ -2118,42 +1805,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) { static_cast(extra_ic_state & kStrictMode), args.at(0), args.at(1), - args.at(2), - false); -} - - -RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { - NoHandleAllocation na; - ASSERT(args.length() == 3); - KeyedStoreIC ic(isolate); - Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); - Handle object = args.at(0); - Handle key = args.at(1); - Handle value = args.at(2); - StrictModeFlag strict_mode = - static_cast(extra_ic_state & kStrictMode); - return Runtime::SetObjectProperty(isolate, - object, - key, - value, - NONE, - strict_mode); -} - - -RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) { - NoHandleAllocation na; - ASSERT(args.length() == 3); - KeyedStoreIC ic(isolate); - IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); - Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); - return ic.Store(state, - static_cast(extra_ic_state & kStrictMode), - args.at(0), - args.at(1), - args.at(2), - true); + args.at(2)); } diff --git a/src/ic.h b/src/ic.h index bf03fd9423..0591674bb2 100644 --- a/src/ic.h +++ b/src/ic.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2009 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -39,15 +39,12 @@ namespace internal { #define IC_UTIL_LIST(ICU) \ ICU(LoadIC_Miss) \ ICU(KeyedLoadIC_Miss) \ - ICU(KeyedLoadIC_MissForceGeneric) \ ICU(CallIC_Miss) \ ICU(KeyedCallIC_Miss) \ ICU(StoreIC_Miss) \ ICU(StoreIC_ArrayLength) \ ICU(SharedStoreIC_ExtendStorage) \ ICU(KeyedStoreIC_Miss) \ - ICU(KeyedStoreIC_MissForceGeneric) \ - ICU(KeyedStoreIC_Slow) \ /* Utilities for IC stubs. 
*/ \ ICU(LoadCallbackProperty) \ ICU(StoreCallbackProperty) \ @@ -145,11 +142,11 @@ class IC { void set_target(Code* code) { SetTargetAtAddress(address(), code); } #ifdef DEBUG - void TraceIC(const char* type, - Handle name, - State old_state, - Code* new_target, - const char* extra_info = ""); + static void TraceIC(const char* type, + Handle name, + State old_state, + Code* new_target, + const char* extra_info = ""); #endif Failure* TypeError(const char* type, @@ -328,72 +325,23 @@ class LoadIC: public IC { }; -class KeyedIC: public IC { +class KeyedLoadIC: public IC { public: - explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {} - virtual ~KeyedIC() {} - - static const int kMaxKeyedPolymorphism = 4; - - virtual MaybeObject* GetFastElementStubWithoutMapCheck( - bool is_js_array) = 0; - - virtual MaybeObject* GetExternalArrayStubWithoutMapCheck( - ExternalArrayType array_type) = 0; - - protected: - virtual Code* string_stub() { - return NULL; - } - - virtual Code::Kind kind() const = 0; - - virtual String* GetStubNameForCache(IC::State ic_state) = 0; - - MaybeObject* ComputeStub(JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode, - Code* default_stub); - - virtual MaybeObject* ConstructMegamorphicStub( - MapList* receiver_maps, - CodeList* targets, - StrictModeFlag strict_mode) = 0; - - private: - void GetReceiverMapsForStub(Code* stub, MapList* result); - - MaybeObject* ComputeMonomorphicStubWithoutMapCheck( - Map* receiver_map, - StrictModeFlag strict_mode, - Code* generic_stub); - - MaybeObject* ComputeMonomorphicStub(JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode, - Code* default_stub); -}; - - -class KeyedLoadIC: public KeyedIC { - public: - explicit KeyedLoadIC(Isolate* isolate) : KeyedIC(isolate) { - ASSERT(target()->is_keyed_load_stub()); + explicit KeyedLoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { + ASSERT(target()->is_keyed_load_stub() || + target()->is_external_array_load_stub()); } MUST_USE_RESULT MaybeObject* Load(State state, Handle object, - Handle key, - bool force_generic_stub); + Handle key); // Code generator routines. - static void GenerateMiss(MacroAssembler* masm, bool force_generic); + static void GenerateMiss(MacroAssembler* masm); static void GenerateRuntimeGetProperty(MacroAssembler* masm); - static void GenerateInitialize(MacroAssembler* masm) { - GenerateMiss(masm, false); - } + static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } static void GeneratePreMonomorphic(MacroAssembler* masm) { - GenerateMiss(masm, false); + GenerateMiss(masm); } static void GenerateGeneric(MacroAssembler* masm); static void GenerateString(MacroAssembler* masm); @@ -407,27 +355,6 @@ class KeyedLoadIC: public KeyedIC { static const int kSlowCaseBitFieldMask = (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor); - virtual MaybeObject* GetFastElementStubWithoutMapCheck( - bool is_js_array); - - virtual MaybeObject* GetExternalArrayStubWithoutMapCheck( - ExternalArrayType array_type); - - protected: - virtual Code::Kind kind() const { return Code::KEYED_LOAD_IC; } - - virtual String* GetStubNameForCache(IC::State ic_state); - - virtual MaybeObject* ConstructMegamorphicStub( - MapList* receiver_maps, - CodeList* targets, - StrictModeFlag strict_mode); - - virtual Code* string_stub() { - return isolate()->builtins()->builtin( - Builtins::kKeyedLoadIC_String); - } - private: // Update the inline cache. 
void UpdateCaches(LookupResult* lookup, @@ -452,6 +379,11 @@ class KeyedLoadIC: public KeyedIC { return isolate()->builtins()->builtin( Builtins::kKeyedLoadIC_PreMonomorphic); } + Code* string_stub() { + return isolate()->builtins()->builtin( + Builtins::kKeyedLoadIC_String); + } + Code* indexed_interceptor_stub() { return isolate()->builtins()->builtin( Builtins::kKeyedLoadIC_IndexedInterceptor); @@ -534,46 +466,24 @@ class StoreIC: public IC { }; -class KeyedStoreIC: public KeyedIC { +class KeyedStoreIC: public IC { public: - explicit KeyedStoreIC(Isolate* isolate) : KeyedIC(isolate) { - ASSERT(target()->is_keyed_store_stub()); - } + explicit KeyedStoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { } MUST_USE_RESULT MaybeObject* Store(State state, - StrictModeFlag strict_mode, + StrictModeFlag strict_mode, Handle object, Handle name, - Handle value, - bool force_generic); + Handle value); // Code generators for stub routines. Only called once at startup. - static void GenerateInitialize(MacroAssembler* masm) { - GenerateMiss(masm, false); - } - static void GenerateMiss(MacroAssembler* masm, bool force_generic); - static void GenerateSlow(MacroAssembler* masm); + static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } + static void GenerateMiss(MacroAssembler* masm); static void GenerateRuntimeSetProperty(MacroAssembler* masm, StrictModeFlag strict_mode); static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode); - virtual MaybeObject* GetFastElementStubWithoutMapCheck( - bool is_js_array); - - virtual MaybeObject* GetExternalArrayStubWithoutMapCheck( - ExternalArrayType array_type); - - protected: - virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; } - - virtual String* GetStubNameForCache(IC::State ic_state); - - virtual MaybeObject* ConstructMegamorphicStub( - MapList* receiver_maps, - CodeList* targets, - StrictModeFlag strict_mode); - - private: + private: // Update the inline cache. void UpdateCaches(LookupResult* lookup, State state, diff --git a/src/list.h b/src/list.h index ca2b7bce22..ef795578b2 100644 --- a/src/list.h +++ b/src/list.h @@ -164,11 +164,6 @@ class List { DISALLOW_COPY_AND_ASSIGN(List); }; -class Map; -class Code; -typedef List MapList; -typedef List CodeList; - } } // namespace v8::internal #endif // V8_LIST_H_ diff --git a/src/log.cc b/src/log.cc index 1db66fd96d..8cb7ab504c 100644 --- a/src/log.cc +++ b/src/log.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2009 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -1620,6 +1620,10 @@ void Logger::LogCodeObject(Object* object) { description = "A keyed load IC from the snapshot"; tag = Logger::KEYED_LOAD_IC_TAG; break; + case Code::KEYED_EXTERNAL_ARRAY_LOAD_IC: + description = "A keyed external array load IC from the snapshot"; + tag = Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG; + break; case Code::LOAD_IC: description = "A load IC from the snapshot"; tag = Logger::LOAD_IC_TAG; @@ -1632,6 +1636,10 @@ void Logger::LogCodeObject(Object* object) { description = "A keyed store IC from the snapshot"; tag = Logger::KEYED_STORE_IC_TAG; break; + case Code::KEYED_EXTERNAL_ARRAY_STORE_IC: + description = "A keyed external array store IC from the snapshot"; + tag = Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG; + break; case Code::CALL_IC: description = "A call IC from the snapshot"; tag = Logger::CALL_IC_TAG; diff --git a/src/log.h b/src/log.h index a96e171a46..3434b736f3 100644 --- a/src/log.h +++ b/src/log.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2008 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -90,51 +90,49 @@ class Ticker; #define LOG(isolate, Call) ((void) 0) #endif -#define LOG_EVENTS_AND_TAGS_LIST(V) \ - V(CODE_CREATION_EVENT, "code-creation") \ - V(CODE_MOVE_EVENT, "code-move") \ - V(CODE_DELETE_EVENT, "code-delete") \ - V(CODE_MOVING_GC, "code-moving-gc") \ - V(SHARED_FUNC_MOVE_EVENT, "sfi-move") \ - V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \ - V(SNAPSHOT_CODE_NAME_EVENT, "snapshot-code-name") \ - V(TICK_EVENT, "tick") \ - V(REPEAT_META_EVENT, "repeat") \ - V(BUILTIN_TAG, "Builtin") \ - V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \ - V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \ - V(CALL_IC_TAG, "CallIC") \ - V(CALL_INITIALIZE_TAG, "CallInitialize") \ - V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \ - V(CALL_MISS_TAG, "CallMiss") \ - V(CALL_NORMAL_TAG, "CallNormal") \ - V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \ - V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \ - V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \ - "KeyedCallDebugPrepareStepIn") \ - V(KEYED_CALL_IC_TAG, "KeyedCallIC") \ - V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \ - V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \ - V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \ - V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \ - V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \ - V(CALLBACK_TAG, "Callback") \ - V(EVAL_TAG, "Eval") \ - V(FUNCTION_TAG, "Function") \ - V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \ - V(KEYED_LOAD_MEGAMORPHIC_IC_TAG, "KeyedLoadMegamorphicIC") \ - V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \ - V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \ - V(KEYED_STORE_MEGAMORPHIC_IC_TAG, "KeyedStoreMegamorphicIC") \ - V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC") \ - V(LAZY_COMPILE_TAG, "LazyCompile") \ - V(LOAD_IC_TAG, "LoadIC") \ - V(REG_EXP_TAG, "RegExp") \ - V(SCRIPT_TAG, "Script") \ - V(STORE_IC_TAG, "StoreIC") \ - V(STUB_TAG, "Stub") \ - V(NATIVE_FUNCTION_TAG, "Function") \ - V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \ +#define LOG_EVENTS_AND_TAGS_LIST(V) \ + V(CODE_CREATION_EVENT, "code-creation") \ + V(CODE_MOVE_EVENT, "code-move") \ + 
V(CODE_DELETE_EVENT, "code-delete") \ + V(CODE_MOVING_GC, "code-moving-gc") \ + V(SHARED_FUNC_MOVE_EVENT, "sfi-move") \ + V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \ + V(SNAPSHOT_CODE_NAME_EVENT, "snapshot-code-name") \ + V(TICK_EVENT, "tick") \ + V(REPEAT_META_EVENT, "repeat") \ + V(BUILTIN_TAG, "Builtin") \ + V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \ + V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \ + V(CALL_IC_TAG, "CallIC") \ + V(CALL_INITIALIZE_TAG, "CallInitialize") \ + V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \ + V(CALL_MISS_TAG, "CallMiss") \ + V(CALL_NORMAL_TAG, "CallNormal") \ + V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \ + V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \ + V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \ + "KeyedCallDebugPrepareStepIn") \ + V(KEYED_CALL_IC_TAG, "KeyedCallIC") \ + V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \ + V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \ + V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \ + V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \ + V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \ + V(CALLBACK_TAG, "Callback") \ + V(EVAL_TAG, "Eval") \ + V(FUNCTION_TAG, "Function") \ + V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \ + V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \ + V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \ + V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC")\ + V(LAZY_COMPILE_TAG, "LazyCompile") \ + V(LOAD_IC_TAG, "LoadIC") \ + V(REG_EXP_TAG, "RegExp") \ + V(SCRIPT_TAG, "Script") \ + V(STORE_IC_TAG, "StoreIC") \ + V(STUB_TAG, "Stub") \ + V(NATIVE_FUNCTION_TAG, "Function") \ + V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \ V(NATIVE_SCRIPT_TAG, "Script") // Note that 'NATIVE_' cases for functions and scripts are mapped onto // original tags when writing to the log. diff --git a/src/mark-compact.cc b/src/mark-compact.cc index b82c76aa83..4071073f12 100644 --- a/src/mark-compact.cc +++ b/src/mark-compact.cc @@ -457,7 +457,7 @@ class StaticMarkingVisitor : public StaticVisitorBase { static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) { ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); - if (FLAG_cleanup_code_caches_at_gc && code->is_inline_cache_stub()) { + if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { IC::Clear(rinfo->pc()); // Please note targets for cleared inline cached do not have to be // marked since they are contained in HEAP->non_monomorphic_cache(). 
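// Editor's note (not part of the patch): LOG_EVENTS_AND_TAGS_LIST above is an
// "X macro": one V(name, "string") list is expanded once to declare the enum
// values and once to build the matching name table, so the two can never get
// out of sync.  Self-contained sketch of the pattern; EXAMPLE_TAG_LIST and
// its expansions are invented for illustration.
#include <cstdio>

#define EXAMPLE_TAG_LIST(V)     \
  V(LOAD_IC_TAG, "LoadIC")      \
  V(STORE_IC_TAG, "StoreIC")    \
  V(STUB_TAG, "Stub")

enum ExampleTag {
#define DECLARE_ENUM(name, str) name,
  EXAMPLE_TAG_LIST(DECLARE_ENUM)
#undef DECLARE_ENUM
  EXAMPLE_TAG_COUNT
};

static const char* const kExampleTagNames[] = {
#define DECLARE_NAME(name, str) str,
  EXAMPLE_TAG_LIST(DECLARE_NAME)
#undef DECLARE_NAME
};

int main() {
  // Enum value and log string stay in sync because both come from one list.
  std::printf("%d -> %s\n", STORE_IC_TAG, kExampleTagNames[STORE_IC_TAG]);
  return 0;
}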
@@ -1056,7 +1056,7 @@ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) { ASSERT(HEAP->Contains(object)); if (object->IsMap()) { Map* map = Map::cast(object); - if (FLAG_cleanup_code_caches_at_gc) { + if (FLAG_cleanup_caches_in_maps_at_gc) { map->ClearCodeCache(heap()); } SetMark(map); diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc index dd4e7c12fa..89981fdb19 100644 --- a/src/mips/code-stubs-mips.cc +++ b/src/mips/code-stubs-mips.cc @@ -6613,3 +6613,4 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { } } // namespace v8::internal #endif // V8_TARGET_ARCH_MIPS + diff --git a/src/objects-inl.h b/src/objects-inl.h index b2603a2b12..c96bf56f9f 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -2595,6 +2595,7 @@ Code::ExtraICState Code::extra_ic_state() { PropertyType Code::type() { + ASSERT(ic_state() == MONOMORPHIC); return ExtractTypeFromFlags(flags()); } @@ -2712,14 +2713,14 @@ void Code::set_check_type(CheckType value) { ExternalArrayType Code::external_array_type() { - ASSERT(is_keyed_load_stub() || is_keyed_store_stub()); + ASSERT(is_external_array_load_stub() || is_external_array_store_stub()); byte type = READ_BYTE_FIELD(this, kExternalArrayTypeOffset); return static_cast(type); } void Code::set_external_array_type(ExternalArrayType value) { - ASSERT(is_keyed_load_stub() || is_keyed_store_stub()); + ASSERT(is_external_array_load_stub() || is_external_array_store_stub()); WRITE_BYTE_FIELD(this, kExternalArrayTypeOffset, value); } @@ -2791,7 +2792,8 @@ Code::Flags Code::ComputeFlags(Kind kind, (kind == CALL_IC && (ic_state == MONOMORPHIC || ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) || (kind == STORE_IC) || - (kind == KEYED_STORE_IC)); + (kind == KEYED_STORE_IC) || + (kind == KEYED_EXTERNAL_ARRAY_STORE_IC)); // Compute the bit mask. int bits = kind << kFlagsKindShift; if (in_loop) bits |= kFlagsICInLoopMask; diff --git a/src/objects.cc b/src/objects.cc index 126e9d0960..8c7695db53 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -3811,6 +3811,8 @@ void Map::TraverseTransitionTree(TraverseCallback callback, void* data) { MaybeObject* CodeCache::Update(String* name, Code* code) { + ASSERT(code->ic_state() == MONOMORPHIC); + // The number of monomorphic stubs for normal load/store/call IC's can grow to // a large number and therefore they need to go into a hash table. They are // used to load global properties from cells. 
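// Editor's note (not part of the patch): the CodeCache::Update comment above
// describes a two-tier layout -- a small linear area for the common case,
// spilling into a hash table once the number of cached monomorphic stubs gets
// large.  Illustrative sketch of that strategy with standard containers
// (TwoTierCache is an invented name and std::map stands in for the hash
// table; this is not V8's actual CodeCache).
#include <cstddef>
#include <map>
#include <string>
#include <utility>
#include <vector>

class TwoTierCache {
 public:
  explicit TwoTierCache(size_t linear_limit) : linear_limit_(linear_limit) {}

  void Update(const std::string& name, int code_id) {
    if (table_.empty() && linear_.size() < linear_limit_) {
      for (size_t i = 0; i < linear_.size(); ++i) {
        if (linear_[i].first == name) { linear_[i].second = code_id; return; }
      }
      linear_.push_back(std::make_pair(name, code_id));
      return;
    }
    // Past the threshold: migrate the linear entries into the table once,
    // then keep using the table from that point on.
    if (table_.empty()) {
      for (size_t i = 0; i < linear_.size(); ++i) {
        table_[linear_[i].first] = linear_[i].second;
      }
      linear_.clear();
    }
    table_[name] = code_id;
  }

  // Returns -1 when no stub is cached under |name|.
  int Lookup(const std::string& name) const {
    for (size_t i = 0; i < linear_.size(); ++i) {
      if (linear_[i].first == name) return linear_[i].second;
    }
    std::map<std::string, int>::const_iterator it = table_.find(name);
    return it == table_.end() ? -1 : it->second;
  }

 private:
  size_t linear_limit_;
  std::vector<std::pair<std::string, int> > linear_;
  std::map<std::string, int> table_;
};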
@@ -6553,8 +6555,10 @@ const char* Code::Kind2String(Kind kind) { case BUILTIN: return "BUILTIN"; case LOAD_IC: return "LOAD_IC"; case KEYED_LOAD_IC: return "KEYED_LOAD_IC"; + case KEYED_EXTERNAL_ARRAY_LOAD_IC: return "KEYED_EXTERNAL_ARRAY_LOAD_IC"; case STORE_IC: return "STORE_IC"; case KEYED_STORE_IC: return "KEYED_STORE_IC"; + case KEYED_EXTERNAL_ARRAY_STORE_IC: return "KEYED_EXTERNAL_ARRAY_STORE_IC"; case CALL_IC: return "CALL_IC"; case KEYED_CALL_IC: return "KEYED_CALL_IC"; case TYPE_RECORDING_UNARY_OP_IC: return "TYPE_RECORDING_UNARY_OP_IC"; diff --git a/src/objects.h b/src/objects.h index 3b506bf708..e68ac531f7 100644 --- a/src/objects.h +++ b/src/objects.h @@ -30,7 +30,6 @@ #include "allocation.h" #include "builtins.h" -#include "list.h" #include "smart-pointer.h" #include "unicode-inl.h" #if V8_TARGET_ARCH_ARM @@ -3276,10 +3275,12 @@ class Code: public HeapObject { BUILTIN, LOAD_IC, KEYED_LOAD_IC, + KEYED_EXTERNAL_ARRAY_LOAD_IC, CALL_IC, KEYED_CALL_IC, STORE_IC, KEYED_STORE_IC, + KEYED_EXTERNAL_ARRAY_STORE_IC, TYPE_RECORDING_UNARY_OP_IC, TYPE_RECORDING_BINARY_OP_IC, COMPARE_IC, @@ -3362,6 +3363,12 @@ class Code: public HeapObject { return kind() == TYPE_RECORDING_BINARY_OP_IC; } inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; } + inline bool is_external_array_load_stub() { + return kind() == KEYED_EXTERNAL_ARRAY_LOAD_IC; + } + inline bool is_external_array_store_stub() { + return kind() == KEYED_EXTERNAL_ARRAY_STORE_IC; + } // [major_key]: For kind STUB or BINARY_OP_IC, the major key. inline int major_key(); diff --git a/src/spaces.cc b/src/spaces.cc index cda498d0e5..3d3e048040 100644 --- a/src/spaces.cc +++ b/src/spaces.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2010 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -1564,8 +1564,10 @@ static void ReportCodeKindStatistics() { CASE(BUILTIN); CASE(LOAD_IC); CASE(KEYED_LOAD_IC); + CASE(KEYED_EXTERNAL_ARRAY_LOAD_IC); CASE(STORE_IC); CASE(KEYED_STORE_IC); + CASE(KEYED_EXTERNAL_ARRAY_STORE_IC); CASE(CALL_IC); CASE(KEYED_CALL_IC); CASE(TYPE_RECORDING_UNARY_OP_IC); diff --git a/src/stub-cache.cc b/src/stub-cache.cc index 2c5cd39022..7a1b185df4 100644 --- a/src/stub-cache.cc +++ b/src/stub-cache.cc @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2009 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -457,6 +457,34 @@ MaybeObject* StubCache::ComputeKeyedLoadFunctionPrototype( } +MaybeObject* StubCache::ComputeKeyedLoadSpecialized(JSObject* receiver) { + // Using NORMAL as the PropertyType for array element loads is a misuse. The + // generated stub always accesses fast elements, not slow-mode fields, but + // some property type is required for the stub lookup. Note that overloading + // the NORMAL PropertyType is only safe as long as no stubs are generated for + // other keyed field loads. This is guaranteed to be the case since all field + // keyed loads that are not array elements go through a generic builtin stub. 
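// Editor's note (not part of the patch): ComputeKeyedLoadSpecialized below
// follows a recurring pattern -- look the stub up in the receiver map's code
// cache under a fixed (name, flags) key, and only compile and cache it on a
// miss (FindInCodeCache / UpdateMapCodeCache in the patch).  Simplified,
// hypothetical sketch of that flow; FakeStub, MapCodeCache and GetOrCompile
// are invented names, not V8 APIs.
#include <map>
#include <string>
#include <utility>

struct FakeStub { int id; };

// Per-map cache keyed by (stub name, flags).
typedef std::map<std::pair<std::string, int>, FakeStub> MapCodeCache;
typedef FakeStub (*CompileFn)();

FakeStub GetOrCompile(MapCodeCache* cache,
                      const std::string& name,
                      int flags,
                      CompileFn compile_fn) {
  std::pair<std::string, int> key(name, flags);
  MapCodeCache::iterator it = cache->find(key);
  if (it != cache->end()) return it->second;  // cache hit: reuse the stub
  FakeStub stub = compile_fn();               // cache miss: compile once...
  (*cache)[key] = stub;                       // ...and remember it per map
  return stub;
}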
+ Code::Flags flags = + Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, NORMAL); + String* name = heap()->KeyedLoadSpecialized_symbol(); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedLoadStubCompiler compiler; + { MaybeObject* maybe_code = compiler.CompileLoadSpecialized(receiver); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), 0)); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } + return code; +} + + MaybeObject* StubCache::ComputeStoreField(String* name, JSObject* receiver, int field_index, @@ -485,6 +513,30 @@ MaybeObject* StubCache::ComputeStoreField(String* name, } +MaybeObject* StubCache::ComputeKeyedStoreSpecialized( + JSObject* receiver, + StrictModeFlag strict_mode) { + Code::Flags flags = + Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL, strict_mode); + String* name = heap()->KeyedStoreSpecialized_symbol(); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + KeyedStoreStubCompiler compiler(strict_mode); + { MaybeObject* maybe_code = compiler.CompileStoreSpecialized(receiver); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, Code::cast(code), 0)); + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } + } + return code; +} + + namespace { ExternalArrayType ElementsKindToExternalArrayType(JSObject::ElementsKind kind) { @@ -513,6 +565,60 @@ ExternalArrayType ElementsKindToExternalArrayType(JSObject::ElementsKind kind) { } } +String* ExternalArrayTypeToStubName(Heap* heap, + ExternalArrayType array_type, + bool is_store) { + if (is_store) { + switch (array_type) { + case kExternalByteArray: + return heap->KeyedStoreExternalByteArray_symbol(); + case kExternalUnsignedByteArray: + return heap->KeyedStoreExternalUnsignedByteArray_symbol(); + case kExternalShortArray: + return heap->KeyedStoreExternalShortArray_symbol(); + case kExternalUnsignedShortArray: + return heap->KeyedStoreExternalUnsignedShortArray_symbol(); + case kExternalIntArray: + return heap->KeyedStoreExternalIntArray_symbol(); + case kExternalUnsignedIntArray: + return heap->KeyedStoreExternalUnsignedIntArray_symbol(); + case kExternalFloatArray: + return heap->KeyedStoreExternalFloatArray_symbol(); + case kExternalDoubleArray: + return heap->KeyedStoreExternalDoubleArray_symbol(); + case kExternalPixelArray: + return heap->KeyedStoreExternalPixelArray_symbol(); + default: + UNREACHABLE(); + return NULL; + } + } else { + switch (array_type) { + case kExternalByteArray: + return heap->KeyedLoadExternalByteArray_symbol(); + case kExternalUnsignedByteArray: + return heap->KeyedLoadExternalUnsignedByteArray_symbol(); + case kExternalShortArray: + return heap->KeyedLoadExternalShortArray_symbol(); + case kExternalUnsignedShortArray: + return heap->KeyedLoadExternalUnsignedShortArray_symbol(); + case kExternalIntArray: + return heap->KeyedLoadExternalIntArray_symbol(); + case kExternalUnsignedIntArray: + return heap->KeyedLoadExternalUnsignedIntArray_symbol(); + case kExternalFloatArray: + return heap->KeyedLoadExternalFloatArray_symbol(); + case kExternalDoubleArray: + return 
heap->KeyedLoadExternalDoubleArray_symbol(); + case kExternalPixelArray: + return heap->KeyedLoadExternalPixelArray_symbol(); + default: + UNREACHABLE(); + return NULL; + } + } +} + } // anonymous namespace @@ -522,88 +628,37 @@ MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray( StrictModeFlag strict_mode) { Code::Flags flags = Code::ComputeMonomorphicFlags( - is_store ? Code::KEYED_STORE_IC : - Code::KEYED_LOAD_IC, + is_store ? Code::KEYED_EXTERNAL_ARRAY_STORE_IC : + Code::KEYED_EXTERNAL_ARRAY_LOAD_IC, NORMAL, strict_mode); ExternalArrayType array_type = ElementsKindToExternalArrayType(receiver->GetElementsKind()); - String* name = is_store - ? isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol() - : isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol(); - Object* maybe_code = receiver->map()->FindInCodeCache(name, flags); - if (!maybe_code->IsUndefined()) return Code::cast(maybe_code); - - MaybeObject* maybe_new_code = NULL; - if (is_store) { - ExternalArrayStoreStubCompiler compiler; - maybe_new_code = compiler.CompileStore(receiver, array_type); - } else { - ExternalArrayLoadStubCompiler compiler; - maybe_new_code = compiler.CompileLoad(receiver, array_type); - } - Code* code; - if (!maybe_new_code->To(&code)) return maybe_new_code; - code->set_external_array_type(array_type); - if (is_store) { - PROFILE(isolate_, - CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, - Code::cast(code), 0)); - } else { - PROFILE(isolate_, - CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, - Code::cast(code), 0)); - } - ASSERT(code->IsCode()); - Object* result; - { MaybeObject* maybe_result = - receiver->UpdateMapCodeCache(name, Code::cast(code)); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - return code; -} - - -MaybeObject* StubCache::ComputeKeyedLoadOrStoreFastElement( - JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode) { - Code::Flags flags = - Code::ComputeMonomorphicFlags( - is_store ? Code::KEYED_STORE_IC : - Code::KEYED_LOAD_IC, - NORMAL, - strict_mode); - String* name = is_store - ? isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol() - : isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol(); - Object* maybe_code = receiver->map()->FindInCodeCache(name, flags); - if (!maybe_code->IsUndefined()) return Code::cast(maybe_code); - - MaybeObject* maybe_new_code = NULL; - if (is_store) { - KeyedStoreStubCompiler compiler(strict_mode); - maybe_new_code = compiler.CompileStoreFastElement(receiver->map()); - } else { - KeyedLoadStubCompiler compiler; - maybe_new_code = compiler.CompileLoadFastElement(receiver->map()); - } - Code* code; - if (!maybe_new_code->To(&code)) return maybe_new_code; - if (is_store) { - PROFILE(isolate_, - CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, - Code::cast(code), 0)); - } else { - PROFILE(isolate_, - CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, - Code::cast(code), 0)); - } - ASSERT(code->IsCode()); - Object* result; - { MaybeObject* maybe_result = - receiver->UpdateMapCodeCache(name, Code::cast(code)); - if (!maybe_result->ToObject(&result)) return maybe_result; + String* name = ExternalArrayTypeToStubName(heap(), array_type, is_store); + Object* code = receiver->map()->FindInCodeCache(name, flags); + if (code->IsUndefined()) { + ExternalArrayStubCompiler compiler; + { MaybeObject* maybe_code = + is_store ? 
+ compiler.CompileKeyedStoreStub(receiver, array_type, flags) : + compiler.CompileKeyedLoadStub(receiver, array_type, flags); + if (!maybe_code->ToObject(&code)) return maybe_code; + } + Code::cast(code)->set_external_array_type(array_type); + if (is_store) { + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, + Code::cast(code), 0)); + } else { + PROFILE(isolate_, + CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, + Code::cast(code), 0)); + } + Object* result; + { MaybeObject* maybe_result = + receiver->UpdateMapCodeCache(name, Code::cast(code)); + if (!maybe_result->ToObject(&result)) return maybe_result; + } } return code; } @@ -1662,11 +1717,8 @@ MaybeObject* LoadStubCompiler::GetCode(PropertyType type, String* name) { } -MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, - String* name, - InlineCacheState state) { - Code::Flags flags = Code::ComputeFlags( - Code::KEYED_LOAD_IC, NOT_IN_LOOP, state, Code::kNoExtraICState, type); +MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name) { + Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, type); MaybeObject* result = GetCodeWithFlags(flags, name); if (!result->IsFailure()) { PROFILE(isolate(), @@ -1698,11 +1750,9 @@ MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) { } -MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, - String* name, - InlineCacheState state) { - Code::Flags flags = Code::ComputeFlags( - Code::KEYED_STORE_IC, NOT_IN_LOOP, state, strict_mode_, type); +MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, String* name) { + Code::Flags flags = Code::ComputeMonomorphicFlags( + Code::KEYED_STORE_IC, type, strict_mode_); MaybeObject* result = GetCodeWithFlags(flags, name); if (!result->IsFailure()) { PROFILE(isolate(), @@ -1880,34 +1930,15 @@ void CallOptimization::AnalyzePossibleApiFunction(JSFunction* function) { } -MaybeObject* ExternalArrayLoadStubCompiler::GetCode() { +MaybeObject* ExternalArrayStubCompiler::GetCode(Code::Flags flags) { Object* result; - Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, - NORMAL); - { MaybeObject* maybe_result = GetCodeWithFlags(flags, - "ExternalArrayLoadStub"); + { MaybeObject* maybe_result = GetCodeWithFlags(flags, "ExternalArrayStub"); if (!maybe_result->ToObject(&result)) return maybe_result; } Code* code = Code::cast(result); USE(code); PROFILE(isolate(), - CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayLoadStub")); - return result; -} - - -MaybeObject* ExternalArrayStoreStubCompiler::GetCode() { - Object* result; - Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, - NORMAL); - { MaybeObject* maybe_result = GetCodeWithFlags(flags, - "ExternalArrayStoreStub"); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - Code* code = Code::cast(result); - USE(code); - PROFILE(isolate(), - CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStoreStub")); + CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStub")); return result; } diff --git a/src/stub-cache.h b/src/stub-cache.h index 2a592176d7..358f205b94 100644 --- a/src/stub-cache.h +++ b/src/stub-cache.h @@ -1,4 +1,4 @@ -// Copyright 2011 the V8 project authors. All rights reserved. +// Copyright 2006-2008 the V8 project authors. All rights reserved. 
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: @@ -31,7 +31,6 @@ #include "allocation.h" #include "arguments.h" #include "macro-assembler.h" -#include "objects.h" #include "zone-inl.h" namespace v8 { @@ -145,6 +144,9 @@ class StubCache { String* name, JSFunction* receiver); + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadSpecialized( + JSObject* receiver); + // --- MUST_USE_RESULT MaybeObject* ComputeStoreField( @@ -183,15 +185,16 @@ class StubCache { Map* transition, StrictModeFlag strict_mode); + MUST_USE_RESULT MaybeObject* ComputeKeyedStoreSpecialized( + JSObject* receiver, + StrictModeFlag strict_mode); + + MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreExternalArray( JSObject* receiver, bool is_store, StrictModeFlag strict_mode); - MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreFastElement( - JSObject* receiver, - bool is_store, - StrictModeFlag strict_mode); // --- MUST_USE_RESULT MaybeObject* ComputeCallField(int argc, @@ -466,10 +469,7 @@ class StubCompiler BASE_EMBEDDED { Register scratch, Label* miss_label); - static void GenerateLoadMiss(MacroAssembler* masm, - Code::Kind kind); - - static void GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm); + static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind); // Generates code that verifies that the property holder has not changed // (checking maps of objects in the prototype chain for fast and global @@ -634,21 +634,10 @@ class KeyedLoadStubCompiler: public StubCompiler { MUST_USE_RESULT MaybeObject* CompileLoadStringLength(String* name); MUST_USE_RESULT MaybeObject* CompileLoadFunctionPrototype(String* name); - MUST_USE_RESULT MaybeObject* CompileLoadFastElement(Map* receiver_map); - - MUST_USE_RESULT MaybeObject* CompileLoadMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics); - - static void GenerateLoadExternalArray(MacroAssembler* masm, - ExternalArrayType array_type); - - static void GenerateLoadFastElement(MacroAssembler* masm); + MUST_USE_RESULT MaybeObject* CompileLoadSpecialized(JSObject* receiver); private: - MaybeObject* GetCode(PropertyType type, - String* name, - InlineCacheState state = MONOMORPHIC); + MaybeObject* GetCode(PropertyType type, String* name); }; @@ -689,22 +678,10 @@ class KeyedStoreStubCompiler: public StubCompiler { Map* transition, String* name); - MUST_USE_RESULT MaybeObject* CompileStoreFastElement(Map* receiver_map); - - MUST_USE_RESULT MaybeObject* CompileStoreMegamorphic( - MapList* receiver_maps, - CodeList* handler_ics); - - static void GenerateStoreFastElement(MacroAssembler* masm, - bool is_js_array); - - static void GenerateStoreExternalArray(MacroAssembler* masm, - ExternalArrayType array_type); + MUST_USE_RESULT MaybeObject* CompileStoreSpecialized(JSObject* receiver); private: - MaybeObject* GetCode(PropertyType type, - String* name, - InlineCacheState state = MONOMORPHIC); + MaybeObject* GetCode(PropertyType type, String* name); StrictModeFlag strict_mode_; }; @@ -871,30 +848,20 @@ class CallOptimization BASE_EMBEDDED { CallHandlerInfo* api_call_info_; }; -class ExternalArrayLoadStubCompiler: public StubCompiler { +class ExternalArrayStubCompiler: public StubCompiler { public: - explicit ExternalArrayLoadStubCompiler() {} + explicit ExternalArrayStubCompiler() {} - MUST_USE_RESULT MaybeObject* CompileLoad( - JSObject* receiver, ExternalArrayType array_type); + MUST_USE_RESULT MaybeObject* CompileKeyedLoadStub( + JSObject* receiver, 
ExternalArrayType array_type, Code::Flags flags);
+
+  MUST_USE_RESULT MaybeObject* CompileKeyedStoreStub(
+      JSObject* receiver, ExternalArrayType array_type, Code::Flags flags);
 
  private:
-  MaybeObject* GetCode();
+  MaybeObject* GetCode(Code::Flags flags);
 };
 
-
-class ExternalArrayStoreStubCompiler: public StubCompiler {
- public:
-  explicit ExternalArrayStoreStubCompiler() {}
-
-  MUST_USE_RESULT MaybeObject* CompileStore(
-      JSObject* receiver, ExternalArrayType array_type);
-
- private:
-  MaybeObject* GetCode();
-};
-
-
 } }  // namespace v8::internal
 
 #endif  // V8_STUB_CACHE_H_
diff --git a/src/type-info.cc b/src/type-info.cc
index 61fb3b147d..346f8cbf7e 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -82,8 +82,7 @@ bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) {
   if (map_or_code->IsMap()) return true;
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
-    return code->is_keyed_load_stub() &&
-        code->ic_state() == MONOMORPHIC &&
+    return code->kind() == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC &&
         code->FindFirstMap() != NULL;
   }
   return false;
@@ -95,8 +94,8 @@ bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) {
   if (map_or_code->IsMap()) return true;
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
-    return code->is_keyed_store_stub() &&
-        code->ic_state() == MONOMORPHIC;
+    return code->kind() == Code::KEYED_EXTERNAL_ARRAY_STORE_IC &&
+        code->FindFirstMap() != NULL;
   }
   return false;
 }
@@ -114,9 +113,7 @@ Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
       Handle<Object>::cast(GetInfo(expr->id())));
   if (map_or_code->IsCode()) {
     Handle<Code> code(Code::cast(*map_or_code));
-    Map* first_map = code->FindFirstMap();
-    ASSERT(first_map != NULL);
-    return Handle<Map>(first_map);
+    return Handle<Map>(code->FindFirstMap());
   }
   return Handle<Map>(Map::cast(*map_or_code));
 }
@@ -448,8 +445,8 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
              kind == Code::COMPARE_IC) {
     SetInfo(id, target);
   } else if (state == MONOMORPHIC) {
-    if (kind == Code::KEYED_LOAD_IC ||
-        kind == Code::KEYED_STORE_IC) {
+    if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC ||
+        kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) {
       SetInfo(id, target);
     } else if (kind != Code::CALL_IC ||
                target->check_type() == RECEIVER_MAP_CHECK) {
diff --git a/src/v8-counters.h b/src/v8-counters.h
index e3b16e92b4..5e765b277f 100644
--- a/src/v8-counters.h
+++ b/src/v8-counters.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -134,7 +134,6 @@ namespace internal {
   SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol)            \
   SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
   SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow)                \
-  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs)      \
   SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow)   \
   /* How is the generic keyed-call stub used? */                      \
   SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast)         \
@@ -180,8 +179,6 @@ namespace internal {
   SC(keyed_store_inline_miss, V8.KeyedStoreInlineMiss)                \
   SC(named_store_global_inline, V8.NamedStoreGlobalInline)            \
   SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss)   \
-  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs)    \
-  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
   SC(store_normal_miss, V8.StoreNormalMiss)                           \
   SC(store_normal_hit, V8.StoreNormalHit)                             \
   SC(cow_arrays_created_stub, V8.COWArraysCreatedStub)                \
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index d59ee44d9d..4ba197889a 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -658,7 +658,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
   char_at_generator.GenerateSlow(masm, call_helper);
 
   __ bind(&miss);
-  GenerateMiss(masm, false);
+  GenerateMiss(masm);
 }
 
 
@@ -701,7 +701,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
                                  1);
 
   __ bind(&slow);
-  GenerateMiss(masm, false);
+  GenerateMiss(masm);
 }
 
 
@@ -1240,7 +1240,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
 }
 
 
-void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
+void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rax    : key
   // -- rdx    : receiver
@@ -1256,10 +1256,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
   __ push(rbx);  // return address
 
   // Perform tail call to the entry.
-  ExternalReference ref = force_generic
-    ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
-                        masm->isolate())
-    : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
+  ExternalReference ref
+      = ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
   __ TailCallExternalReference(ref, 2, 1);
 }
 
@@ -1443,28 +1441,7 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
 }
 
 
-void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  // -- rax    : value
-  // -- rcx    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-
-  __ pop(rbx);
-  __ push(rdx);  // receiver
-  __ push(rcx);  // key
-  __ push(rax);  // value
-  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
-  __ push(rbx);  // return address
-
-  // Do tail-call to runtime routine.
-  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
-  __ TailCallExternalReference(ref, 4, 1);
-}
-
-
-void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
+void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rax    : value
   // -- rcx    : key
@@ -1479,10 +1456,8 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
   __ push(rbx);  // return address
 
   // Do tail-call to runtime routine.
-  ExternalReference ref = force_generic
-    ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
-                        masm->isolate())
-    : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
+  ExternalReference ref =
+      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
   __ TailCallExternalReference(ref, 3, 1);
 }
 
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 64f42f49ee..185c3a56a5 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -2601,21 +2601,6 @@ void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
 }
 
 
-void MacroAssembler::DispatchMap(Register obj,
-                                 Handle<Map> map,
-                                 Handle<Code> success,
-                                 SmiCheckType smi_check_type) {
-  Label fail;
-  if (smi_check_type == DO_SMI_CHECK) {
-    JumpIfSmi(obj, &fail);
-  }
-  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
-  j(equal, success, RelocInfo::CODE_TARGET);
-
-  bind(&fail);
-}
-
-
 void MacroAssembler::AbortIfNotNumber(Register object) {
   Label ok;
   Condition is_smi = CheckSmi(object);
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index d7d5af3bd3..0e46253af9 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -45,7 +45,6 @@ enum AllocationFlags {
   RESULT_CONTAINS_TOP = 1 << 1
 };
 
-
 // Default scratch register used by MacroAssembler (and other code that needs
 // a spare register). The register isn't callee save, and not used by the
 // function calling convention.
@@ -753,14 +752,6 @@ class MacroAssembler: public Assembler {
                 Label* fail,
                 SmiCheckType smi_check_type);
 
-  // Check if the map of an object is equal to a specified map and branch to a
-  // specified target if equal. Skip the smi check if not required (object is
-  // known to be a heap object)
-  void DispatchMap(Register obj,
-                   Handle<Map> map,
-                   Handle<Code> success,
-                   SmiCheckType smi_check_type);
-
   // Check if the object in register heap_object is a string. Afterwards the
   // register map contains the object map and the register instance_type
   // contains the instance_type. The registers map and instance_type can be the
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index a97d4574f3..a3e664a67e 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -703,14 +703,6 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
 }
 
 
-void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
-  Code* code = masm->isolate()->builtins()->builtin(
-      Builtins::kKeyedLoadIC_MissForceGeneric);
-  Handle<Code> ic(code);
-  __ Jump(ic, RelocInfo::CODE_TARGET);
-}
-
-
 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
 // but may be destroyed if store is successful.
 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
@@ -2492,35 +2484,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
 }
 
 
-MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
-    Map* receiver_map) {
-  // ----------- S t a t e -------------
-  // -- rax    : value
-  // -- rcx    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
-  MaybeObject* maybe_stub =
-      KeyedStoreFastElementStub(is_js_array).TryGetCode();
-  Code* stub;
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
-
-  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
-
-  // Return the generated code.
-  return GetCode(NORMAL, NULL);
-}
-
-
-MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
-    MapList* receiver_maps,
-    CodeList* handler_ics) {
+MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
+    JSObject* receiver) {
   // ----------- S t a t e -------------
   // -- rax    : value
   // -- rcx    : key
@@ -2528,26 +2493,51 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
   // -- rsp[0] : return address
   // -----------------------------------
   Label miss;
+
+  // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss);
 
-  Register map_reg = rbx;
-  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
-  int receiver_count = receiver_maps->length();
-  for (int current = 0; current < receiver_count; ++current) {
-    // Check map and tail call if there's a match
-    Handle<Map> map(receiver_maps->at(current));
-    __ Cmp(map_reg, map);
-    __ j(equal,
-         Handle<Code>(handler_ics->at(current)),
-         RelocInfo::CODE_TARGET);
+  // Check that the map matches.
+  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
+         Handle<Map>(receiver->map()));
+  __ j(not_equal, &miss);
+
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(rcx, &miss);
+
+  // Get the elements array and make sure it is a fast element array, not 'cow'.
+  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
+         factory()->fixed_array_map());
+  __ j(not_equal, &miss);
+
+  // Check that the key is within bounds.
+  if (receiver->IsJSArray()) {
+    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+    __ j(above_equal, &miss);
+  } else {
+    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
+    __ j(above_equal, &miss);
   }
 
+  // Do the store and update the write barrier. Make sure to preserve
+  // the value in register eax.
+  __ movq(rdx, rax);
+  __ SmiToInteger32(rcx, rcx);
+  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
+          rax);
+  __ RecordWrite(rdi, 0, rdx, rcx);
+
+  // Done.
+  __ ret(0);
+
+  // Handle store cache miss.
   __ bind(&miss);
   Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
   __ jmp(ic, RelocInfo::CODE_TARGET);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, NULL);
 }
 
 
@@ -2561,7 +2551,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
   // -----------------------------------
   Label miss;
 
-  // Check that receiver is not a smi.
+  // Chech that receiver is not a smi.
   __ JumpIfSmi(rax, &miss);
 
   // Check the maps of the full prototype chain. Also check that
@@ -2952,56 +2942,49 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
 }
 
 
-MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
-  // ----------- S t a t e -------------
-  // -- rax    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
-  Code* stub;
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver_map),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
-
-  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
-
-  // Return the generated code.
-  return GetCode(NORMAL, NULL);
-}
-
-
-MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
-    MapList* receiver_maps,
-    CodeList* handler_ics) {
+MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
   // ----------- S t a t e -------------
   // -- rax    : key
   // -- rdx    : receiver
   // -- rsp[0] : return address
   // -----------------------------------
   Label miss;
+
+  // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss);
 
-  Register map_reg = rbx;
-  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
-  int receiver_count = receiver_maps->length();
-  for (int current = 0; current < receiver_count; ++current) {
-    // Check map and tail call if there's a match
-    Handle<Map> map(receiver_maps->at(current));
-    __ Cmp(map_reg, map);
-    __ j(equal,
-         Handle<Code>(handler_ics->at(current)),
-         RelocInfo::CODE_TARGET);
-  }
+  // Check that the map matches.
+  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
+         Handle<Map>(receiver->map()));
+  __ j(not_equal, &miss);
 
-  __ bind(&miss);
+  // Check that the key is a smi.
+  __ JumpIfNotSmi(rax, &miss);
+
+  // Get the elements array.
+  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ AssertFastElements(rcx);
+
+  // Check that the key is within bounds.
+  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
+  __ j(above_equal, &miss);
+
+  // Load the result and make sure it's not the hole.
+  SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
+  __ movq(rbx, FieldOperand(rcx,
+                            index.reg,
+                            index.scale,
+                            FixedArray::kHeaderSize));
+  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
+  __ j(equal, &miss);
+  __ movq(rax, rbx);
+  __ ret(0);
+
+  __ bind(&miss);
   GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
 
   // Return the generated code.
-  return GetCode(NORMAL, NULL, MEGAMORPHIC);
+  return GetCode(NORMAL, NULL);
 }
 
 
@@ -3138,79 +3121,30 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
 }
 
 
-MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
-    JSObject*receiver, ExternalArrayType array_type) {
+MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
+    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
   // ----------- S t a t e -------------
   // -- rax    : key
   // -- rdx    : receiver
   // -- rsp[0] : return address
   // -----------------------------------
-  MaybeObject* maybe_stub =
-      KeyedLoadExternalArrayStub(array_type).TryGetCode();
-  Code* stub;
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver->map()),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
+  Label slow;
 
-  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
-
-  // Return the generated code.
-  return GetCode();
-}
-
-MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
-    JSObject* receiver, ExternalArrayType array_type) {
-  // ----------- S t a t e -------------
-  // -- rax    : value
-  // -- rcx    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  MaybeObject* maybe_stub =
-      KeyedStoreExternalArrayStub(array_type).TryGetCode();
-  Code* stub;
-  if (!maybe_stub->To(&stub)) return maybe_stub;
-  __ DispatchMap(rdx,
-                 Handle<Map>(receiver->map()),
-                 Handle<Code>(stub),
-                 DO_SMI_CHECK);
-
-  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
-
-  return GetCode();
-}
-
-
-#undef __
-#define __ ACCESS_MASM(masm)
-
-
-void KeyedLoadStubCompiler::GenerateLoadExternalArray(
-    MacroAssembler* masm,
-    ExternalArrayType array_type) {
-  // ----------- S t a t e -------------
-  // -- rax    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  Label slow, miss_force_generic;
-
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
+  // Check that the object isn't a smi.
+  __ JumpIfSmi(rdx, &slow);
 
   // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &miss_force_generic);
+  __ JumpIfNotSmi(rax, &slow);
+
+  // Check that the map matches.
+  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);
+  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
 
   // Check that the index is in range.
-  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
   __ SmiToInteger32(rcx, rax);
   __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
   // Unsigned comparison catches both negative and too-large values.
-  __ j(above_equal, &miss_force_generic);
+  __ j(above_equal, &slow);
 
   // rax: index (as a smi)
   // rdx: receiver (JSObject)
@@ -3297,7 +3231,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
 
   // Slow case: Jump to runtime.
   __ bind(&slow);
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
 
   // ----------- S t a t e -------------
@@ -3306,46 +3240,44 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
   // -- rax    : key
   // -- rdx    : receiver
   // -- rsp[0] : return address
   // -----------------------------------
-  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
+  __ pop(rbx);
+  __ push(rdx);  // receiver
+  __ push(rax);  // name
+  __ push(rbx);  // return address
 
-  // Miss case: Jump to runtime.
-  __ bind(&miss_force_generic);
+  // Perform tail call to the entry.
+  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
 
-  // ----------- S t a t e -------------
-  // -- rax    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  Handle<Code> miss_ic =
-      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
-  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
+  // Return the generated code.
+  return GetCode(flags);
 }
 
-void KeyedStoreStubCompiler::GenerateStoreExternalArray(
-    MacroAssembler* masm,
-    ExternalArrayType array_type) {
+MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
+    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
   // ----------- S t a t e -------------
   // -- rax    : value
   // -- rcx    : key
   // -- rdx    : receiver
   // -- rsp[0] : return address
   // -----------------------------------
-  Label slow, miss_force_generic;
+  Label slow;
 
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
+  // Check that the object isn't a smi.
+  __ JumpIfSmi(rdx, &slow);
+
+  // Check that the map matches.
+  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);
+  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
 
   // Check that the key is a smi.
-  __ JumpIfNotSmi(rcx, &miss_force_generic);
+  __ JumpIfNotSmi(rcx, &slow);
 
   // Check that the index is in range.
-  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
   __ SmiToInteger32(rdi, rcx);  // Untag the index.
   __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
   // Unsigned comparison catches both negative and too-large values.
-  __ j(above_equal, &miss_force_generic);
+  __ j(above_equal, &slow);
 
   // Handle both smis and HeapNumbers in the fast path. Go to the
   // runtime for all other kinds of values.
@@ -3479,116 +3411,21 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
   // -- rsp[0] : return address
   // -----------------------------------
 
-  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
-  __ jmp(ic, RelocInfo::CODE_TARGET);
+  __ pop(rbx);
+  __ push(rdx);  // receiver
+  __ push(rcx);  // key
+  __ push(rax);  // value
+  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
+  __ Push(Smi::FromInt(
+      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
+  __ push(rbx);  // return address
 
-  // Miss case: call runtime.
-  __ bind(&miss_force_generic);
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
 
-  // ----------- S t a t e -------------
-  // -- rax    : value
-  // -- rcx    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-
-  Handle<Code> miss_ic =
-      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
-  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
+  return GetCode(flags);
 }
 
-
-void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  // -- rax    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  Label miss_force_generic;
-
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
-
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &miss_force_generic);
-
-  // Get the elements array.
-  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
-  __ AssertFastElements(rcx);
-
-  // Check that the key is within bounds.
-  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
-  __ j(above_equal, &miss_force_generic);
-
-  // Load the result and make sure it's not the hole.
-  SmiIndex index = masm->SmiToIndex(rbx, rax, kPointerSizeLog2);
-  __ movq(rbx, FieldOperand(rcx,
-                            index.reg,
-                            index.scale,
-                            FixedArray::kHeaderSize));
-  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
-  __ j(equal, &miss_force_generic);
-  __ movq(rax, rbx);
-  __ ret(0);
-
-  __ bind(&miss_force_generic);
-  Code* code = masm->isolate()->builtins()->builtin(
-      Builtins::kKeyedLoadIC_MissForceGeneric);
-  Handle<Code> ic(code);
-  __ jmp(ic, RelocInfo::CODE_TARGET);
-}
-
-
-void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
-                                                      bool is_js_array) {
-  // ----------- S t a t e -------------
-  // -- rax    : value
-  // -- rcx    : key
-  // -- rdx    : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  Label miss_force_generic;
-
-  // This stub is meant to be tail-jumped to, the receiver must already
-  // have been verified by the caller to not be a smi.
-
-  // Check that the key is a smi.
-  __ JumpIfNotSmi(rcx, &miss_force_generic);
-
-  // Get the elements array and make sure it is a fast element array, not 'cow'.
-  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
-  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
-                 Heap::kFixedArrayMapRootIndex);
-  __ j(not_equal, &miss_force_generic);
-
-  // Check that the key is within bounds.
-  if (is_js_array) {
-    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
-    __ j(above_equal, &miss_force_generic);
-  } else {
-    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
-    __ j(above_equal, &miss_force_generic);
-  }
-
-  // Do the store and update the write barrier. Make sure to preserve
-  // the value in register eax.
-  __ movq(rdx, rax);
-  __ SmiToInteger32(rcx, rcx);
-  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
-          rax);
-  __ RecordWrite(rdi, 0, rdx, rcx);
-
-  // Done.
-  __ ret(0);
-
-  // Handle store cache miss.
-  __ bind(&miss_force_generic);
-  Handle<Code> ic_force_generic =
-      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
-  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
-}
-
-
 #undef __
 
 } }  // namespace v8::internal
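
Illustrative sketch (not V8 source and not part of the patch above): the x64 stub-cache hunks trade the map-dispatch stubs -- MacroAssembler::DispatchMap plus the MapList/CodeList pairs consumed by CompileLoadMegamorphic and CompileStoreMegamorphic -- for single-receiver specialized stubs that bake in one map check and one fast path. The plain C++ below models only that dispatch difference; Map, Handler, FastElementLoad, GenericMiss and the table layout are invented stand-ins for this example, and real IC stubs are generated machine code rather than C++ functions.

#include <cstddef>
#include <cstdio>
#include <utility>
#include <vector>

struct Map {};                     // stand-in for a hidden class (v8::internal::Map)
typedef int (*Handler)(int key);   // stand-in for a compiled IC stub

int FastElementLoad(int key) { return key * 2; }  // pretend fast-path handler
int GenericMiss(int) { return -1; }               // pretend IC-miss fallback

// Megamorphic shape (removed by this patch): compare the receiver's map
// against every cached map and call the matching handler, else miss.
int DispatchMegamorphic(const Map* receiver_map,
                        const std::vector<std::pair<const Map*, Handler> >& table,
                        int key) {
  for (std::size_t i = 0; i < table.size(); ++i) {
    if (table[i].first == receiver_map) return table[i].second(key);
  }
  return GenericMiss(key);
}

// Specialized shape (restored by this patch): one baked-in map check and one
// baked-in fast path, mirroring what CompileLoadSpecialized emits.
int DispatchSpecialized(const Map* receiver_map,
                        const Map* expected_map,
                        int key) {
  if (receiver_map != expected_map) return GenericMiss(key);
  return FastElementLoad(key);
}

int main() {
  Map a, b;
  std::vector<std::pair<const Map*, Handler> > table;
  table.push_back(std::make_pair(&a, FastElementLoad));
  table.push_back(std::make_pair(&b, GenericMiss));
  std::printf("%d %d %d\n",
              DispatchMegamorphic(&a, table, 3),  // map a matches: 6
              DispatchSpecialized(&b, &a, 3),     // wrong map: miss, -1
              DispatchSpecialized(&a, &a, 3));    // map a matches: 6
  return 0;
}

Compiled as ordinary C++ this prints "6 -1 6": both the megamorphic table and the correctly specialized dispatch reach the fast path, while the wrongly specialized call falls back to the miss handler, which is the behavior the removed and restored stubs respectively encode.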