Reland 7917:
Better support for 'polymorphic' JS and external arrays: allow keyed store/load stubs to switch between external arrays and fast JS arrays without forcing a state transition to the generic stub. This CL consists of two pieces of functionality. First, code stubs for fast-element arrays no longer transition immediately to the MEGAMORPHIC state when there is a map mismatch. Second, two ICs are cached per map for fast elements: the MONOMORPHIC version, and a new MEGAMORPHIC version that handles two or more different maps and dispatches to shared stubs to perform the array operation.

Review URL: http://codereview.chromium.org/7036016

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7935 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
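The caching scheme the CL describes can be illustrated with a small, self-contained sketch (illustrative only: the types and names below, such as KeyedAccessSite and Handler, are invented for this example and are not V8 internals). A keyed access site remembers one handler per receiver map and dispatches over the cached maps before falling back to a generic path, instead of going generic on the first map mismatch.

// Illustrative sketch only (not V8 code): models the progression the CL
// describes -- a keyed access site that caches one stub per receiver map and
// dispatches over the cached maps before falling back to a generic handler.
#include <cstdio>
#include <utility>
#include <vector>

struct Map {};                     // stand-in for a hidden class (receiver map)
typedef void (*Handler)(int key);  // stand-in for a compiled map-specific stub

class KeyedAccessSite {
 public:
  // Records a map-specific handler as long as the site has not seen too many
  // distinct maps; after that every access goes through the generic path.
  void Learn(const Map* map, Handler handler) {
    if (handlers_.size() < kMaxMaps) {
      handlers_.push_back(std::make_pair(map, handler));
    }
  }

  // The analogue of the generated dispatch stub: compare the receiver map
  // against each cached map and jump to the matching handler, else miss.
  void Access(const Map* map, int key) {
    for (size_t i = 0; i < handlers_.size(); ++i) {
      if (handlers_[i].first == map) {
        handlers_[i].second(key);
        return;
      }
    }
    Generic(key);  // miss: a real IC would compile a new stub and re-dispatch
  }

 private:
  static void Generic(int key) { std::printf("generic access, key=%d\n", key); }
  enum { kMaxMaps = 4 };
  std::vector<std::pair<const Map*, Handler> > handlers_;
};

static void FastArrayLoad(int key) { std::printf("fast elements load, key=%d\n", key); }
static void Int32ArrayLoad(int key) { std::printf("external int32 load, key=%d\n", key); }

int main() {
  Map js_array_map, int32_array_map;
  KeyedAccessSite site;
  site.Learn(&js_array_map, &FastArrayLoad);
  site.Learn(&int32_array_map, &Int32ArrayLoad);  // a second map no longer forces the generic stub
  site.Access(&js_array_map, 0);
  site.Access(&int32_array_map, 1);
  return 0;
}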
parent 7fba506f23
commit 123500999f
@@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -868,10 +868,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
}


Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
@@ -883,8 +880,11 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {

  __ Push(r1, r0);

  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}

@@ -1075,7 +1075,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
  GenerateMiss(masm, false);
}


@@ -1115,11 +1115,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
      1);

  __ bind(&slow);
  GenerateMiss(masm);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
@@ -1130,8 +1130,29 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
@@ -1681,6 +1681,23 @@ void MacroAssembler::CheckMap(Register obj,
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register scratch,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  mov(ip, Operand(map));
  cmp(scratch, ip);
  Jump(success, RelocInfo::CODE_TARGET, eq);
  bind(&fail);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
@@ -578,6 +578,7 @@ class MacroAssembler: public Assembler {
                Label* fail,
                SmiCheckType smi_check_type);


  void CheckMap(Register obj,
                Register scratch,
                Heap::RootListIndex index,
@@ -585,6 +586,16 @@ class MacroAssembler: public Assembler {
                SmiCheckType smi_check_type);


  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object)
  void DispatchMap(Register obj,
                   Register scratch,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);


  // Compare the object in a register to a value from the root list.
  // Uses the ip register as scratch.
  void CompareRoot(Register obj, Heap::RootListIndex index);
@@ -3092,52 +3092,56 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(r1,
                 r2,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;
  __ JumpIfSmi(r1, &miss);

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map matches.
  int receiver_count = receiver_maps->length();
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &miss);

  // Get the elements array.
  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
  __ AssertFastElements(r2);

  // Check that the key is within bounds.
  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(hs, &miss);

  // Load the result and make sure it's not the hole.
  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(r4,
         MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &miss);
  __ mov(r0, r4);
  __ Ret();
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map(receiver_maps->at(current));
    Handle<Code> code(handler_ics->at(current));
    __ mov(ip, Operand(map));
    __ cmp(r2, ip);
    __ Jump(code, RelocInfo::CODE_TARGET, eq);
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}
@ -3179,69 +3183,27 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
|
||||
JSObject* receiver) {
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
|
||||
Map* receiver_map) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r0 : value
|
||||
// -- r1 : key
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -- r3 : scratch
|
||||
// -- r4 : scratch (elements)
|
||||
// -----------------------------------
|
||||
Label miss;
|
||||
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
|
||||
MaybeObject* maybe_stub =
|
||||
KeyedStoreFastElementStub(is_js_array).TryGetCode();
|
||||
Code* stub;
|
||||
if (!maybe_stub->To(&stub)) return maybe_stub;
|
||||
__ DispatchMap(r2,
|
||||
r3,
|
||||
Handle<Map>(receiver_map),
|
||||
Handle<Code>(stub),
|
||||
DO_SMI_CHECK);
|
||||
|
||||
Register value_reg = r0;
|
||||
Register key_reg = r1;
|
||||
Register receiver_reg = r2;
|
||||
Register scratch = r3;
|
||||
Register elements_reg = r4;
|
||||
|
||||
// Check that the receiver isn't a smi.
|
||||
__ tst(receiver_reg, Operand(kSmiTagMask));
|
||||
__ b(eq, &miss);
|
||||
|
||||
// Check that the map matches.
|
||||
__ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
|
||||
__ cmp(scratch, Operand(Handle<Map>(receiver->map())));
|
||||
__ b(ne, &miss);
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ tst(key_reg, Operand(kSmiTagMask));
|
||||
__ b(ne, &miss);
|
||||
|
||||
// Get the elements array and make sure it is a fast element array, not 'cow'.
|
||||
__ ldr(elements_reg,
|
||||
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
|
||||
__ ldr(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
|
||||
__ cmp(scratch, Operand(Handle<Map>(factory()->fixed_array_map())));
|
||||
__ b(ne, &miss);
|
||||
|
||||
// Check that the key is within bounds.
|
||||
if (receiver->IsJSArray()) {
|
||||
__ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
|
||||
} else {
|
||||
__ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
|
||||
}
|
||||
// Compare smis.
|
||||
__ cmp(key_reg, scratch);
|
||||
__ b(hs, &miss);
|
||||
|
||||
__ add(scratch,
|
||||
elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
||||
ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
|
||||
__ str(value_reg,
|
||||
MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
|
||||
__ RecordWrite(scratch,
|
||||
Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
|
||||
receiver_reg , elements_reg);
|
||||
|
||||
// value_reg (r0) is preserved.
|
||||
// Done.
|
||||
__ Ret();
|
||||
|
||||
__ bind(&miss);
|
||||
Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ Jump(ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
// Return the generated code.
|
||||
@ -3249,6 +3211,38 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
|
||||
MapList* receiver_maps,
|
||||
CodeList* handler_ics) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r0 : value
|
||||
// -- r1 : key
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -- r3 : scratch
|
||||
// -----------------------------------
|
||||
Label miss;
|
||||
__ JumpIfSmi(r2, &miss);
|
||||
|
||||
int receiver_count = receiver_maps->length();
|
||||
__ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
|
||||
for (int current = 0; current < receiver_count; ++current) {
|
||||
Handle<Map> map(receiver_maps->at(current));
|
||||
Handle<Code> code(handler_ics->at(current));
|
||||
__ mov(ip, Operand(map));
|
||||
__ cmp(r3, ip);
|
||||
__ Jump(code, RelocInfo::CODE_TARGET, eq);
|
||||
}
|
||||
|
||||
__ bind(&miss);
|
||||
Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
|
||||
|
||||
// Return the generated code.
|
||||
return GetCode(NORMAL, NULL, MEGAMORPHIC);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r0 : argc
|
||||
@ -3393,6 +3387,60 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
|
||||
JSObject*receiver, ExternalArrayType array_type) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- lr : return address
|
||||
// -- r0 : key
|
||||
// -- r1 : receiver
|
||||
// -----------------------------------
|
||||
MaybeObject* maybe_stub =
|
||||
KeyedLoadExternalArrayStub(array_type).TryGetCode();
|
||||
Code* stub;
|
||||
if (!maybe_stub->To(&stub)) return maybe_stub;
|
||||
__ DispatchMap(r1,
|
||||
r2,
|
||||
Handle<Map>(receiver->map()),
|
||||
Handle<Code>(stub),
|
||||
DO_SMI_CHECK);
|
||||
|
||||
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
|
||||
__ Jump(ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
// Return the generated code.
|
||||
return GetCode();
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
|
||||
JSObject* receiver, ExternalArrayType array_type) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r0 : value
|
||||
// -- r1 : name
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -----------------------------------
|
||||
MaybeObject* maybe_stub =
|
||||
KeyedStoreExternalArrayStub(array_type).TryGetCode();
|
||||
Code* stub;
|
||||
if (!maybe_stub->To(&stub)) return maybe_stub;
|
||||
__ DispatchMap(r2,
|
||||
r3,
|
||||
Handle<Map>(receiver->map()),
|
||||
Handle<Code>(stub),
|
||||
DONT_DO_SMI_CHECK);
|
||||
|
||||
Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ Jump(ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
return GetCode();
|
||||
}
|
||||
|
||||
|
||||
#undef __
|
||||
#define __ ACCESS_MASM(masm)
|
||||
|
||||
|
||||
static bool IsElementTypeSigned(ExternalArrayType array_type) {
|
||||
switch (array_type) {
|
||||
case kExternalByteArray:
|
||||
@ -3412,30 +3460,24 @@ static bool IsElementTypeSigned(ExternalArrayType array_type) {
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
JSObject* receiver_object,
|
||||
ExternalArrayType array_type,
|
||||
Code::Flags flags) {
|
||||
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
|
||||
MacroAssembler* masm,
|
||||
ExternalArrayType array_type) {
|
||||
// ---------- S t a t e --------------
|
||||
// -- lr : return address
|
||||
// -- r0 : key
|
||||
// -- r1 : receiver
|
||||
// -----------------------------------
|
||||
Label slow, failed_allocation;
|
||||
Label miss_force_generic, slow, failed_allocation;
|
||||
|
||||
Register key = r0;
|
||||
Register receiver = r1;
|
||||
|
||||
// Check that the object isn't a smi
|
||||
__ JumpIfSmi(receiver, &slow);
|
||||
// This stub is meant to be tail-jumped to, the receiver must already
|
||||
// have been verified by the caller to not be a smi.
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ JumpIfNotSmi(key, &slow);
|
||||
|
||||
// Make sure that we've got the right map.
|
||||
__ ldr(r2, FieldMemOperand(receiver, HeapObject::kMapOffset));
|
||||
__ cmp(r2, Operand(Handle<Map>(receiver_object->map())));
|
||||
__ b(ne, &slow);
|
||||
__ JumpIfNotSmi(key, &miss_force_generic);
|
||||
|
||||
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
|
||||
// r3: elements array
|
||||
@ -3444,7 +3486,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
__ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
|
||||
__ cmp(ip, Operand(key, ASR, kSmiTagSize));
|
||||
// Unsigned comparison catches both negative and too-large values.
|
||||
__ b(lo, &slow);
|
||||
__ b(lo, &miss_force_generic);
|
||||
|
||||
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
|
||||
// r3: base pointer of external storage
|
||||
@ -3536,8 +3578,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
__ Ret();
|
||||
} else {
|
||||
WriteInt32ToHeapNumberStub stub(value, r0, r3);
|
||||
MaybeObject* stub_code = masm()->TryTailCallStub(&stub);
|
||||
if (stub_code->IsFailure()) return stub_code;
|
||||
__ TailCallStub(&stub);
|
||||
}
|
||||
} else if (array_type == kExternalUnsignedIntArray) {
|
||||
// The test is different for unsigned int values. Since we need
|
||||
@ -3582,12 +3623,12 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
|
||||
__ bind(&box_int_0);
|
||||
// Integer does not have leading zeros.
|
||||
GenerateUInt2Double(masm(), hiword, loword, r4, 0);
|
||||
GenerateUInt2Double(masm, hiword, loword, r4, 0);
|
||||
__ b(&done);
|
||||
|
||||
__ bind(&box_int_1);
|
||||
// Integer has one leading zero.
|
||||
GenerateUInt2Double(masm(), hiword, loword, r4, 1);
|
||||
GenerateUInt2Double(masm, hiword, loword, r4, 1);
|
||||
|
||||
|
||||
__ bind(&done);
|
||||
@ -3709,7 +3750,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
// Slow case, key and receiver still in r0 and r1.
|
||||
__ bind(&slow);
|
||||
__ IncrementCounter(
|
||||
masm()->isolate()->counters()->keyed_load_external_array_slow(),
|
||||
masm->isolate()->counters()->keyed_load_external_array_slow(),
|
||||
1, r2, r3);
|
||||
|
||||
// ---------- S t a t e --------------
|
||||
@ -3722,21 +3763,23 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
|
||||
|
||||
__ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
|
||||
|
||||
return GetCode(flags);
|
||||
__ bind(&miss_force_generic);
|
||||
Code* stub = masm->isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_MissForceGeneric);
|
||||
__ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
|
||||
JSObject* receiver_object,
|
||||
ExternalArrayType array_type,
|
||||
Code::Flags flags) {
|
||||
void KeyedStoreStubCompiler::GenerateStoreExternalArray(
|
||||
MacroAssembler* masm,
|
||||
ExternalArrayType array_type) {
|
||||
// ---------- S t a t e --------------
|
||||
// -- r0 : value
|
||||
// -- r1 : key
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -----------------------------------
|
||||
Label slow, check_heap_number;
|
||||
Label slow, check_heap_number, miss_force_generic;
|
||||
|
||||
// Register usage.
|
||||
Register value = r0;
|
||||
@ -3744,25 +3787,20 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
|
||||
Register receiver = r2;
|
||||
// r3 mostly holds the elements array or the destination external array.
|
||||
|
||||
// Check that the object isn't a smi.
|
||||
__ JumpIfSmi(receiver, &slow);
|
||||
|
||||
// Make sure that we've got the right map.
|
||||
__ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
|
||||
__ cmp(r3, Operand(Handle<Map>(receiver_object->map())));
|
||||
__ b(ne, &slow);
|
||||
// This stub is meant to be tail-jumped to, the receiver must already
|
||||
// have been verified by the caller to not be a smi.
|
||||
|
||||
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ JumpIfNotSmi(key, &slow);
|
||||
__ JumpIfNotSmi(key, &miss_force_generic);
|
||||
|
||||
// Check that the index is in range
|
||||
__ SmiUntag(r4, key);
|
||||
__ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
|
||||
__ cmp(r4, ip);
|
||||
// Unsigned comparison catches both negative and too-large values.
|
||||
__ b(hs, &slow);
|
||||
__ b(hs, &miss_force_generic);
|
||||
|
||||
// Handle both smis and HeapNumbers in the fast path. Go to the
|
||||
// runtime for all other kinds of values.
|
||||
@ -3800,7 +3838,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
|
||||
break;
|
||||
case kExternalFloatArray:
|
||||
// Perform int-to-float conversion and store to memory.
|
||||
StoreIntAsFloat(masm(), r3, r4, r5, r6, r7, r9);
|
||||
StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
|
||||
break;
|
||||
case kExternalDoubleArray:
|
||||
__ add(r3, r3, Operand(r4, LSL, 3));
|
||||
@ -3812,7 +3850,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
|
||||
destination = FloatingPointHelper::kCoreRegisters;
|
||||
}
|
||||
FloatingPointHelper::ConvertIntToDouble(
|
||||
masm(), r5, destination,
|
||||
masm, r5, destination,
|
||||
d0, r6, r7, // These are: double_dst, dst1, dst2.
|
||||
r4, s2); // These are: scratch2, single_scratch.
|
||||
if (destination == FloatingPointHelper::kVFPRegisters) {
|
||||
@ -4039,28 +4077,137 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
|
||||
}
|
||||
}
|
||||
|
||||
// Slow case: call runtime.
|
||||
// Slow case, key and receiver still in r0 and r1.
|
||||
__ bind(&slow);
|
||||
__ IncrementCounter(
|
||||
masm->isolate()->counters()->keyed_load_external_array_slow(),
|
||||
1, r2, r3);
|
||||
|
||||
// Entry registers are intact.
|
||||
// ---------- S t a t e --------------
|
||||
// -- r0 : value
|
||||
// -- r1 : key
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -- r0 : key
|
||||
// -- r1 : receiver
|
||||
// -----------------------------------
|
||||
Handle<Code> slow_ic =
|
||||
masm->isolate()->builtins()->KeyedStoreIC_Slow();
|
||||
__ Jump(slow_ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
// Miss case, call the runtime.
|
||||
__ bind(&miss_force_generic);
|
||||
|
||||
// ---------- S t a t e --------------
|
||||
// -- lr : return address
|
||||
// -- r0 : key
|
||||
// -- r1 : receiver
|
||||
// -----------------------------------
|
||||
|
||||
// Push receiver, key and value for runtime call.
|
||||
__ Push(r2, r1, r0);
|
||||
Handle<Code> miss_ic =
|
||||
masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
|
||||
__ Jump(miss_ic, RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
__ mov(r1, Operand(Smi::FromInt(NONE))); // PropertyAttributes
|
||||
__ mov(r0, Operand(Smi::FromInt(
|
||||
Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
|
||||
__ Push(r1, r0);
|
||||
|
||||
__ TailCallRuntime(Runtime::kSetProperty, 5, 1);
|
||||
void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- lr : return address
|
||||
// -- r0 : key
|
||||
// -- r1 : receiver
|
||||
// -----------------------------------
|
||||
Label miss_force_generic;
|
||||
|
||||
return GetCode(flags);
|
||||
// This stub is meant to be tail-jumped to, the receiver must already
|
||||
// have been verified by the caller to not be a smi.
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ JumpIfNotSmi(r0, &miss_force_generic);
|
||||
|
||||
// Get the elements array.
|
||||
__ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
|
||||
__ AssertFastElements(r2);
|
||||
|
||||
// Check that the key is within bounds.
|
||||
__ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
|
||||
__ cmp(r0, Operand(r3));
|
||||
__ b(hs, &miss_force_generic);
|
||||
|
||||
// Load the result and make sure it's not the hole.
|
||||
__ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
||||
ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
|
||||
__ ldr(r4,
|
||||
MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
|
||||
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
|
||||
__ cmp(r4, ip);
|
||||
__ b(eq, &miss_force_generic);
|
||||
__ mov(r0, r4);
|
||||
__ Ret();
|
||||
|
||||
__ bind(&miss_force_generic);
|
||||
Code* stub = masm->isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_MissForceGeneric);
|
||||
__ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
|
||||
bool is_js_array) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r0 : value
|
||||
// -- r1 : key
|
||||
// -- r2 : receiver
|
||||
// -- lr : return address
|
||||
// -- r3 : scratch
|
||||
// -- r4 : scratch (elements)
|
||||
// -----------------------------------
|
||||
Label miss_force_generic;
|
||||
|
||||
Register value_reg = r0;
|
||||
Register key_reg = r1;
|
||||
Register receiver_reg = r2;
|
||||
Register scratch = r3;
|
||||
Register elements_reg = r4;
|
||||
|
||||
// This stub is meant to be tail-jumped to, the receiver must already
|
||||
// have been verified by the caller to not be a smi.
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ JumpIfNotSmi(r0, &miss_force_generic);
|
||||
|
||||
// Get the elements array and make sure it is a fast element array, not 'cow'.
|
||||
__ ldr(elements_reg,
|
||||
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
|
||||
__ CheckMap(elements_reg,
|
||||
scratch,
|
||||
Heap::kFixedArrayMapRootIndex,
|
||||
&miss_force_generic,
|
||||
DONT_DO_SMI_CHECK);
|
||||
|
||||
// Check that the key is within bounds.
|
||||
if (is_js_array) {
|
||||
__ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
|
||||
} else {
|
||||
__ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
|
||||
}
|
||||
// Compare smis.
|
||||
__ cmp(key_reg, scratch);
|
||||
__ b(hs, &miss_force_generic);
|
||||
|
||||
__ add(scratch,
|
||||
elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
||||
ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
|
||||
__ str(value_reg,
|
||||
MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
|
||||
__ RecordWrite(scratch,
|
||||
Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
|
||||
receiver_reg , elements_reg);
|
||||
|
||||
// value_reg (r0) is preserved.
|
||||
// Done.
|
||||
__ Ret();
|
||||
|
||||
__ bind(&miss_force_generic);
|
||||
Handle<Code> ic =
|
||||
masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
|
||||
__ Jump(ic, RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
|
@@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -1341,8 +1341,18 @@ static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
  KeyedLoadIC::GenerateMiss(masm, false);
}


static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm, true);
}


@@ -1431,7 +1441,17 @@ static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm);
  KeyedStoreIC::GenerateMiss(masm, false);
}


static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm, true);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}
227 src/builtins.h
@ -1,4 +1,4 @@
|
||||
// Copyright 2010 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -67,115 +67,122 @@ enum BuiltinExtraArguments {
|
||||
|
||||
|
||||
// Define list of builtins implemented in assembly.
|
||||
#define BUILTIN_LIST_A(V) \
|
||||
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructCall, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LazyCompile, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LazyRecompile, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyOSR, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
\
|
||||
V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \
|
||||
kStrictMode) \
|
||||
V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
\
|
||||
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
|
||||
V(FunctionCall, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(FunctionApply, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(ArrayCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(StringConstructCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
|
||||
#define BUILTIN_LIST_A(V) \
|
||||
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructCall, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructStubApi, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LazyCompile, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LazyRecompile, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(NotifyOSR, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(LoadIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_MissForceGeneric, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_Slow, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_MissForceGeneric, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_Slow, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_PreMonomorphic, LOAD_IC, PREMONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_Normal, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_ArrayLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_StringLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_StringWrapperLength, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_FunctionPrototype, LOAD_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(LoadIC_Megamorphic, LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_Generic, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_ArrayLength, STORE_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Normal, STORE_IC, MONOMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Megamorphic, STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_GlobalProxy, STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
V(StoreIC_Initialize_Strict, STORE_IC, UNINITIALIZED, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_ArrayLength_Strict, STORE_IC, MONOMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_Normal_Strict, STORE_IC, MONOMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_Megamorphic_Strict, STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
V(StoreIC_GlobalProxy_Strict, STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
\
|
||||
V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(KeyedStoreIC_Generic, KEYED_STORE_IC, MEGAMORPHIC, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \
|
||||
kStrictMode) \
|
||||
V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \
|
||||
kStrictMode) \
|
||||
\
|
||||
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
|
||||
V(FunctionCall, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(FunctionApply, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(ArrayCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(StringConstructCode, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState) \
|
||||
\
|
||||
V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
|
||||
Code::kNoExtraICState)
|
||||
|
||||
|
||||
|
@@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,7 @@

#include "bootstrapper.h"
#include "code-stubs.h"
#include "stub-cache.h"
#include "factory.h"
#include "gdb-jit.h"
#include "macro-assembler.h"
@@ -243,4 +244,24 @@ const char* InstanceofStub::GetName() {
}


void KeyedLoadFastElementStub::Generate(MacroAssembler* masm) {
  KeyedLoadStubCompiler::GenerateLoadFastElement(masm);
}


void KeyedStoreFastElementStub::Generate(MacroAssembler* masm) {
  KeyedStoreStubCompiler::GenerateStoreFastElement(masm, is_js_array_);
}


void KeyedLoadExternalArrayStub::Generate(MacroAssembler* masm) {
  KeyedLoadStubCompiler::GenerateLoadExternalArray(masm, array_type_);
}


void KeyedStoreExternalArrayStub::Generate(MacroAssembler* masm) {
  KeyedStoreStubCompiler::GenerateStoreExternalArray(masm, array_type_);
}


} }  // namespace v8::internal
@ -66,6 +66,10 @@ namespace internal {
|
||||
V(NumberToString) \
|
||||
V(CEntry) \
|
||||
V(JSEntry) \
|
||||
V(KeyedLoadFastElement) \
|
||||
V(KeyedStoreFastElement) \
|
||||
V(KeyedLoadExternalArray) \
|
||||
V(KeyedStoreExternalArray) \
|
||||
V(DebuggerStatement) \
|
||||
V(StringDictionaryNegativeLookup)
|
||||
|
||||
@ -922,6 +926,86 @@ class AllowStubCallsScope {
|
||||
DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
|
||||
};
|
||||
|
||||
#ifdef DEBUG
|
||||
#define DECLARE_ARRAY_STUB_PRINT(name) void Print() { PrintF(#name); }
|
||||
#else
|
||||
#define DECLARE_ARRAY_STUB_PRINT(name)
|
||||
#endif
|
||||
|
||||
|
||||
class KeyedLoadFastElementStub : public CodeStub {
|
||||
public:
|
||||
explicit KeyedLoadFastElementStub() {
|
||||
}
|
||||
|
||||
Major MajorKey() { return KeyedLoadFastElement; }
|
||||
int MinorKey() { return 0; }
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
const char* GetName() { return "KeyedLoadFastElementStub"; }
|
||||
|
||||
DECLARE_ARRAY_STUB_PRINT(KeyedLoadFastElementStub)
|
||||
};
|
||||
|
||||
|
||||
class KeyedStoreFastElementStub : public CodeStub {
|
||||
public:
|
||||
explicit KeyedStoreFastElementStub(bool is_js_array)
|
||||
: is_js_array_(is_js_array) { }
|
||||
|
||||
Major MajorKey() { return KeyedStoreFastElement; }
|
||||
int MinorKey() { return is_js_array_ ? 1 : 0; }
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
const char* GetName() { return "KeyedStoreFastElementStub"; }
|
||||
|
||||
DECLARE_ARRAY_STUB_PRINT(KeyedStoreFastElementStub)
|
||||
|
||||
private:
|
||||
bool is_js_array_;
|
||||
};
|
||||
|
||||
|
||||
class KeyedLoadExternalArrayStub : public CodeStub {
|
||||
public:
|
||||
explicit KeyedLoadExternalArrayStub(ExternalArrayType array_type)
|
||||
: array_type_(array_type) { }
|
||||
|
||||
Major MajorKey() { return KeyedLoadExternalArray; }
|
||||
int MinorKey() { return array_type_; }
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
const char* GetName() { return "KeyedLoadExternalArrayStub"; }
|
||||
|
||||
DECLARE_ARRAY_STUB_PRINT(KeyedLoadExternalArrayStub)
|
||||
|
||||
protected:
|
||||
ExternalArrayType array_type_;
|
||||
};
|
||||
|
||||
|
||||
class KeyedStoreExternalArrayStub : public CodeStub {
|
||||
public:
|
||||
explicit KeyedStoreExternalArrayStub(ExternalArrayType array_type)
|
||||
: array_type_(array_type) { }
|
||||
|
||||
Major MajorKey() { return KeyedStoreExternalArray; }
|
||||
int MinorKey() { return array_type_; }
|
||||
|
||||
void Generate(MacroAssembler* masm);
|
||||
|
||||
const char* GetName() { return "KeyedStoreExternalArrayStub"; }
|
||||
|
||||
DECLARE_ARRAY_STUB_PRINT(KeyedStoreExternalArrayStub)
|
||||
|
||||
protected:
|
||||
ExternalArrayType array_type_;
|
||||
};
|
||||
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_CODE_STUBS_H_
|
||||
|
@@ -287,10 +287,9 @@ DEFINE_bool(native_code_counters, false,
DEFINE_bool(always_compact, false, "Perform compaction on every full GC")
DEFINE_bool(never_compact, false,
            "Never perform compaction on full GC - testing only")
DEFINE_bool(cleanup_ics_at_gc, true,
            "Flush inline caches prior to mark compact collection.")
DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
            "Flush code caches in maps during mark compact cycle.")
DEFINE_bool(cleanup_code_caches_at_gc, true,
            "Flush inline caches prior to mark compact collection and "
            "flush code caches in maps during mark compact cycle.")
DEFINE_int(random_seed, 0,
           "Default seed for initializing random generator "
           "(0, the default, means to use system random).")
39 src/heap.h
@ -1,4 +1,4 @@
|
||||
// Copyright 2010 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -178,8 +178,14 @@ inline Heap* _inline_get_heap_();
|
||||
V(value_of_symbol, "valueOf") \
|
||||
V(InitializeVarGlobal_symbol, "InitializeVarGlobal") \
|
||||
V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
|
||||
V(KeyedLoadSpecialized_symbol, "KeyedLoadSpecialized") \
|
||||
V(KeyedStoreSpecialized_symbol, "KeyedStoreSpecialized") \
|
||||
V(KeyedLoadSpecializedMonomorphic_symbol, \
|
||||
"KeyedLoadSpecializedMonomorphic") \
|
||||
V(KeyedLoadSpecializedPolymorphic_symbol, \
|
||||
"KeyedLoadSpecializedPolymorphic") \
|
||||
V(KeyedStoreSpecializedMonomorphic_symbol, \
|
||||
"KeyedStoreSpecializedMonomorphic") \
|
||||
V(KeyedStoreSpecializedPolymorphic_symbol, \
|
||||
"KeyedStoreSpecializedPolymorphic") \
|
||||
V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
|
||||
V(illegal_access_symbol, "illegal access") \
|
||||
V(out_of_memory_symbol, "out-of-memory") \
|
||||
@ -207,32 +213,7 @@ inline Heap* _inline_get_heap_();
|
||||
V(global_eval_symbol, "GlobalEval") \
|
||||
V(identity_hash_symbol, "v8::IdentityHash") \
|
||||
V(closure_symbol, "(closure)") \
|
||||
V(use_strict, "use strict") \
|
||||
V(KeyedLoadExternalByteArray_symbol, "KeyedLoadExternalByteArray") \
|
||||
V(KeyedLoadExternalUnsignedByteArray_symbol, \
|
||||
"KeyedLoadExternalUnsignedByteArray") \
|
||||
V(KeyedLoadExternalShortArray_symbol, \
|
||||
"KeyedLoadExternalShortArray") \
|
||||
V(KeyedLoadExternalUnsignedShortArray_symbol, \
|
||||
"KeyedLoadExternalUnsignedShortArray") \
|
||||
V(KeyedLoadExternalIntArray_symbol, "KeyedLoadExternalIntArray") \
|
||||
V(KeyedLoadExternalUnsignedIntArray_symbol, \
|
||||
"KeyedLoadExternalUnsignedIntArray") \
|
||||
V(KeyedLoadExternalFloatArray_symbol, "KeyedLoadExternalFloatArray") \
|
||||
V(KeyedLoadExternalDoubleArray_symbol, "KeyedLoadExternalDoubleArray") \
|
||||
V(KeyedLoadExternalPixelArray_symbol, "KeyedLoadExternalPixelArray") \
|
||||
V(KeyedStoreExternalByteArray_symbol, "KeyedStoreExternalByteArray") \
|
||||
V(KeyedStoreExternalUnsignedByteArray_symbol, \
|
||||
"KeyedStoreExternalUnsignedByteArray") \
|
||||
V(KeyedStoreExternalShortArray_symbol, "KeyedStoreExternalShortArray") \
|
||||
V(KeyedStoreExternalUnsignedShortArray_symbol, \
|
||||
"KeyedStoreExternalUnsignedShortArray") \
|
||||
V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \
|
||||
V(KeyedStoreExternalUnsignedIntArray_symbol, \
|
||||
"KeyedStoreExternalUnsignedIntArray") \
|
||||
V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \
|
||||
V(KeyedStoreExternalDoubleArray_symbol, "KeyedStoreExternalDoubleArray") \
|
||||
V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray")
|
||||
V(use_strict, "use strict")
|
||||
|
||||
// Forward declarations.
|
||||
class GCTracer;
|
||||
|
@ -655,7 +655,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
|
||||
char_at_generator.GenerateSlow(masm, call_helper);
|
||||
|
||||
__ bind(&miss);
|
||||
GenerateMiss(masm);
|
||||
GenerateMiss(masm, false);
|
||||
}
|
||||
|
||||
|
||||
@ -698,7 +698,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
|
||||
__ TailCallExternalReference(ref, 2, 1);
|
||||
|
||||
__ bind(&slow);
|
||||
GenerateMiss(masm);
|
||||
GenerateMiss(masm, false);
|
||||
}
|
||||
|
||||
|
||||
@ -1222,7 +1222,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
|
||||
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
|
||||
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : key
|
||||
// -- edx : receiver
|
||||
@ -1237,8 +1237,10 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
|
||||
__ push(ebx); // return address
|
||||
|
||||
// Perform tail call to the entry.
|
||||
ExternalReference ref =
|
||||
ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
|
||||
ExternalReference ref = force_generic
|
||||
? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
|
||||
masm->isolate())
|
||||
: ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
|
||||
__ TailCallExternalReference(ref, 2, 1);
|
||||
}
|
||||
|
||||
@ -1430,7 +1432,7 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
|
||||
}
|
||||
|
||||
|
||||
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
|
||||
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : value
|
||||
// -- ecx : key
|
||||
@ -1445,8 +1447,30 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
|
||||
__ push(ebx);
|
||||
|
||||
// Do tail-call to runtime routine.
|
||||
ExternalReference ref =
|
||||
ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
|
||||
ExternalReference ref = force_generic
|
||||
? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
|
||||
masm->isolate())
|
||||
: ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
|
||||
__ TailCallExternalReference(ref, 3, 1);
|
||||
}
|
||||
|
||||
|
||||
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : value
|
||||
// -- ecx : key
|
||||
// -- edx : receiver
|
||||
// -- esp[0] : return address
|
||||
// -----------------------------------
|
||||
|
||||
__ pop(ebx);
|
||||
__ push(edx);
|
||||
__ push(ecx);
|
||||
__ push(eax);
|
||||
__ push(ebx); // return address
|
||||
|
||||
// Do tail-call to runtime routine.
|
||||
ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
|
||||
__ TailCallExternalReference(ref, 3, 1);
|
||||
}
|
||||
|
||||
|
@ -286,6 +286,21 @@ void MacroAssembler::CheckMap(Register obj,
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::DispatchMap(Register obj,
|
||||
Handle<Map> map,
|
||||
Handle<Code> success,
|
||||
SmiCheckType smi_check_type) {
|
||||
Label fail;
|
||||
if (smi_check_type == DONT_DO_SMI_CHECK) {
|
||||
JumpIfSmi(obj, &fail);
|
||||
}
|
||||
cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
|
||||
j(equal, success);
|
||||
|
||||
bind(&fail);
|
||||
}
|
||||
|
||||
|
||||
Condition MacroAssembler::IsObjectStringType(Register heap_object,
|
||||
Register map,
|
||||
Register instance_type) {
|
||||
|
@ -45,6 +45,7 @@ enum AllocationFlags {
|
||||
RESULT_CONTAINS_TOP = 1 << 1
|
||||
};
|
||||
|
||||
|
||||
// Convenience for platform-independent signatures. We do not normally
|
||||
// distinguish memory operands from other operands on ia32.
|
||||
typedef Operand MemOperand;
|
||||
@ -206,14 +207,22 @@ class MacroAssembler: public Assembler {
|
||||
// Compare instance type for map.
|
||||
void CmpInstanceType(Register map, InstanceType type);
|
||||
|
||||
// Check if the map of an object is equal to a specified map and
|
||||
// branch to label if not. Skip the smi check if not required
|
||||
// (object is known to be a heap object)
|
||||
// Check if the map of an object is equal to a specified map and branch to
|
||||
// label if not. Skip the smi check if not required (object is known to be a
|
||||
// heap object)
|
||||
void CheckMap(Register obj,
|
||||
Handle<Map> map,
|
||||
Label* fail,
|
||||
SmiCheckType smi_check_type);
|
||||
|
||||
// Check if the map of an object is equal to a specified map and branch to a
|
||||
// specified target if equal. Skip the smi check if not required (object is
|
||||
// known to be a heap object)
|
||||
void DispatchMap(Register obj,
|
||||
Handle<Map> map,
|
||||
Handle<Code> success,
|
||||
SmiCheckType smi_check_type);
|
||||
|
||||
// Check if the object in register heap_object is a string. Afterwards the
|
||||
// register map contains the object map and the register instance_type
|
||||
// contains the instance_type. The registers map and instance_type can be the
|
||||
|
@ -713,6 +713,14 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
|
||||
}
|
||||
|
||||
|
||||
void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
|
||||
Code* code = masm->isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_MissForceGeneric);
|
||||
Handle<Code> ic(code);
|
||||
__ jmp(ic, RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
|
||||
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
|
||||
// but may be destroyed if store is successful.
|
||||
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
|
||||
@ -2655,8 +2663,35 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
|
||||
JSObject* receiver) {
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
|
||||
Map* receiver_map) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : value
|
||||
// -- ecx : key
|
||||
// -- edx : receiver
|
||||
// -- esp[0] : return address
|
||||
// -----------------------------------
|
||||
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
|
||||
MaybeObject* maybe_stub =
|
||||
KeyedStoreFastElementStub(is_js_array).TryGetCode();
|
||||
Code* stub;
|
||||
if (!maybe_stub->To(&stub)) return maybe_stub;
|
||||
__ DispatchMap(edx,
|
||||
Handle<Map>(receiver_map),
|
||||
Handle<Code>(stub),
|
||||
DO_SMI_CHECK);
|
||||
|
||||
Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ jmp(ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
// Return the generated code.
|
||||
return GetCode(NORMAL, NULL);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
|
||||
MapList* receiver_maps,
|
||||
CodeList* handler_ics) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : value
|
||||
// -- ecx : key
|
||||
@ -2664,51 +2699,22 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
|
||||
// -- esp[0] : return address
|
||||
// -----------------------------------
|
||||
Label miss;
|
||||
__ JumpIfSmi(edx, &miss);
|
||||
|
||||
// Check that the receiver isn't a smi.
|
||||
__ test(edx, Immediate(kSmiTagMask));
|
||||
__ j(zero, &miss);
|
||||
|
||||
// Check that the map matches.
|
||||
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
|
||||
Immediate(Handle<Map>(receiver->map())));
|
||||
__ j(not_equal, &miss);
|
||||
|
||||
// Check that the key is a smi.
|
||||
__ test(ecx, Immediate(kSmiTagMask));
|
||||
__ j(not_zero, &miss);
|
||||
|
||||
// Get the elements array and make sure it is a fast element array, not 'cow'.
|
||||
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
|
||||
__ cmp(FieldOperand(edi, HeapObject::kMapOffset),
|
||||
Immediate(factory()->fixed_array_map()));
|
||||
__ j(not_equal, &miss);
|
||||
|
||||
// Check that the key is within bounds.
|
||||
if (receiver->IsJSArray()) {
|
||||
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
|
||||
__ j(above_equal, &miss);
|
||||
} else {
|
||||
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // Compare smis.
|
||||
__ j(above_equal, &miss);
|
||||
Register map_reg = ebx;
|
||||
__ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
|
||||
int receiver_count = receiver_maps->length();
|
||||
for (int current = 0; current < receiver_count; ++current) {
|
||||
Handle<Map> map(receiver_maps->at(current));
|
||||
__ cmp(map_reg, map);
|
||||
__ j(equal, Handle<Code>(handler_ics->at(current)));
|
||||
}
|
||||
|
||||
// Do the store and update the write barrier. Make sure to preserve
|
||||
// the value in register eax.
|
||||
__ mov(edx, Operand(eax));
|
||||
__ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax);
|
||||
__ RecordWrite(edi, 0, edx, ecx);
|
||||
|
||||
// Done.
|
||||
__ ret(0);
|
||||
|
||||
// Handle store cache miss.
|
||||
__ bind(&miss);
|
||||
Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ jmp(ic, RelocInfo::CODE_TARGET);
|
||||
Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
|
||||
__ jmp(miss_ic, RelocInfo::CODE_TARGET);
|
||||
|
||||
// Return the generated code.
|
||||
return GetCode(NORMAL, NULL);
|
||||
return GetCode(NORMAL, NULL, MEGAMORPHIC);
|
||||
}
|
||||
|
||||
|
||||
@ -3121,48 +3127,52 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(edx,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;
  __ JumpIfSmi(edx, &miss);

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss);

  // Check that the map matches.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &miss);

  // Get the elements array.
  __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
  __ AssertFastElements(ecx);

  // Check that the key is within bounds.
  __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss);

  // Load the result and make sure it's not the hole.
  __ mov(ebx, Operand(ecx, eax, times_2,
                      FixedArray::kHeaderSize - kHeapObjectTag));
  __ cmp(ebx, factory()->the_hole_value());
  __ j(equal, &miss);
  __ mov(eax, ebx);
  __ ret(0);
  Register map_reg = ebx;
  __ mov(map_reg, FieldOperand(edx, HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Map> map(receiver_maps->at(current));
    __ cmp(map_reg, map);
    __ j(equal, Handle<Code>(handler_ics->at(current)));
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}

@ -3305,36 +3315,82 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
}


MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
    JSObject* receiver, ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, failed_allocation;
  MaybeObject* maybe_stub =
      KeyedLoadExternalArrayStub(array_type).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(edx,
                 Handle<Map>(receiver->map()),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &slow);
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}


MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
    JSObject* receiver, ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  MaybeObject* maybe_stub =
      KeyedStoreExternalArrayStub(array_type).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(edx,
                 Handle<Map>(receiver->map()),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  return GetCode();
}


#undef __
#define __ ACCESS_MASM(masm)

void KeyedLoadStubCompiler::GenerateLoadExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss_force_generic, failed_allocation, slow;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);
  __ j(not_zero, &miss_force_generic);

  // Check that the map matches.
  __ CheckMap(edx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));

  // eax: key, known to be a smi.
  // edx: receiver, known to be a JSObject.
  // ebx: elements object, known to be an external array.
  // Check that the index is in range.
  __ mov(ecx, eax);
  __ SmiUntag(ecx);  // Untag the index.
  __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);
  __ j(above_equal, &miss_force_generic);
  __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
  // ebx: base pointer of external storage
  switch (array_type) {
@ -3441,47 +3497,48 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(

  // Slow case: Jump to runtime.
  __ bind(&slow);
  Counters* counters = isolate()->counters();
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);

  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  __ pop(ebx);
  __ push(edx);  // receiver
  __ push(eax);  // name
  __ push(ebx);  // return address
  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  // Return the generated code.
  return GetCode(flags);
}


MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, check_heap_number;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &slow);
  // Miss case: Jump to runtime.
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}

  // Check that the map matches.
  __ CheckMap(edx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);

void KeyedStoreStubCompiler::GenerateStoreExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss_force_generic, slow, check_heap_number;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);
  __ j(not_zero, &miss_force_generic);

  // Check that the index is in range.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
@ -3560,7 +3617,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
  // edi: elements array
  // ebx: untagged index
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(factory()->heap_number_map()));
         Immediate(masm->isolate()->factory()->heap_number_map()));
  __ j(not_equal, &slow);

  // The WebGL specification leaves the behavior of storing NaN and
@ -3655,6 +3712,9 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(

  // Slow case: call runtime.
  __ bind(&slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_external_array_slow(), 1);

  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
@ -3662,19 +3722,109 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
  //  -- esp[0] : return address
  // -----------------------------------

  __ pop(ebx);
  __ push(edx);
  __ push(ecx);
  __ push(eax);
  __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
  __ push(Immediate(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
  __ push(ebx);  // return address
  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  return GetCode(flags);
  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}

void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &miss_force_generic);

  // Get the elements array.
  __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
  __ AssertFastElements(ecx);

  // Check that the key is within bounds.
  __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss_force_generic);

  // Load the result and make sure it's not the hole.
  __ mov(ebx, Operand(ecx, eax, times_2,
                      FixedArray::kHeaderSize - kHeapObjectTag));
  __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
  __ j(equal, &miss_force_generic);
  __ mov(eax, ebx);
  __ ret(0);

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}


void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
                                                      bool is_js_array) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss_force_generic);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ j(not_equal, &miss_force_generic);

  if (is_js_array) {
    // Check that the key is within bounds.
    __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // smis.
    __ j(above_equal, &miss_force_generic);
  } else {
    // Check that the key is within bounds.
    __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));  // smis.
    __ j(above_equal, &miss_force_generic);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register eax.
  __ mov(edx, Operand(eax));
  __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax);
  __ RecordWrite(edi, 0, edx, ecx);

  // Done.
  __ ret(0);

  // Handle store cache miss, replacing the ic with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
}

src/ic.cc
@ -1,4 +1,4 @@
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -67,7 +67,33 @@ void IC::TraceIC(const char* type,
  State new_state = StateFrom(new_target,
                              HEAP->undefined_value(),
                              HEAP->undefined_value());
  PrintF("[%s (%c->%c)%s", type,
  PrintF("[%s in ", type);
  StackFrameIterator it;
  while (it.frame()->fp() != this->fp()) it.Advance();
  StackFrame* raw_frame = it.frame();
  if (raw_frame->is_internal()) {
    Isolate* isolate = new_target->GetIsolate();
    Code* apply_builtin = isolate->builtins()->builtin(
        Builtins::kFunctionApply);
    if (raw_frame->unchecked_code() == apply_builtin) {
      PrintF("apply from ");
      it.Advance();
      raw_frame = it.frame();
    }
  }
  if (raw_frame->is_java_script()) {
    JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
    Code* js_code = frame->unchecked_code();
    // Find the function on the stack and both the active code for the
    // function and the original code.
    JSFunction* function = JSFunction::cast(frame->function());
    function->PrintName();
    int code_offset = address() - js_code->instruction_start();
    PrintF("+%d", code_offset);
  } else {
    PrintF("<unknown>");
  }
  PrintF(" (%c->%c)%s",
         TransitionMarkFromState(old_state),
         TransitionMarkFromState(new_state),
         extra_info);
@ -274,11 +300,9 @@ void IC::Clear(Address address) {
  switch (target->kind()) {
    case Code::LOAD_IC: return LoadIC::Clear(address, target);
    case Code::KEYED_LOAD_IC:
    case Code::KEYED_EXTERNAL_ARRAY_LOAD_IC:
      return KeyedLoadIC::Clear(address, target);
    case Code::STORE_IC: return StoreIC::Clear(address, target);
    case Code::KEYED_STORE_IC:
    case Code::KEYED_EXTERNAL_ARRAY_STORE_IC:
      return KeyedStoreIC::Clear(address, target);
    case Code::CALL_IC: return CallIC::Clear(address, target);
    case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
@ -1032,9 +1056,49 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
}


String* KeyedLoadIC::GetStubNameForCache(IC::State ic_state) {
  if (ic_state == MONOMORPHIC) {
    return isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol();
  } else {
    ASSERT(ic_state == MEGAMORPHIC);
    return isolate()->heap()->KeyedLoadSpecializedPolymorphic_symbol();
  }
}


MaybeObject* KeyedLoadIC::GetFastElementStubWithoutMapCheck(
    bool is_js_array) {
  return KeyedLoadFastElementStub().TryGetCode();
}


MaybeObject* KeyedLoadIC::GetExternalArrayStubWithoutMapCheck(
    ExternalArrayType array_type) {
  return KeyedLoadExternalArrayStub(array_type).TryGetCode();
}


MaybeObject* KeyedLoadIC::ConstructMegamorphicStub(
    MapList* receiver_maps,
    CodeList* targets,
    StrictModeFlag strict_mode) {
  Object* object;
  KeyedLoadStubCompiler compiler;
  MaybeObject* maybe_code = compiler.CompileLoadMegamorphic(receiver_maps,
                                                            targets);
  if (!maybe_code->ToObject(&object)) return maybe_code;
  isolate()->counters()->keyed_load_polymorphic_stubs()->Increment();
  PROFILE(isolate(), CodeCreateEvent(
      Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG,
      Code::cast(object), 0));
  return object;
}


MaybeObject* KeyedLoadIC::Load(State state,
                               Handle<Object> object,
                               Handle<Object> key) {
                               Handle<Object> key,
                               bool force_generic_stub) {
  // Check for values that can be converted into a symbol.
  // TODO(1295): Remove this code.
  HandleScope scope(isolate());
@ -1160,34 +1224,31 @@ MaybeObject* KeyedLoadIC::Load(State state,

  if (use_ic) {
    Code* stub = generic_stub();
    if (state == UNINITIALIZED) {
    if (!force_generic_stub) {
      if (object->IsString() && key->IsNumber()) {
        stub = string_stub();
        if (state == UNINITIALIZED) {
          stub = string_stub();
        }
      } else if (object->IsJSObject()) {
        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
        if (receiver->HasExternalArrayElements()) {
          MaybeObject* probe =
              isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray(
                  *receiver, false, kNonStrictMode);
          stub = probe->IsFailure() ?
              NULL : Code::cast(probe->ToObjectUnchecked());
        } else if (receiver->HasIndexedInterceptor()) {
        JSObject* receiver = JSObject::cast(*object);
        if (receiver->HasIndexedInterceptor()) {
          stub = indexed_interceptor_stub();
        } else if (key->IsSmi() &&
                   receiver->map()->has_fast_elements()) {
          MaybeObject* probe =
              isolate()->stub_cache()->ComputeKeyedLoadSpecialized(*receiver);
          stub = probe->IsFailure() ?
              NULL : Code::cast(probe->ToObjectUnchecked());
        } else if (key->IsSmi()) {
          MaybeObject* maybe_stub = ComputeStub(receiver,
                                                false,
                                                kNonStrictMode,
                                                stub);
          stub = maybe_stub->IsFailure() ?
              NULL : Code::cast(maybe_stub->ToObjectUnchecked());
        }
      }
    }
    if (stub != NULL) set_target(stub);
  }

#ifdef DEBUG
  TraceIC("KeyedLoadIC", key, state, target());
    TraceIC("KeyedLoadIC", key, state, target());
#endif  // DEBUG
  }

  // Get the property.
  return Runtime::GetObjectProperty(isolate(), object, key);
@ -1484,11 +1545,256 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
}


static bool AddOneReceiverMapIfMissing(MapList* receiver_maps,
                                       Map* new_receiver_map) {
  for (int current = 0; current < receiver_maps->length(); ++current) {
    if (receiver_maps->at(current) == new_receiver_map) {
      return false;
    }
  }
  receiver_maps->Add(new_receiver_map);
  return true;
}


void KeyedIC::GetReceiverMapsForStub(Code* stub, MapList* result) {
  ASSERT(stub->is_inline_cache_stub());
  if (stub == string_stub()) {
    return result->Add(isolate()->heap()->string_map());
  } else if (stub->is_keyed_load_stub() || stub->is_keyed_store_stub()) {
    if (stub->ic_state() == MONOMORPHIC) {
      result->Add(Map::cast(stub->FindFirstMap()));
    } else {
      ASSERT(stub->ic_state() == MEGAMORPHIC);
      AssertNoAllocation no_allocation;
      int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
      for (RelocIterator it(stub, mask); !it.done(); it.next()) {
        RelocInfo* info = it.rinfo();
        Object* object = info->target_object();
        ASSERT(object->IsMap());
        result->Add(Map::cast(object));
      }
    }
  }
}

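The two helpers above are the bookkeeping half of the scheme: AddOneReceiverMapIfMissing deduplicates the maps seen at a call site, and GetReceiverMapsForStub recovers the map set from an already-compiled stub (one map for MONOMORPHIC, every embedded map for MEGAMORPHIC). A small standalone sketch of the deduplication step, using a std::vector stand-in for V8's MapList (illustrative only, not part of this CL):

    // Sketch only: mirrors AddOneReceiverMapIfMissing with a standard container.
    #include <cstddef>
    #include <vector>

    struct Map;

    static bool AddIfMissing(std::vector<Map*>* maps, Map* candidate) {
      for (size_t i = 0; i < maps->size(); ++i) {
        if ((*maps)[i] == candidate) return false;  // map already recorded
      }
      maps->push_back(candidate);
      return true;  // the call site saw a genuinely new receiver map
    }
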
MaybeObject* KeyedIC::ComputeStub(JSObject* receiver,
                                  bool is_store,
                                  StrictModeFlag strict_mode,
                                  Code* generic_stub) {
  State ic_state = target()->ic_state();
  Code* monomorphic_stub;
  // Always compute the MONOMORPHIC stub, even if the MEGAMORPHIC stub ends up
  // being used. This is necessary because the megamorphic stub needs to have
  // access to more information than what is stored in the receiver map in some
  // cases (external arrays need the array type from the MONOMORPHIC stub).
  MaybeObject* maybe_stub = ComputeMonomorphicStub(receiver,
                                                   is_store,
                                                   strict_mode,
                                                   generic_stub);
  if (!maybe_stub->To(&monomorphic_stub)) return maybe_stub;

  if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
    return monomorphic_stub;
  }
  ASSERT(target() != generic_stub);

  // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
  // via megamorphic stubs, since they don't have a map in their relocation info
  // and so the stubs can't be harvested for the object needed for a map check.
  if (target()->type() != NORMAL) {
    return generic_stub;
  }

  // Determine the list of receiver maps that this call site has seen,
  // adding the map that was just encountered.
  MapList target_receiver_maps;
  GetReceiverMapsForStub(target(), &target_receiver_maps);
  if (!AddOneReceiverMapIfMissing(&target_receiver_maps, receiver->map())) {
    // If the miss wasn't due to an unseen map, a MEGAMORPHIC stub
    // won't help, use the generic stub.
    return generic_stub;
  }

  // TODO(1385): Currently MEGAMORPHIC stubs are cached in the receiver map stub
  // cache, but that can put receiver types together from unrelated call sites
  // into the same stub--they always handle the union of all receiver maps seen
  // at all call sites involving the receiver map. This is only an
  // approximation: ideally, there would be a global cache that mapped sets of
  // receiver maps to MEGAMORPHIC stubs. The complexity of the MEGAMORPHIC stub
  // computation also leads to direct manipulation of the stub cache from the IC
  // code, which the global cache solution would avoid.
  Code::Kind kind = this->kind();
  Code::Flags flags = Code::ComputeFlags(kind,
                                         NOT_IN_LOOP,
                                         MEGAMORPHIC,
                                         strict_mode);
  String* megamorphic_name = GetStubNameForCache(MEGAMORPHIC);
  Object* maybe_cached_stub = receiver->map()->FindInCodeCache(megamorphic_name,
                                                               flags);

  // Create a set of all receiver maps that have been seen at the IC call site
  // and those seen by the MEGAMORPHIC cached stub, if that's the stub that's
  // been selected.
  MapList receiver_maps;
  if (!maybe_cached_stub->IsUndefined()) {
    GetReceiverMapsForStub(Code::cast(maybe_cached_stub), &receiver_maps);
  }
  bool added_map = false;
  for (int i = 0; i < target_receiver_maps.length(); ++i) {
    if (AddOneReceiverMapIfMissing(&receiver_maps,
                                   target_receiver_maps.at(i))) {
      added_map = true;
    }
  }
  ASSERT(receiver_maps.length() > 0);

  // If the maximum number of receiver maps has been exceeded, use the Generic
  // version of the IC.
  if (receiver_maps.length() > KeyedIC::kMaxKeyedPolymorphism) {
    return generic_stub;
  }

  // If no maps have been seen at the call site that aren't in the cached
  // stub, then use it.
  if (!added_map) {
    ASSERT(!maybe_cached_stub->IsUndefined());
    ASSERT(maybe_cached_stub->IsCode());
    return Code::cast(maybe_cached_stub);
  }

  // Lookup all of the receiver maps in the cache, they should all already
  // have MONOMORPHIC stubs.
  CodeList handler_ics(KeyedIC::kMaxKeyedPolymorphism);
  for (int current = 0; current < receiver_maps.length(); ++current) {
    Map* receiver_map(receiver_maps.at(current));
    MaybeObject* maybe_cached_stub = ComputeMonomorphicStubWithoutMapCheck(
        receiver_map,
        strict_mode,
        generic_stub);
    Code* cached_stub;
    if (!maybe_cached_stub->To(&cached_stub)) {
      return maybe_cached_stub;
    }
    handler_ics.Add(cached_stub);
  }

  Code* stub;
  // Build the MEGAMORPHIC stub.
  maybe_stub = ConstructMegamorphicStub(&receiver_maps,
                                        &handler_ics,
                                        strict_mode);
  if (!maybe_stub->To(&stub)) return maybe_stub;

  MaybeObject* maybe_update = receiver->UpdateMapCodeCache(
      megamorphic_name,
      stub);
  if (maybe_update->IsFailure()) return maybe_update;
  return stub;
}

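In short, ComputeStub implements the policy from the CL description: stay MONOMORPHIC after the first map, upgrade to a MEGAMORPHIC stub dispatching over up to kMaxKeyedPolymorphism maps, and fall back to the generic stub when a miss brings no new map or the cap is exceeded. A simplified decision sketch (not the actual V8 API; code-cache lookups and failure propagation omitted):

    // Sketch only: the selection policy realized by KeyedIC::ComputeStub.
    enum StubChoice { USE_MONOMORPHIC, USE_MEGAMORPHIC, USE_GENERIC };

    StubChoice ChooseStub(int maps_seen_before,    // maps already handled by the current target
                          bool new_map_added,      // did this miss introduce an unseen map?
                          int max_polymorphism) {  // KeyedIC::kMaxKeyedPolymorphism == 4
      if (maps_seen_before == 0) return USE_MONOMORPHIC;            // first transition
      if (!new_map_added) return USE_GENERIC;                       // miss without a new map
      if (maps_seen_before + 1 > max_polymorphism) return USE_GENERIC;
      return USE_MEGAMORPHIC;                                       // dispatch over all seen maps
    }
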
MaybeObject* KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
    Map* receiver_map,
    StrictModeFlag strict_mode,
    Code* generic_stub) {
  if ((receiver_map->instance_type() & kNotStringTag) == 0) {
    ASSERT(string_stub() != NULL);
    return string_stub();
  } else if (receiver_map->has_external_array_elements()) {
    // Determine the array type from the default MONOMORPHIC already generated
    // stub. There is no other way to determine the type of the external array
    // directly from the receiver type.
    Code::Kind kind = this->kind();
    Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
                                                      NORMAL,
                                                      strict_mode);
    String* monomorphic_name = GetStubNameForCache(MONOMORPHIC);
    Object* maybe_default_stub = receiver_map->FindInCodeCache(monomorphic_name,
                                                               flags);
    if (maybe_default_stub->IsUndefined()) {
      return generic_stub;
    }
    Code* default_stub = Code::cast(maybe_default_stub);
    return GetExternalArrayStubWithoutMapCheck(
        default_stub->external_array_type());
  } else if (receiver_map->has_fast_elements()) {
    bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
    return GetFastElementStubWithoutMapCheck(is_js_array);
  } else {
    return generic_stub;
  }
}


MaybeObject* KeyedIC::ComputeMonomorphicStub(JSObject* receiver,
                                             bool is_store,
                                             StrictModeFlag strict_mode,
                                             Code* generic_stub) {
  Code* result = NULL;
  if (receiver->HasExternalArrayElements()) {
    MaybeObject* maybe_stub =
        isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray(
            receiver, is_store, strict_mode);
    if (!maybe_stub->To(&result)) return maybe_stub;
  } else if (receiver->map()->has_fast_elements()) {
    MaybeObject* maybe_stub =
        isolate()->stub_cache()->ComputeKeyedLoadOrStoreFastElement(
            receiver, is_store, strict_mode);
    if (!maybe_stub->To(&result)) return maybe_stub;
  } else {
    result = generic_stub;
  }
  return result;
}

||||
String* KeyedStoreIC::GetStubNameForCache(IC::State ic_state) {
|
||||
if (ic_state == MONOMORPHIC) {
|
||||
return isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol();
|
||||
} else {
|
||||
ASSERT(ic_state == MEGAMORPHIC);
|
||||
return isolate()->heap()->KeyedStoreSpecializedPolymorphic_symbol();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreIC::GetFastElementStubWithoutMapCheck(
|
||||
bool is_js_array) {
|
||||
return KeyedStoreFastElementStub(is_js_array).TryGetCode();
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreIC::GetExternalArrayStubWithoutMapCheck(
|
||||
ExternalArrayType array_type) {
|
||||
return KeyedStoreExternalArrayStub(array_type).TryGetCode();
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreIC::ConstructMegamorphicStub(
|
||||
MapList* receiver_maps,
|
||||
CodeList* targets,
|
||||
StrictModeFlag strict_mode) {
|
||||
Object* object;
|
||||
KeyedStoreStubCompiler compiler(strict_mode);
|
||||
MaybeObject* maybe_code = compiler.CompileStoreMegamorphic(receiver_maps,
|
||||
targets);
|
||||
if (!maybe_code->ToObject(&object)) return maybe_code;
|
||||
isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
|
||||
PROFILE(isolate(), CodeCreateEvent(
|
||||
Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG,
|
||||
Code::cast(object), 0));
|
||||
return object;
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* KeyedStoreIC::Store(State state,
|
||||
StrictModeFlag strict_mode,
|
||||
Handle<Object> object,
|
||||
Handle<Object> key,
|
||||
Handle<Object> value) {
|
||||
Handle<Object> value,
|
||||
bool force_generic) {
|
||||
if (key->IsSymbol()) {
|
||||
Handle<String> name = Handle<String>::cast(key);
|
||||
|
||||
@ -1530,29 +1836,27 @@ MaybeObject* KeyedStoreIC::Store(State state,
|
||||
ASSERT(!(use_ic && object->IsJSGlobalProxy()));
|
||||
|
||||
if (use_ic) {
|
||||
Code* stub =
|
||||
(strict_mode == kStrictMode) ? generic_stub_strict() : generic_stub();
|
||||
if (state == UNINITIALIZED) {
|
||||
if (object->IsJSObject()) {
|
||||
Handle<JSObject> receiver = Handle<JSObject>::cast(object);
|
||||
if (receiver->HasExternalArrayElements()) {
|
||||
MaybeObject* probe =
|
||||
isolate()->stub_cache()->ComputeKeyedLoadOrStoreExternalArray(
|
||||
*receiver, true, strict_mode);
|
||||
stub = probe->IsFailure() ?
|
||||
NULL : Code::cast(probe->ToObjectUnchecked());
|
||||
} else if (key->IsSmi() && receiver->map()->has_fast_elements()) {
|
||||
MaybeObject* probe =
|
||||
isolate()->stub_cache()->ComputeKeyedStoreSpecialized(
|
||||
*receiver, strict_mode);
|
||||
stub = probe->IsFailure() ?
|
||||
NULL : Code::cast(probe->ToObjectUnchecked());
|
||||
}
|
||||
Code* stub = (strict_mode == kStrictMode)
|
||||
? generic_stub_strict()
|
||||
: generic_stub();
|
||||
if (!force_generic) {
|
||||
if (object->IsJSObject() && key->IsSmi()) {
|
||||
JSObject* receiver = JSObject::cast(*object);
|
||||
MaybeObject* maybe_stub = ComputeStub(receiver,
|
||||
true,
|
||||
strict_mode,
|
||||
stub);
|
||||
stub = maybe_stub->IsFailure() ?
|
||||
NULL : Code::cast(maybe_stub->ToObjectUnchecked());
|
||||
}
|
||||
}
|
||||
if (stub != NULL) set_target(stub);
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
TraceIC("KeyedStoreIC", key, state, target());
|
||||
#endif
|
||||
|
||||
// Set the property.
|
||||
return Runtime::SetObjectProperty(
|
||||
isolate(), object , key, value, NONE, strict_mode);
|
||||
@ -1721,7 +2025,16 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) {
|
||||
ASSERT(args.length() == 2);
|
||||
KeyedLoadIC ic(isolate);
|
||||
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
|
||||
return ic.Load(state, args.at<Object>(0), args.at<Object>(1));
|
||||
return ic.Load(state, args.at<Object>(0), args.at<Object>(1), false);
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) {
|
||||
NoHandleAllocation na;
|
||||
ASSERT(args.length() == 2);
|
||||
KeyedLoadIC ic(isolate);
|
||||
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
|
||||
return ic.Load(state, args.at<Object>(0), args.at<Object>(1), true);
|
||||
}
|
||||
|
||||
|
||||
@ -1805,7 +2118,42 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) {
|
||||
static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
|
||||
args.at<Object>(0),
|
||||
args.at<Object>(1),
|
||||
args.at<Object>(2));
|
||||
args.at<Object>(2),
|
||||
false);
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) {
|
||||
NoHandleAllocation na;
|
||||
ASSERT(args.length() == 3);
|
||||
KeyedStoreIC ic(isolate);
|
||||
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
|
||||
Handle<Object> object = args.at<Object>(0);
|
||||
Handle<Object> key = args.at<Object>(1);
|
||||
Handle<Object> value = args.at<Object>(2);
|
||||
StrictModeFlag strict_mode =
|
||||
static_cast<StrictModeFlag>(extra_ic_state & kStrictMode);
|
||||
return Runtime::SetObjectProperty(isolate,
|
||||
object,
|
||||
key,
|
||||
value,
|
||||
NONE,
|
||||
strict_mode);
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) {
|
||||
NoHandleAllocation na;
|
||||
ASSERT(args.length() == 3);
|
||||
KeyedStoreIC ic(isolate);
|
||||
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
|
||||
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
|
||||
return ic.Store(state,
|
||||
static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
|
||||
args.at<Object>(0),
|
||||
args.at<Object>(1),
|
||||
args.at<Object>(2),
|
||||
true);
|
||||
}
|
||||
|
||||
|
||||
|
src/ic.h
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2009 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -39,12 +39,15 @@ namespace internal {
|
||||
#define IC_UTIL_LIST(ICU) \
|
||||
ICU(LoadIC_Miss) \
|
||||
ICU(KeyedLoadIC_Miss) \
|
||||
ICU(KeyedLoadIC_MissForceGeneric) \
|
||||
ICU(CallIC_Miss) \
|
||||
ICU(KeyedCallIC_Miss) \
|
||||
ICU(StoreIC_Miss) \
|
||||
ICU(StoreIC_ArrayLength) \
|
||||
ICU(SharedStoreIC_ExtendStorage) \
|
||||
ICU(KeyedStoreIC_Miss) \
|
||||
ICU(KeyedStoreIC_MissForceGeneric) \
|
||||
ICU(KeyedStoreIC_Slow) \
|
||||
/* Utilities for IC stubs. */ \
|
||||
ICU(LoadCallbackProperty) \
|
||||
ICU(StoreCallbackProperty) \
|
||||
@ -142,11 +145,11 @@ class IC {
|
||||
void set_target(Code* code) { SetTargetAtAddress(address(), code); }
|
||||
|
||||
#ifdef DEBUG
|
||||
static void TraceIC(const char* type,
|
||||
Handle<Object> name,
|
||||
State old_state,
|
||||
Code* new_target,
|
||||
const char* extra_info = "");
|
||||
void TraceIC(const char* type,
|
||||
Handle<Object> name,
|
||||
State old_state,
|
||||
Code* new_target,
|
||||
const char* extra_info = "");
|
||||
#endif
|
||||
|
||||
Failure* TypeError(const char* type,
|
||||
@ -325,23 +328,72 @@ class LoadIC: public IC {
|
||||
};
|
||||
|
||||
|
||||
class KeyedLoadIC: public IC {
class KeyedIC: public IC {
 public:
  explicit KeyedLoadIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {
    ASSERT(target()->is_keyed_load_stub() ||
           target()->is_external_array_load_stub());
  explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {}
  virtual ~KeyedIC() {}

  static const int kMaxKeyedPolymorphism = 4;

  virtual MaybeObject* GetFastElementStubWithoutMapCheck(
      bool is_js_array) = 0;

  virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
      ExternalArrayType array_type) = 0;

 protected:
  virtual Code* string_stub() {
    return NULL;
  }

  virtual Code::Kind kind() const = 0;

  virtual String* GetStubNameForCache(IC::State ic_state) = 0;

  MaybeObject* ComputeStub(JSObject* receiver,
                           bool is_store,
                           StrictModeFlag strict_mode,
                           Code* default_stub);

  virtual MaybeObject* ConstructMegamorphicStub(
      MapList* receiver_maps,
      CodeList* targets,
      StrictModeFlag strict_mode) = 0;

 private:
  void GetReceiverMapsForStub(Code* stub, MapList* result);

  MaybeObject* ComputeMonomorphicStubWithoutMapCheck(
      Map* receiver_map,
      StrictModeFlag strict_mode,
      Code* generic_stub);

  MaybeObject* ComputeMonomorphicStub(JSObject* receiver,
                                      bool is_store,
                                      StrictModeFlag strict_mode,
                                      Code* default_stub);
};

class KeyedLoadIC: public KeyedIC {
|
||||
public:
|
||||
explicit KeyedLoadIC(Isolate* isolate) : KeyedIC(isolate) {
|
||||
ASSERT(target()->is_keyed_load_stub());
|
||||
}
|
||||
|
||||
MUST_USE_RESULT MaybeObject* Load(State state,
|
||||
Handle<Object> object,
|
||||
Handle<Object> key);
|
||||
Handle<Object> key,
|
||||
bool force_generic_stub);
|
||||
|
||||
// Code generator routines.
|
||||
static void GenerateMiss(MacroAssembler* masm);
|
||||
static void GenerateMiss(MacroAssembler* masm, bool force_generic);
|
||||
static void GenerateRuntimeGetProperty(MacroAssembler* masm);
|
||||
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
|
||||
static void GenerateInitialize(MacroAssembler* masm) {
|
||||
GenerateMiss(masm, false);
|
||||
}
|
||||
static void GeneratePreMonomorphic(MacroAssembler* masm) {
|
||||
GenerateMiss(masm);
|
||||
GenerateMiss(masm, false);
|
||||
}
|
||||
static void GenerateGeneric(MacroAssembler* masm);
|
||||
static void GenerateString(MacroAssembler* masm);
|
||||
@ -355,6 +407,27 @@ class KeyedLoadIC: public IC {
|
||||
static const int kSlowCaseBitFieldMask =
|
||||
(1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor);
|
||||
|
||||
virtual MaybeObject* GetFastElementStubWithoutMapCheck(
|
||||
bool is_js_array);
|
||||
|
||||
virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
|
||||
ExternalArrayType array_type);
|
||||
|
||||
protected:
|
||||
virtual Code::Kind kind() const { return Code::KEYED_LOAD_IC; }
|
||||
|
||||
virtual String* GetStubNameForCache(IC::State ic_state);
|
||||
|
||||
virtual MaybeObject* ConstructMegamorphicStub(
|
||||
MapList* receiver_maps,
|
||||
CodeList* targets,
|
||||
StrictModeFlag strict_mode);
|
||||
|
||||
virtual Code* string_stub() {
|
||||
return isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_String);
|
||||
}
|
||||
|
||||
private:
|
||||
// Update the inline cache.
|
||||
void UpdateCaches(LookupResult* lookup,
|
||||
@ -379,11 +452,6 @@ class KeyedLoadIC: public IC {
|
||||
return isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_PreMonomorphic);
|
||||
}
|
||||
Code* string_stub() {
|
||||
return isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_String);
|
||||
}
|
||||
|
||||
Code* indexed_interceptor_stub() {
|
||||
return isolate()->builtins()->builtin(
|
||||
Builtins::kKeyedLoadIC_IndexedInterceptor);
|
||||
@ -466,24 +534,46 @@ class StoreIC: public IC {
|
||||
};
|
||||
|
||||
|
||||
class KeyedStoreIC: public IC {
|
||||
class KeyedStoreIC: public KeyedIC {
|
||||
public:
|
||||
explicit KeyedStoreIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
|
||||
explicit KeyedStoreIC(Isolate* isolate) : KeyedIC(isolate) {
|
||||
ASSERT(target()->is_keyed_store_stub());
|
||||
}
|
||||
|
||||
MUST_USE_RESULT MaybeObject* Store(State state,
|
||||
StrictModeFlag strict_mode,
|
||||
StrictModeFlag strict_mode,
|
||||
Handle<Object> object,
|
||||
Handle<Object> name,
|
||||
Handle<Object> value);
|
||||
Handle<Object> value,
|
||||
bool force_generic);
|
||||
|
||||
// Code generators for stub routines. Only called once at startup.
|
||||
static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
|
||||
static void GenerateMiss(MacroAssembler* masm);
|
||||
static void GenerateInitialize(MacroAssembler* masm) {
|
||||
GenerateMiss(masm, false);
|
||||
}
|
||||
static void GenerateMiss(MacroAssembler* masm, bool force_generic);
|
||||
static void GenerateSlow(MacroAssembler* masm);
|
||||
static void GenerateRuntimeSetProperty(MacroAssembler* masm,
|
||||
StrictModeFlag strict_mode);
|
||||
static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
|
||||
|
||||
private:
|
||||
virtual MaybeObject* GetFastElementStubWithoutMapCheck(
|
||||
bool is_js_array);
|
||||
|
||||
virtual MaybeObject* GetExternalArrayStubWithoutMapCheck(
|
||||
ExternalArrayType array_type);
|
||||
|
||||
protected:
|
||||
virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; }
|
||||
|
||||
virtual String* GetStubNameForCache(IC::State ic_state);
|
||||
|
||||
virtual MaybeObject* ConstructMegamorphicStub(
|
||||
MapList* receiver_maps,
|
||||
CodeList* targets,
|
||||
StrictModeFlag strict_mode);
|
||||
|
||||
private:
|
||||
// Update the inline cache.
|
||||
void UpdateCaches(LookupResult* lookup,
|
||||
State state,
|
||||
|
@ -164,6 +164,11 @@ class List {
|
||||
DISALLOW_COPY_AND_ASSIGN(List);
|
||||
};
|
||||
|
||||
class Map;
|
||||
class Code;
|
||||
typedef List<Map*> MapList;
|
||||
typedef List<Code*> CodeList;
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_LIST_H_
|
||||
|
src/log.cc
@ -1,4 +1,4 @@
|
||||
// Copyright 2009 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -1620,10 +1620,6 @@ void Logger::LogCodeObject(Object* object) {
|
||||
description = "A keyed load IC from the snapshot";
|
||||
tag = Logger::KEYED_LOAD_IC_TAG;
|
||||
break;
|
||||
case Code::KEYED_EXTERNAL_ARRAY_LOAD_IC:
|
||||
description = "A keyed external array load IC from the snapshot";
|
||||
tag = Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG;
|
||||
break;
|
||||
case Code::LOAD_IC:
|
||||
description = "A load IC from the snapshot";
|
||||
tag = Logger::LOAD_IC_TAG;
|
||||
@ -1636,10 +1632,6 @@ void Logger::LogCodeObject(Object* object) {
|
||||
description = "A keyed store IC from the snapshot";
|
||||
tag = Logger::KEYED_STORE_IC_TAG;
|
||||
break;
|
||||
case Code::KEYED_EXTERNAL_ARRAY_STORE_IC:
|
||||
description = "A keyed external array store IC from the snapshot";
|
||||
tag = Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG;
|
||||
break;
|
||||
case Code::CALL_IC:
|
||||
description = "A call IC from the snapshot";
|
||||
tag = Logger::CALL_IC_TAG;
|
||||
|
src/log.h
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2008 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -90,49 +90,51 @@ class Ticker;
|
||||
#define LOG(isolate, Call) ((void) 0)
|
||||
#endif
|
||||
|
||||
#define LOG_EVENTS_AND_TAGS_LIST(V) \
|
||||
V(CODE_CREATION_EVENT, "code-creation") \
|
||||
V(CODE_MOVE_EVENT, "code-move") \
|
||||
V(CODE_DELETE_EVENT, "code-delete") \
|
||||
V(CODE_MOVING_GC, "code-moving-gc") \
|
||||
V(SHARED_FUNC_MOVE_EVENT, "sfi-move") \
|
||||
V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \
|
||||
V(SNAPSHOT_CODE_NAME_EVENT, "snapshot-code-name") \
|
||||
V(TICK_EVENT, "tick") \
|
||||
V(REPEAT_META_EVENT, "repeat") \
|
||||
V(BUILTIN_TAG, "Builtin") \
|
||||
V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \
|
||||
V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \
|
||||
V(CALL_IC_TAG, "CallIC") \
|
||||
V(CALL_INITIALIZE_TAG, "CallInitialize") \
|
||||
V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \
|
||||
V(CALL_MISS_TAG, "CallMiss") \
|
||||
V(CALL_NORMAL_TAG, "CallNormal") \
|
||||
V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \
|
||||
V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \
|
||||
V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \
|
||||
"KeyedCallDebugPrepareStepIn") \
|
||||
V(KEYED_CALL_IC_TAG, "KeyedCallIC") \
|
||||
V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \
|
||||
V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \
|
||||
V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \
|
||||
V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \
|
||||
V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \
|
||||
V(CALLBACK_TAG, "Callback") \
|
||||
V(EVAL_TAG, "Eval") \
|
||||
V(FUNCTION_TAG, "Function") \
|
||||
V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
|
||||
V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \
|
||||
V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
|
||||
V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC")\
|
||||
V(LAZY_COMPILE_TAG, "LazyCompile") \
|
||||
V(LOAD_IC_TAG, "LoadIC") \
|
||||
V(REG_EXP_TAG, "RegExp") \
|
||||
V(SCRIPT_TAG, "Script") \
|
||||
V(STORE_IC_TAG, "StoreIC") \
|
||||
V(STUB_TAG, "Stub") \
|
||||
V(NATIVE_FUNCTION_TAG, "Function") \
|
||||
V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \
|
||||
#define LOG_EVENTS_AND_TAGS_LIST(V) \
|
||||
V(CODE_CREATION_EVENT, "code-creation") \
|
||||
V(CODE_MOVE_EVENT, "code-move") \
|
||||
V(CODE_DELETE_EVENT, "code-delete") \
|
||||
V(CODE_MOVING_GC, "code-moving-gc") \
|
||||
V(SHARED_FUNC_MOVE_EVENT, "sfi-move") \
|
||||
V(SNAPSHOT_POSITION_EVENT, "snapshot-pos") \
|
||||
V(SNAPSHOT_CODE_NAME_EVENT, "snapshot-code-name") \
|
||||
V(TICK_EVENT, "tick") \
|
||||
V(REPEAT_META_EVENT, "repeat") \
|
||||
V(BUILTIN_TAG, "Builtin") \
|
||||
V(CALL_DEBUG_BREAK_TAG, "CallDebugBreak") \
|
||||
V(CALL_DEBUG_PREPARE_STEP_IN_TAG, "CallDebugPrepareStepIn") \
|
||||
V(CALL_IC_TAG, "CallIC") \
|
||||
V(CALL_INITIALIZE_TAG, "CallInitialize") \
|
||||
V(CALL_MEGAMORPHIC_TAG, "CallMegamorphic") \
|
||||
V(CALL_MISS_TAG, "CallMiss") \
|
||||
V(CALL_NORMAL_TAG, "CallNormal") \
|
||||
V(CALL_PRE_MONOMORPHIC_TAG, "CallPreMonomorphic") \
|
||||
V(KEYED_CALL_DEBUG_BREAK_TAG, "KeyedCallDebugBreak") \
|
||||
V(KEYED_CALL_DEBUG_PREPARE_STEP_IN_TAG, \
|
||||
"KeyedCallDebugPrepareStepIn") \
|
||||
V(KEYED_CALL_IC_TAG, "KeyedCallIC") \
|
||||
V(KEYED_CALL_INITIALIZE_TAG, "KeyedCallInitialize") \
|
||||
V(KEYED_CALL_MEGAMORPHIC_TAG, "KeyedCallMegamorphic") \
|
||||
V(KEYED_CALL_MISS_TAG, "KeyedCallMiss") \
|
||||
V(KEYED_CALL_NORMAL_TAG, "KeyedCallNormal") \
|
||||
V(KEYED_CALL_PRE_MONOMORPHIC_TAG, "KeyedCallPreMonomorphic") \
|
||||
V(CALLBACK_TAG, "Callback") \
|
||||
V(EVAL_TAG, "Eval") \
|
||||
V(FUNCTION_TAG, "Function") \
|
||||
V(KEYED_LOAD_IC_TAG, "KeyedLoadIC") \
|
||||
V(KEYED_LOAD_MEGAMORPHIC_IC_TAG, "KeyedLoadMegamorphicIC") \
|
||||
V(KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG, "KeyedExternalArrayLoadIC") \
|
||||
V(KEYED_STORE_IC_TAG, "KeyedStoreIC") \
|
||||
V(KEYED_STORE_MEGAMORPHIC_IC_TAG, "KeyedStoreMegamorphicIC") \
|
||||
V(KEYED_EXTERNAL_ARRAY_STORE_IC_TAG, "KeyedExternalArrayStoreIC") \
|
||||
V(LAZY_COMPILE_TAG, "LazyCompile") \
|
||||
V(LOAD_IC_TAG, "LoadIC") \
|
||||
V(REG_EXP_TAG, "RegExp") \
|
||||
V(SCRIPT_TAG, "Script") \
|
||||
V(STORE_IC_TAG, "StoreIC") \
|
||||
V(STUB_TAG, "Stub") \
|
||||
V(NATIVE_FUNCTION_TAG, "Function") \
|
||||
V(NATIVE_LAZY_COMPILE_TAG, "LazyCompile") \
|
||||
V(NATIVE_SCRIPT_TAG, "Script")
|
||||
// Note that 'NATIVE_' cases for functions and scripts are mapped onto
|
||||
// original tags when writing to the log.
|
||||
|
@ -457,7 +457,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
|
||||
static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
|
||||
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
|
||||
Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
|
||||
if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
|
||||
if (FLAG_cleanup_code_caches_at_gc && code->is_inline_cache_stub()) {
|
||||
IC::Clear(rinfo->pc());
|
||||
// Please note targets for cleared inline cached do not have to be
|
||||
// marked since they are contained in HEAP->non_monomorphic_cache().
|
||||
@ -1056,7 +1056,7 @@ void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
|
||||
ASSERT(HEAP->Contains(object));
|
||||
if (object->IsMap()) {
|
||||
Map* map = Map::cast(object);
|
||||
if (FLAG_cleanup_caches_in_maps_at_gc) {
|
||||
if (FLAG_cleanup_code_caches_at_gc) {
|
||||
map->ClearCodeCache(heap());
|
||||
}
|
||||
SetMark(map);
|
||||
|
@ -6613,4 +6613,3 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_TARGET_ARCH_MIPS
|
||||
|
||||
|
@ -2595,7 +2595,6 @@ Code::ExtraICState Code::extra_ic_state() {
|
||||
|
||||
|
||||
PropertyType Code::type() {
|
||||
ASSERT(ic_state() == MONOMORPHIC);
|
||||
return ExtractTypeFromFlags(flags());
|
||||
}
|
||||
|
||||
@ -2713,14 +2712,14 @@ void Code::set_check_type(CheckType value) {
|
||||
|
||||
|
||||
ExternalArrayType Code::external_array_type() {
|
||||
ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
|
||||
ASSERT(is_keyed_load_stub() || is_keyed_store_stub());
|
||||
byte type = READ_BYTE_FIELD(this, kExternalArrayTypeOffset);
|
||||
return static_cast<ExternalArrayType>(type);
|
||||
}
|
||||
|
||||
|
||||
void Code::set_external_array_type(ExternalArrayType value) {
|
||||
ASSERT(is_external_array_load_stub() || is_external_array_store_stub());
|
||||
ASSERT(is_keyed_load_stub() || is_keyed_store_stub());
|
||||
WRITE_BYTE_FIELD(this, kExternalArrayTypeOffset, value);
|
||||
}
|
||||
|
||||
@ -2792,8 +2791,7 @@ Code::Flags Code::ComputeFlags(Kind kind,
|
||||
(kind == CALL_IC && (ic_state == MONOMORPHIC ||
|
||||
ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
|
||||
(kind == STORE_IC) ||
|
||||
(kind == KEYED_STORE_IC) ||
|
||||
(kind == KEYED_EXTERNAL_ARRAY_STORE_IC));
|
||||
(kind == KEYED_STORE_IC));
|
||||
// Compute the bit mask.
|
||||
int bits = kind << kFlagsKindShift;
|
||||
if (in_loop) bits |= kFlagsICInLoopMask;
|
||||
|
@ -3811,8 +3811,6 @@ void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
|
||||
|
||||
|
||||
MaybeObject* CodeCache::Update(String* name, Code* code) {
|
||||
ASSERT(code->ic_state() == MONOMORPHIC);
|
||||
|
||||
// The number of monomorphic stubs for normal load/store/call IC's can grow to
|
||||
// a large number and therefore they need to go into a hash table. They are
|
||||
// used to load global properties from cells.
|
||||
@ -6555,10 +6553,8 @@ const char* Code::Kind2String(Kind kind) {
|
||||
case BUILTIN: return "BUILTIN";
|
||||
case LOAD_IC: return "LOAD_IC";
|
||||
case KEYED_LOAD_IC: return "KEYED_LOAD_IC";
|
||||
case KEYED_EXTERNAL_ARRAY_LOAD_IC: return "KEYED_EXTERNAL_ARRAY_LOAD_IC";
|
||||
case STORE_IC: return "STORE_IC";
|
||||
case KEYED_STORE_IC: return "KEYED_STORE_IC";
|
||||
case KEYED_EXTERNAL_ARRAY_STORE_IC: return "KEYED_EXTERNAL_ARRAY_STORE_IC";
|
||||
case CALL_IC: return "CALL_IC";
|
||||
case KEYED_CALL_IC: return "KEYED_CALL_IC";
|
||||
case TYPE_RECORDING_UNARY_OP_IC: return "TYPE_RECORDING_UNARY_OP_IC";
|
||||
|
@ -30,6 +30,7 @@
|
||||
|
||||
#include "allocation.h"
|
||||
#include "builtins.h"
|
||||
#include "list.h"
|
||||
#include "smart-pointer.h"
|
||||
#include "unicode-inl.h"
|
||||
#if V8_TARGET_ARCH_ARM
|
||||
@ -3280,12 +3281,10 @@ class Code: public HeapObject {
|
||||
BUILTIN,
|
||||
LOAD_IC,
|
||||
KEYED_LOAD_IC,
|
||||
KEYED_EXTERNAL_ARRAY_LOAD_IC,
|
||||
CALL_IC,
|
||||
KEYED_CALL_IC,
|
||||
STORE_IC,
|
||||
KEYED_STORE_IC,
|
||||
KEYED_EXTERNAL_ARRAY_STORE_IC,
|
||||
TYPE_RECORDING_UNARY_OP_IC,
|
||||
TYPE_RECORDING_BINARY_OP_IC,
|
||||
COMPARE_IC,
|
||||
@ -3368,12 +3367,6 @@ class Code: public HeapObject {
|
||||
return kind() == TYPE_RECORDING_BINARY_OP_IC;
|
||||
}
|
||||
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
|
||||
inline bool is_external_array_load_stub() {
|
||||
return kind() == KEYED_EXTERNAL_ARRAY_LOAD_IC;
|
||||
}
|
||||
inline bool is_external_array_store_stub() {
|
||||
return kind() == KEYED_EXTERNAL_ARRAY_STORE_IC;
|
||||
}
|
||||
|
||||
// [major_key]: For kind STUB or BINARY_OP_IC, the major key.
|
||||
inline int major_key();
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2010 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -1564,10 +1564,8 @@ static void ReportCodeKindStatistics() {
|
||||
CASE(BUILTIN);
|
||||
CASE(LOAD_IC);
|
||||
CASE(KEYED_LOAD_IC);
|
||||
CASE(KEYED_EXTERNAL_ARRAY_LOAD_IC);
|
||||
CASE(STORE_IC);
|
||||
CASE(KEYED_STORE_IC);
|
||||
CASE(KEYED_EXTERNAL_ARRAY_STORE_IC);
|
||||
CASE(CALL_IC);
|
||||
CASE(KEYED_CALL_IC);
|
||||
CASE(TYPE_RECORDING_UNARY_OP_IC);
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2009 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -457,34 +457,6 @@ MaybeObject* StubCache::ComputeKeyedLoadFunctionPrototype(
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeKeyedLoadSpecialized(JSObject* receiver) {
|
||||
// Using NORMAL as the PropertyType for array element loads is a misuse. The
|
||||
// generated stub always accesses fast elements, not slow-mode fields, but
|
||||
// some property type is required for the stub lookup. Note that overloading
|
||||
// the NORMAL PropertyType is only safe as long as no stubs are generated for
|
||||
// other keyed field loads. This is guaranteed to be the case since all field
|
||||
// keyed loads that are not array elements go through a generic builtin stub.
|
||||
Code::Flags flags =
|
||||
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, NORMAL);
|
||||
String* name = heap()->KeyedLoadSpecialized_symbol();
|
||||
Object* code = receiver->map()->FindInCodeCache(name, flags);
|
||||
if (code->IsUndefined()) {
|
||||
KeyedLoadStubCompiler compiler;
|
||||
{ MaybeObject* maybe_code = compiler.CompileLoadSpecialized(receiver);
|
||||
if (!maybe_code->ToObject(&code)) return maybe_code;
|
||||
}
|
||||
PROFILE(isolate_,
|
||||
CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), 0));
|
||||
Object* result;
|
||||
{ MaybeObject* maybe_result =
|
||||
receiver->UpdateMapCodeCache(name, Code::cast(code));
|
||||
if (!maybe_result->ToObject(&result)) return maybe_result;
|
||||
}
|
||||
}
|
||||
return code;
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeStoreField(String* name,
|
||||
JSObject* receiver,
|
||||
int field_index,
|
||||
@ -513,30 +485,6 @@ MaybeObject* StubCache::ComputeStoreField(String* name,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeKeyedStoreSpecialized(
|
||||
JSObject* receiver,
|
||||
StrictModeFlag strict_mode) {
|
||||
Code::Flags flags =
|
||||
Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, NORMAL, strict_mode);
|
||||
String* name = heap()->KeyedStoreSpecialized_symbol();
|
||||
Object* code = receiver->map()->FindInCodeCache(name, flags);
|
||||
if (code->IsUndefined()) {
|
||||
KeyedStoreStubCompiler compiler(strict_mode);
|
||||
{ MaybeObject* maybe_code = compiler.CompileStoreSpecialized(receiver);
|
||||
if (!maybe_code->ToObject(&code)) return maybe_code;
|
||||
}
|
||||
PROFILE(isolate_,
|
||||
CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, Code::cast(code), 0));
|
||||
Object* result;
|
||||
{ MaybeObject* maybe_result =
|
||||
receiver->UpdateMapCodeCache(name, Code::cast(code));
|
||||
if (!maybe_result->ToObject(&result)) return maybe_result;
|
||||
}
|
||||
}
|
||||
return code;
|
||||
}
|
||||
|
||||
|
||||
namespace {
|
||||
|
||||
ExternalArrayType ElementsKindToExternalArrayType(JSObject::ElementsKind kind) {
|
||||
@ -565,60 +513,6 @@ ExternalArrayType ElementsKindToExternalArrayType(JSObject::ElementsKind kind) {
|
||||
}
|
||||
}
|
||||
|
||||
String* ExternalArrayTypeToStubName(Heap* heap,
                                    ExternalArrayType array_type,
                                    bool is_store) {
  if (is_store) {
    switch (array_type) {
      case kExternalByteArray:
        return heap->KeyedStoreExternalByteArray_symbol();
      case kExternalUnsignedByteArray:
        return heap->KeyedStoreExternalUnsignedByteArray_symbol();
      case kExternalShortArray:
        return heap->KeyedStoreExternalShortArray_symbol();
      case kExternalUnsignedShortArray:
        return heap->KeyedStoreExternalUnsignedShortArray_symbol();
      case kExternalIntArray:
        return heap->KeyedStoreExternalIntArray_symbol();
      case kExternalUnsignedIntArray:
        return heap->KeyedStoreExternalUnsignedIntArray_symbol();
      case kExternalFloatArray:
        return heap->KeyedStoreExternalFloatArray_symbol();
      case kExternalDoubleArray:
        return heap->KeyedStoreExternalDoubleArray_symbol();
      case kExternalPixelArray:
        return heap->KeyedStoreExternalPixelArray_symbol();
      default:
        UNREACHABLE();
        return NULL;
    }
  } else {
    switch (array_type) {
      case kExternalByteArray:
        return heap->KeyedLoadExternalByteArray_symbol();
      case kExternalUnsignedByteArray:
        return heap->KeyedLoadExternalUnsignedByteArray_symbol();
      case kExternalShortArray:
        return heap->KeyedLoadExternalShortArray_symbol();
      case kExternalUnsignedShortArray:
        return heap->KeyedLoadExternalUnsignedShortArray_symbol();
      case kExternalIntArray:
        return heap->KeyedLoadExternalIntArray_symbol();
      case kExternalUnsignedIntArray:
        return heap->KeyedLoadExternalUnsignedIntArray_symbol();
      case kExternalFloatArray:
        return heap->KeyedLoadExternalFloatArray_symbol();
      case kExternalDoubleArray:
        return heap->KeyedLoadExternalDoubleArray_symbol();
      case kExternalPixelArray:
        return heap->KeyedLoadExternalPixelArray_symbol();
      default:
        UNREACHABLE();
        return NULL;
    }
  }
}

}  // anonymous namespace

@ -628,37 +522,88 @@ MaybeObject* StubCache::ComputeKeyedLoadOrStoreExternalArray(
    StrictModeFlag strict_mode) {
  Code::Flags flags =
      Code::ComputeMonomorphicFlags(
          is_store ? Code::KEYED_EXTERNAL_ARRAY_STORE_IC :
                     Code::KEYED_EXTERNAL_ARRAY_LOAD_IC,
          is_store ? Code::KEYED_STORE_IC :
                     Code::KEYED_LOAD_IC,
          NORMAL,
          strict_mode);
  ExternalArrayType array_type =
      ElementsKindToExternalArrayType(receiver->GetElementsKind());
  String* name = ExternalArrayTypeToStubName(heap(), array_type, is_store);
  Object* code = receiver->map()->FindInCodeCache(name, flags);
  if (code->IsUndefined()) {
    ExternalArrayStubCompiler compiler;
    { MaybeObject* maybe_code =
          is_store ?
              compiler.CompileKeyedStoreStub(receiver, array_type, flags) :
              compiler.CompileKeyedLoadStub(receiver, array_type, flags);
      if (!maybe_code->ToObject(&code)) return maybe_code;
    }
    Code::cast(code)->set_external_array_type(array_type);
    if (is_store) {
      PROFILE(isolate_,
              CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG,
                              Code::cast(code), 0));
    } else {
      PROFILE(isolate_,
              CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG,
                              Code::cast(code), 0));
    }
    Object* result;
    { MaybeObject* maybe_result =
          receiver->UpdateMapCodeCache(name, Code::cast(code));
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
  String* name = is_store
      ? isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol()
      : isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol();
  Object* maybe_code = receiver->map()->FindInCodeCache(name, flags);
  if (!maybe_code->IsUndefined()) return Code::cast(maybe_code);

  MaybeObject* maybe_new_code = NULL;
  if (is_store) {
    ExternalArrayStoreStubCompiler compiler(strict_mode);
    maybe_new_code = compiler.CompileStore(receiver, array_type);
  } else {
    ExternalArrayLoadStubCompiler compiler(strict_mode);
    maybe_new_code = compiler.CompileLoad(receiver, array_type);
  }
  Code* code;
  if (!maybe_new_code->To(&code)) return maybe_new_code;
  code->set_external_array_type(array_type);
  if (is_store) {
    PROFILE(isolate_,
            CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG,
                            Code::cast(code), 0));
  } else {
    PROFILE(isolate_,
            CodeCreateEvent(Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG,
                            Code::cast(code), 0));
  }
  ASSERT(code->IsCode());
  Object* result;
  { MaybeObject* maybe_result =
        receiver->UpdateMapCodeCache(name, Code::cast(code));
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  return code;
}

MaybeObject* StubCache::ComputeKeyedLoadOrStoreFastElement(
    JSObject* receiver,
    bool is_store,
    StrictModeFlag strict_mode) {
  Code::Flags flags =
      Code::ComputeMonomorphicFlags(
          is_store ? Code::KEYED_STORE_IC :
                     Code::KEYED_LOAD_IC,
          NORMAL,
          strict_mode);
  String* name = is_store
      ? isolate()->heap()->KeyedStoreSpecializedMonomorphic_symbol()
      : isolate()->heap()->KeyedLoadSpecializedMonomorphic_symbol();
  Object* maybe_code = receiver->map()->FindInCodeCache(name, flags);
  if (!maybe_code->IsUndefined()) return Code::cast(maybe_code);

  MaybeObject* maybe_new_code = NULL;
  if (is_store) {
    KeyedStoreStubCompiler compiler(strict_mode);
    maybe_new_code = compiler.CompileStoreFastElement(receiver->map());
  } else {
    KeyedLoadStubCompiler compiler;
    maybe_new_code = compiler.CompileLoadFastElement(receiver->map());
  }
  Code* code;
  if (!maybe_new_code->To(&code)) return maybe_new_code;
  if (is_store) {
    PROFILE(isolate_,
            CodeCreateEvent(Logger::KEYED_STORE_IC_TAG,
                            Code::cast(code), 0));
  } else {
    PROFILE(isolate_,
            CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG,
                            Code::cast(code), 0));
  }
  ASSERT(code->IsCode());
  Object* result;
  { MaybeObject* maybe_result =
        receiver->UpdateMapCodeCache(name, Code::cast(code));
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  return code;
}
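Illustrative sketch (not part of this CL; the types, names, and cache key below are invented): the probe-compile-cache flow in ComputeKeyedLoadOrStoreFastElement above is a per-map lookup under a fixed symbol name, compiling a stub only on a miss.

#include <map>
#include <string>

struct Code { std::string name; };                           // stand-in for a compiled stub
struct MapCodeCache { std::map<std::string, Code> cache; };  // stand-in for a map's code cache

// Probe the per-map cache under a fixed symbol name, "compile" a stub on a
// miss, then cache it and return the cached entry.
const Code* ComputeFastElementStub(MapCodeCache* map_cache, bool is_store) {
  const std::string name = is_store ? "KeyedStoreSpecializedMonomorphic"
                                    : "KeyedLoadSpecializedMonomorphic";
  auto it = map_cache->cache.find(name);
  if (it != map_cache->cache.end()) return &it->second;           // cache hit
  Code compiled = {name};                                          // "compile" a new stub
  return &map_cache->cache.emplace(name, compiled).first->second;  // cache and return
}

Because the monomorphic and the megamorphic variants are cached under different names, upgrading an IC does not have to evict the per-map monomorphic stub.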
@ -1717,8 +1662,11 @@ MaybeObject* LoadStubCompiler::GetCode(PropertyType type, String* name) {
}


MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name) {
  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, type);
MaybeObject* KeyedLoadStubCompiler::GetCode(PropertyType type,
                                            String* name,
                                            InlineCacheState state) {
  Code::Flags flags = Code::ComputeFlags(
      Code::KEYED_LOAD_IC, NOT_IN_LOOP, state, Code::kNoExtraICState, type);
  MaybeObject* result = GetCodeWithFlags(flags, name);
  if (!result->IsFailure()) {
    PROFILE(isolate(),
@ -1750,9 +1698,11 @@ MaybeObject* StoreStubCompiler::GetCode(PropertyType type, String* name) {
}


MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type, String* name) {
  Code::Flags flags = Code::ComputeMonomorphicFlags(
      Code::KEYED_STORE_IC, type, strict_mode_);
MaybeObject* KeyedStoreStubCompiler::GetCode(PropertyType type,
                                             String* name,
                                             InlineCacheState state) {
  Code::Flags flags = Code::ComputeFlags(
      Code::KEYED_STORE_IC, NOT_IN_LOOP, state, strict_mode_, type);
  MaybeObject* result = GetCodeWithFlags(flags, name);
  if (!result->IsFailure()) {
    PROFILE(isolate(),
@ -1930,15 +1880,36 @@ void CallOptimization::AnalyzePossibleApiFunction(JSFunction* function) {
}


MaybeObject* ExternalArrayStubCompiler::GetCode(Code::Flags flags) {
MaybeObject* ExternalArrayLoadStubCompiler::GetCode() {
  Object* result;
  { MaybeObject* maybe_result = GetCodeWithFlags(flags, "ExternalArrayStub");
  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC,
                                                    NORMAL,
                                                    strict_mode_);
  { MaybeObject* maybe_result = GetCodeWithFlags(flags,
                                                 "ExternalArrayLoadStub");
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Code* code = Code::cast(result);
  USE(code);
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStub"));
          CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayLoadStub"));
  return result;
}


MaybeObject* ExternalArrayStoreStubCompiler::GetCode() {
  Object* result;
  Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC,
                                                    NORMAL,
                                                    strict_mode_);
  { MaybeObject* maybe_result = GetCodeWithFlags(flags,
                                                 "ExternalArrayStoreStub");
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  Code* code = Code::cast(result);
  USE(code);
  PROFILE(isolate(),
          CodeCreateEvent(Logger::STUB_TAG, code, "ExternalArrayStoreStub"));
  return result;
}

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -31,6 +31,7 @@
#include "allocation.h"
#include "arguments.h"
#include "macro-assembler.h"
#include "objects.h"
#include "zone-inl.h"

namespace v8 {
@ -144,9 +145,6 @@ class StubCache {
                                       String* name,
                                       JSFunction* receiver);

  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadSpecialized(
      JSObject* receiver);

  // ---

  MUST_USE_RESULT MaybeObject* ComputeStoreField(
@ -185,16 +183,15 @@ class StubCache {
                                            Map* transition,
                                            StrictModeFlag strict_mode);

  MUST_USE_RESULT MaybeObject* ComputeKeyedStoreSpecialized(
      JSObject* receiver,
      StrictModeFlag strict_mode);


  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreExternalArray(
      JSObject* receiver,
      bool is_store,
      StrictModeFlag strict_mode);

  MUST_USE_RESULT MaybeObject* ComputeKeyedLoadOrStoreFastElement(
      JSObject* receiver,
      bool is_store,
      StrictModeFlag strict_mode);
  // ---

  MUST_USE_RESULT MaybeObject* ComputeCallField(int argc,
@ -469,7 +466,10 @@ class StubCompiler BASE_EMBEDDED {
                              Register scratch,
                              Label* miss_label);

  static void GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind);
  static void GenerateLoadMiss(MacroAssembler* masm,
                               Code::Kind kind);

  static void GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm);

  // Generates code that verifies that the property holder has not changed
  // (checking maps of objects in the prototype chain for fast and global
@ -634,10 +634,21 @@ class KeyedLoadStubCompiler: public StubCompiler {
  MUST_USE_RESULT MaybeObject* CompileLoadStringLength(String* name);
  MUST_USE_RESULT MaybeObject* CompileLoadFunctionPrototype(String* name);

  MUST_USE_RESULT MaybeObject* CompileLoadSpecialized(JSObject* receiver);
  MUST_USE_RESULT MaybeObject* CompileLoadFastElement(Map* receiver_map);

  MUST_USE_RESULT MaybeObject* CompileLoadMegamorphic(
      MapList* receiver_maps,
      CodeList* handler_ics);

  static void GenerateLoadExternalArray(MacroAssembler* masm,
                                        ExternalArrayType array_type);

  static void GenerateLoadFastElement(MacroAssembler* masm);

 private:
  MaybeObject* GetCode(PropertyType type, String* name);
  MaybeObject* GetCode(PropertyType type,
                       String* name,
                       InlineCacheState state = MONOMORPHIC);
};


@ -678,10 +689,22 @@ class KeyedStoreStubCompiler: public StubCompiler {
                                           Map* transition,
                                           String* name);

  MUST_USE_RESULT MaybeObject* CompileStoreSpecialized(JSObject* receiver);
  MUST_USE_RESULT MaybeObject* CompileStoreFastElement(Map* receiver_map);

  MUST_USE_RESULT MaybeObject* CompileStoreMegamorphic(
      MapList* receiver_maps,
      CodeList* handler_ics);

  static void GenerateStoreFastElement(MacroAssembler* masm,
                                       bool is_js_array);

  static void GenerateStoreExternalArray(MacroAssembler* masm,
                                         ExternalArrayType array_type);

 private:
  MaybeObject* GetCode(PropertyType type, String* name);
  MaybeObject* GetCode(PropertyType type,
                       String* name,
                       InlineCacheState state = MONOMORPHIC);

  StrictModeFlag strict_mode_;
};
@ -848,20 +871,36 @@ class CallOptimization BASE_EMBEDDED {
  CallHandlerInfo* api_call_info_;
};

class ExternalArrayStubCompiler: public StubCompiler {
class ExternalArrayLoadStubCompiler: public StubCompiler {
 public:
  explicit ExternalArrayStubCompiler() {}
  explicit ExternalArrayLoadStubCompiler(StrictModeFlag strict_mode)
      : strict_mode_(strict_mode) { }

  MUST_USE_RESULT MaybeObject* CompileKeyedLoadStub(
      JSObject* receiver, ExternalArrayType array_type, Code::Flags flags);

  MUST_USE_RESULT MaybeObject* CompileKeyedStoreStub(
      JSObject* receiver, ExternalArrayType array_type, Code::Flags flags);
  MUST_USE_RESULT MaybeObject* CompileLoad(
      JSObject* receiver, ExternalArrayType array_type);

 private:
  MaybeObject* GetCode(Code::Flags flags);
  MaybeObject* GetCode();

  StrictModeFlag strict_mode_;
};


class ExternalArrayStoreStubCompiler: public StubCompiler {
 public:
  explicit ExternalArrayStoreStubCompiler(StrictModeFlag strict_mode)
      : strict_mode_(strict_mode) {}

  MUST_USE_RESULT MaybeObject* CompileStore(
      JSObject* receiver, ExternalArrayType array_type);

 private:
  MaybeObject* GetCode();

  StrictModeFlag strict_mode_;
};


} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_

@ -82,7 +82,8 @@ bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) {
  if (map_or_code->IsMap()) return true;
  if (map_or_code->IsCode()) {
    Handle<Code> code(Code::cast(*map_or_code));
    return code->kind() == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC &&
    return code->is_keyed_load_stub() &&
        code->ic_state() == MONOMORPHIC &&
        code->FindFirstMap() != NULL;
  }
  return false;
@ -94,8 +95,8 @@ bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) {
  if (map_or_code->IsMap()) return true;
  if (map_or_code->IsCode()) {
    Handle<Code> code(Code::cast(*map_or_code));
    return code->kind() == Code::KEYED_EXTERNAL_ARRAY_STORE_IC &&
        code->FindFirstMap() != NULL;
    return code->is_keyed_store_stub() &&
        code->ic_state() == MONOMORPHIC;
  }
  return false;
}
@ -113,7 +114,9 @@ Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
      Handle<HeapObject>::cast(GetInfo(expr->id())));
  if (map_or_code->IsCode()) {
    Handle<Code> code(Code::cast(*map_or_code));
    return Handle<Map>(code->FindFirstMap());
    Map* first_map = code->FindFirstMap();
    ASSERT(first_map != NULL);
    return Handle<Map>(first_map);
  }
  return Handle<Map>(Map::cast(*map_or_code));
}
@ -445,8 +448,8 @@ void TypeFeedbackOracle::PopulateMap(Handle<Code> code) {
             kind == Code::COMPARE_IC) {
    SetInfo(id, target);
  } else if (state == MONOMORPHIC) {
    if (kind == Code::KEYED_EXTERNAL_ARRAY_LOAD_IC ||
        kind == Code::KEYED_EXTERNAL_ARRAY_STORE_IC) {
    if (kind == Code::KEYED_LOAD_IC ||
        kind == Code::KEYED_STORE_IC) {
      SetInfo(id, target);
    } else if (kind != Code::CALL_IC ||
               target->check_type() == RECEIVER_MAP_CHECK) {
@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -134,6 +134,7 @@ namespace internal {
  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol)            \
  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow)                \
  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs)      \
  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow)   \
  /* How is the generic keyed-call stub used? */                      \
  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast)         \
@ -179,6 +180,8 @@ namespace internal {
  SC(keyed_store_inline_miss, V8.KeyedStoreInlineMiss)                \
  SC(named_store_global_inline, V8.NamedStoreGlobalInline)            \
  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss)   \
  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs)    \
  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
  SC(store_normal_miss, V8.StoreNormalMiss)                           \
  SC(store_normal_hit, V8.StoreNormalHit)                             \
  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub)                \
@ -658,7 +658,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
  GenerateMiss(masm, false);
}


@ -701,7 +701,7 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
      1);

  __ bind(&slow);
  GenerateMiss(masm);
  GenerateMiss(masm, false);
}


@ -1240,7 +1240,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
@ -1256,8 +1256,10 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  ExternalReference ref
      = ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}

@ -1441,7 +1443,7 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
@ -1456,8 +1458,30 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}

@ -2601,6 +2601,21 @@ void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
}


void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}


void MacroAssembler::AbortIfNotNumber(Register object) {
  Label ok;
  Condition is_smi = CheckSmi(object);

@ -45,6 +45,7 @@ enum AllocationFlags {
  RESULT_CONTAINS_TOP = 1 << 1
};


// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee save, and not used by the
// function calling convention.
@ -752,6 +753,14 @@ class MacroAssembler: public Assembler {
                Label* fail,
                SmiCheckType smi_check_type);

  // Check if the map of an object is equal to a specified map and branch to a
  // specified target if equal. Skip the smi check if not required (object is
  // known to be a heap object).
  void DispatchMap(Register obj,
                   Handle<Map> map,
                   Handle<Code> success,
                   SmiCheckType smi_check_type);

  // Check if the object in register heap_object is a string. Afterwards the
  // register map contains the object map and the register instance_type
  // contains the instance_type. The registers map and instance_type can be the
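Sketch only (plain C++, not V8 code; the names are invented): the code DispatchMap emits amounts to an optional smi check, a map compare, and a tail jump to the success stub, with a fall-through on mismatch.

#include <functional>

struct HeapObj { const void* map; bool is_smi; };

// Models the control flow DispatchMap generates: returning false stands for
// falling through to the 'fail' label; calling 'success' stands for the tail
// jump to the handler stub on a map match.
inline bool DispatchMapModel(const HeapObj& obj, const void* expected_map,
                             const std::function<void()>& success,
                             bool do_smi_check) {
  if (do_smi_check && obj.is_smi) return false;  // fail
  if (obj.map != expected_map) return false;     // fail
  success();                                     // jump to the handler stub
  return true;
}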
@ -703,6 +703,14 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
}


void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
  Code* code = masm->isolate()->builtins()->builtin(
      Builtins::kKeyedLoadIC_MissForceGeneric);
  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
@ -2485,8 +2493,35 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
}


MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
MaybeObject* KeyedStoreStubCompiler::CompileStoreFastElement(
    Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
  MaybeObject* maybe_stub =
      KeyedStoreFastElementStub(is_js_array).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(rdx,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}

MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
@ -2494,51 +2529,26 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
         factory()->fixed_array_map());
  __ j(not_equal, &miss);

  // Check that the key is within bounds.
  if (receiver->IsJSArray()) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    __ j(above_equal, &miss);
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &miss);
  Register map_reg = rbx;
  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    // Check map and tail call if there's a match.
    Handle<Map> map(receiver_maps->at(current));
    __ Cmp(map_reg, map);
    __ j(equal,
         Handle<Code>(handler_ics->at(current)),
         RelocInfo::CODE_TARGET);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register eax.
  __ movq(rdx, rax);
  __ SmiToInteger32(rcx, rcx);
  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ RecordWrite(rdi, 0, rdx, rcx);

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}

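Illustrative sketch (not from the diff; the types and names are invented): the MEGAMORPHIC stub emitted by CompileStoreMegamorphic is essentially one map compare and tail jump per cached receiver map, with a fall-through to the force-generic miss builtin.

#include <cstddef>
#include <vector>

struct Map;                 // opaque stand-in for v8::internal::Map
typedef void (*Handler)();  // stand-in for a per-map handler IC stub

// One compare-and-jump per (map, handler) pair, then the miss path.
inline void PolymorphicKeyedDispatch(const Map* receiver_map,
                                     const std::vector<const Map*>& maps,
                                     const std::vector<Handler>& handlers,
                                     Handler miss) {
  for (std::size_t i = 0; i < maps.size(); ++i) {
    if (receiver_map == maps[i]) { handlers[i](); return; }  // tail call handler
  }
  miss();  // no cached map matched: go to the MissForceGeneric builtin
}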
@ -2552,7 +2562,7 @@ MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
  // -----------------------------------
  Label miss;

  // Chech that receiver is not a smi.
  // Check that receiver is not a smi.
  __ JumpIfSmi(rax, &miss);

  // Check the maps of the full prototype chain. Also check that
@ -2943,49 +2953,56 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
}


MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
MaybeObject* KeyedLoadStubCompiler::CompileLoadFastElement(Map* receiver_map) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  MaybeObject* maybe_stub = KeyedLoadFastElementStub().TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(rdx,
                 Handle<Map>(receiver_map),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}

MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
    MapList* receiver_maps,
    CodeList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);
  Register map_reg = rbx;
  __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int current = 0; current < receiver_count; ++current) {
    // Check map and tail call if there's a match.
    Handle<Map> map(receiver_maps->at(current));
    __ Cmp(map_reg, map);
    __ j(equal,
         Handle<Code>(handler_ics->at(current)),
         RelocInfo::CODE_TARGET);
  }

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss);

  // Load the result and make sure it's not the hole.
  SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
  return GetCode(NORMAL, NULL, MEGAMORPHIC);
}

@ -3122,30 +3139,79 @@ MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
}


MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
MaybeObject* ExternalArrayLoadStubCompiler::CompileLoad(
    JSObject* receiver, ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;
  MaybeObject* maybe_stub =
      KeyedLoadExternalArrayStub(array_type).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(rdx,
                 Handle<Map>(receiver->map()),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}

MaybeObject* ExternalArrayStoreStubCompiler::CompileStore(
    JSObject* receiver, ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  MaybeObject* maybe_stub =
      KeyedStoreExternalArrayStub(array_type).TryGetCode();
  Code* stub;
  if (!maybe_stub->To(&stub)) return maybe_stub;
  __ DispatchMap(rdx,
                 Handle<Map>(receiver->map()),
                 Handle<Code>(stub),
                 DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  return GetCode();
}

#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &slow);

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ JumpIfNotSmi(rax, &miss_force_generic);

  // Check that the index is in range.
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ SmiToInteger32(rcx, rax);
  __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);
  __ j(above_equal, &miss_force_generic);

  // rax: index (as a smi)
  // rdx: receiver (JSObject)
@ -3232,7 +3298,7 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(

  // Slow case: Jump to runtime.
  __ bind(&slow);
  Counters* counters = isolate()->counters();
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);

  // ----------- S t a t e -------------
@ -3241,44 +3307,46 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address
  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
  // Miss case: Jump to runtime.
  __ bind(&miss_force_generic);

  // Return the generated code.
  return GetCode(flags);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}

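A note on the "unsigned comparison catches both negative and too-large values" trick used in the bounds checks above, as a hedged C++ sketch (the helper name is invented):

#include <stdint.h>

// Reinterpreting the smi-untagged index as unsigned lets a single branch
// reject both negative indices (which wrap to huge unsigned values) and
// indices at or beyond the array length.
inline bool InBounds(int32_t index, uint32_t length) {
  return static_cast<uint32_t>(index) < length;
}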
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
void KeyedStoreStubCompiler::GenerateStoreExternalArray(
    MacroAssembler* masm,
    ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;
  Label slow, miss_force_generic;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, DO_SMI_CHECK);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow);
  __ JumpIfNotSmi(rcx, &miss_force_generic);

  // Check that the index is in range.
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ SmiToInteger32(rdi, rcx);  // Untag the index.
  __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);
  __ j(above_equal, &miss_force_generic);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
@ -3412,21 +3480,116 @@ MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
  __ push(rbx);  // return address
  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
  // Miss case: call runtime.
  __ bind(&miss_force_generic);

  return GetCode(flags);
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}

void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss_force_generic);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss_force_generic);

  // Load the result and make sure it's not the hole.
  SmiIndex index = masm->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss_force_generic);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss_force_generic);
  Code* code = masm->isolate()->builtins()->builtin(
      Builtins::kKeyedLoadIC_MissForceGeneric);
  Handle<Code> ic(code);
  __ jmp(ic, RelocInfo::CODE_TARGET);
}

void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
                                                      bool is_js_array) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss_force_generic);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &miss_force_generic);

  // Check that the key is within bounds.
  if (is_js_array) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    __ j(above_equal, &miss_force_generic);
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &miss_force_generic);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register eax.
  __ movq(rdx, rax);
  __ SmiToInteger32(rcx, rcx);
  __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ RecordWrite(rdi, 0, rdx, rcx);

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
}
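Illustrative sketch (not V8 code; the types and helper below are invented): the guard order in GenerateStoreFastElement, smi key, non-COW FixedArray backing store, in-bounds index, then store plus write barrier, maps to roughly the following plain C++.

#include <cstddef>
#include <vector>

struct Value { bool is_smi; };

// Invented stand-in for a fast-elements backing store.
struct FastElements {
  bool is_copy_on_write;
  std::vector<Value> slots;
};

// Bail out to the force-generic miss handler (return false) unless every
// fast-path condition holds, then perform the store.
inline bool TryFastStore(FastElements* elements, int key, Value value) {
  if (key < 0) return false;                        // not a valid element index
  if (elements->is_copy_on_write) return false;     // 'cow' backing store: miss
  if (static_cast<std::size_t>(key) >= elements->slots.size()) return false;  // bounds
  elements->slots[static_cast<std::size_t>(key)] = value;  // store the element
  // (the generated stub also runs the write barrier after the store)
  return true;
}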
#undef __

} }  // namespace v8::internal