Additional work to get array literal allocation tracking working, even with --always-opt

BUG=

Review URL: https://codereview.chromium.org/11817017

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13406 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mvstanton@chromium.org 2013-01-17 08:41:27 +00:00
parent fd4d32e733
commit 7884216804
35 changed files with 555 additions and 191 deletions
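
In outline: allocation-site tracking plants a small AllocationSiteInfo record, one map word plus one payload word pointing back at the boilerplate, directly behind a freshly cloned array literal. This change threads an explicit AllocationSiteMode through the clone stubs, the elements-transition generators, and the optimized (Crankshaft) literal paths, so the record is still created when --always-opt bypasses the unoptimized paths that used to do this. A minimal sketch of the size arithmetic the stubs below perform; the constants are placeholders, not V8's real values:

#include <cstdio>

const int kPointerSize = 8;                            // placeholder word size
const int kJSArraySize = 4 * kPointerSize;             // assumed header size
const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map word + payload word

// Mirrors GenerateFastCloneShallowArrayCommon: when tracking, the info
// record is wedged between the array header and the elements backing
// store, shifting every later offset by its size.
int CloneAllocationSize(int elements_size, bool track_allocation_site) {
  int size = kJSArraySize;
  if (track_allocation_site) size += kAllocationSiteInfoSize;
  return size + elements_size;
}

int main() {
  std::printf("untracked clone: %d bytes\n", CloneAllocationSize(64, false));
  std::printf("tracked clone:   %d bytes\n", CloneAllocationSize(64, true));
  return 0;
}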

View File

@@ -344,7 +344,7 @@ static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteInfoMode allocation_site_info_mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
@@ -358,9 +358,10 @@ static void GenerateFastCloneShallowArrayCommon(
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
@@ -373,7 +374,7 @@ static void GenerateFastCloneShallowArrayCommon(
}
__ AllocateInNewSpace(size, r0, r1, r2, fail, flags);
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->
allocation_site_info_map())));
__ str(r2, FieldMemOperand(r0, allocation_info_start));
@@ -392,7 +393,7 @@ static void GenerateFastCloneShallowArrayCommon(
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ add(r2, r0, Operand(JSArray::kSize));
@@ -423,22 +424,14 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ b(eq, &slow_case);
FastCloneShallowArrayStub::Mode mode = mode_;
AllocationSiteInfoMode allocation_site_info_mode =
DONT_TRACK_ALLOCATION_SITE_INFO;
if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
mode = CLONE_ANY_ELEMENTS;
allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
}
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
__ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
__ b(ne, &check_fast_elements);
GenerateFastCloneShallowArrayCommon(masm, 0,
COPY_ON_WRITE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
@@ -447,9 +440,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ bind(&check_fast_elements);
__ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
__ b(ne, &double_elements);
GenerateFastCloneShallowArrayCommon(masm, length_,
CLONE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
@@ -483,7 +475,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_info_mode, &slow_case);
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));

View File

@@ -144,7 +144,8 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
#define __ ACCESS_MASM(masm)
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
MacroAssembler* masm, AllocationSiteMode mode,
Label* allocation_site_info_found) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
@@ -153,6 +154,12 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
// -- r3 : target map, scratch for subsequent call
// -- r4 : scratch (elements)
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
ASSERT(allocation_site_info_found != NULL);
masm->TestJSArrayForAllocationSiteInfo(r2, r4,
allocation_site_info_found);
}
// Set transitioned map.
__ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
__ RecordWriteField(r2,
@@ -167,7 +174,7 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
@@ -179,7 +186,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
Label loop, entry, convert_hole, gc_required, only_change_map, done;
bool vfp2_supported = CpuFeatures::IsSupported(VFP2);
if (FLAG_track_allocation_sites) {
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(r2, r4, fail);
}
@@ -308,7 +315,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
void ElementsTransitionGenerator::GenerateDoubleToObject(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
@@ -319,6 +326,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -----------------------------------
Label entry, loop, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(r2, r4, fail);
}
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
__ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
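
The three generators above now receive the mode and, for TRACK_ALLOCATION_SITE, call TestJSArrayForAllocationSiteInfo to jump to a label (the fail path, or allocation_site_info_found) before transitioning the array in place. A rough standalone model of that test, assuming the macro checks the word immediately behind the array header for the allocation-site-info map (the real macro also verifies the candidate address lies in new space):

#include <cstdint>
#include <cstdio>

const uintptr_t kAllocationSiteInfoMap = 0x1234;  // placeholder root value

// True when the word just behind the array header holds the
// allocation-site-info map, i.e. a tracked clone is being transitioned.
bool HasTrailingAllocationSiteInfo(const uintptr_t* word_after_array) {
  return *word_after_array == kAllocationSiteInfoMap;
}

int main() {
  uintptr_t fake_slot[2] = {kAllocationSiteInfoMap, 0 /* payload */};
  std::printf("%d\n", HasTrailingAllocationSiteInfo(fake_slot));  // prints 1
  return 0;
}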

View File

@@ -1719,7 +1719,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
@@ -1730,19 +1732,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
// Tracking allocation info allows us to pre-transition later if it makes
// sense.
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS &&
FLAG_track_allocation_sites) {
mode = FastCloneShallowArrayStub::
CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}
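
The mode selection above, shared in spirit by all three full-codegen ports in this change, boils down to one rule: a boilerplate whose elements are already fast (non-smi-only) cannot usefully transition further, so it is cloned with CLONE_ELEMENTS and no tracking. A condensed sketch with stand-in enums and an illustrative free function:

#include <cstdio>

enum Mode { CLONE_ELEMENTS, CLONE_ANY_ELEMENTS };
enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

struct StubChoice {
  Mode mode;
  AllocationSiteMode site_mode;
};

// Fast (non-smi-only) boilerplate elements cannot transition further, so
// there is nothing worth tracking; everything else tracks when the flag
// is on.
StubChoice ChooseCloneStub(bool has_fast_elements, bool track_allocation_sites) {
  if (has_fast_elements) {
    return {CLONE_ELEMENTS, DONT_TRACK_ALLOCATION_SITE};
  }
  return {CLONE_ANY_ELEMENTS, track_allocation_sites
                                  ? TRACK_ALLOCATION_SITE
                                  : DONT_TRACK_ALLOCATION_SITE};
}

int main() {
  StubChoice c = ChooseCloneStub(false, true);
  std::printf("%d %d\n", c.mode, c.site_mode);  // 1 1
  return 0;
}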

View File

@@ -1249,7 +1249,9 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in r0.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, &fail);
__ mov(r0, r2);
__ Ret();
__ bind(&fail);
@@ -1270,7 +1272,9 @@ void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
// Must return the modified receiver in r0.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS,
FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, &fail);
__ mov(r0, r2);
__ Ret();
__ bind(&fail);
@@ -1406,7 +1410,9 @@ static void KeyedStoreGenerateGenericHelper(
r4,
slow);
ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
__ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
@@ -1418,7 +1424,9 @@ static void KeyedStoreGenerateGenericHelper(
r4,
slow);
ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
slow);
__ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -1432,7 +1440,8 @@ static void KeyedStoreGenerateGenericHelper(
r4,
slow);
ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
__ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
}

View File

@@ -5431,6 +5431,8 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
AllocationSiteMode allocation_site_mode =
instr->hydrogen()->allocation_site_mode();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
@@ -5462,7 +5464,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(instr->hydrogen()->depth() == 1);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -5471,9 +5473,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
} else {
FastCloneShallowArrayStub::Mode mode =
boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -5482,10 +5484,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset) {
int* offset,
AllocationSiteMode mode) {
ASSERT(!source.is(r2));
ASSERT(!result.is(r2));
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
object->map()->CanTrackAllocationSite();
// Only elements backing stores for non-COW arrays need to be copied.
Handle<FixedArrayBase> elements(object->elements());
bool has_elements = elements->length() > 0 &&
@@ -5495,8 +5501,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
// this object and its backing store.
int object_offset = *offset;
int object_size = object->map()->instance_size();
int elements_offset = *offset + object_size;
int elements_size = has_elements ? elements->Size() : 0;
int elements_offset = *offset + object_size;
if (create_allocation_site_info) {
elements_offset += AllocationSiteInfo::kSize;
*offset += AllocationSiteInfo::kSize;
}
*offset += object_size + elements_size;
// Copy object header.
@@ -5521,7 +5532,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ add(r2, result, Operand(*offset));
__ str(r2, FieldMemOperand(result, total_offset));
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
__ str(r2, FieldMemOperand(result, total_offset));
@@ -5531,6 +5543,14 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
}
}
// Build Allocation Site Info if desired
if (create_allocation_site_info) {
__ mov(r2, Operand(Handle<Map>(isolate()->heap()->
allocation_site_info_map())));
__ str(r2, FieldMemOperand(result, object_size));
__ str(source, FieldMemOperand(result, object_size + kPointerSize));
}
if (has_elements) {
// Copy elements backing store header.
__ LoadHeapObject(source, elements);
@@ -5566,7 +5586,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ add(r2, result, Operand(*offset));
__ str(r2, FieldMemOperand(result, total_offset));
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
__ str(r2, FieldMemOperand(result, total_offset));
@@ -5617,7 +5638,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
__ bind(&allocated);
int offset = 0;
__ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
EmitDeepCopy(instr->hydrogen()->boilerplate(), r0, r1, &offset);
EmitDeepCopy(instr->hydrogen()->boilerplate(), r0, r1, &offset,
instr->hydrogen()->allocation_site_mode());
ASSERT_EQ(size, offset);
}
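
Note that every recursive EmitDeepCopy call passes DONT_TRACK_ALLOCATION_SITE: only the outermost boilerplate gets an info record, whose two words are reserved between the object and its elements. A simplified size model of that recursion (placeholder sizes, not the emitter itself):

#include <cstdio>
#include <vector>

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

struct Obj { std::vector<Obj> children; };

const int kObjectSize = 32;              // placeholder instance size
const int kAllocationSiteInfoSize = 16;  // two pointer-sized words

// Only the top-level call may carry TRACK_ALLOCATION_SITE; nested
// boilerplates never get their own info record.
int DeepCopySize(const Obj& o, AllocationSiteMode mode) {
  int size = kObjectSize;
  if (mode == TRACK_ALLOCATION_SITE) size += kAllocationSiteInfoSize;
  for (const Obj& child : o.children) {
    size += DeepCopySize(child, DONT_TRACK_ALLOCATION_SITE);
  }
  return size;
}

int main() {
  Obj nested{{Obj{}, Obj{}}};
  std::printf("%d\n", DeepCopySize(nested, TRACK_ALLOCATION_SITE));  // 112
  return 0;
}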

View File

@@ -365,7 +365,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset);
int* offset,
AllocationSiteMode mode);
// Emit optimized code for integer division.
// Inputs are signed.

View File

@@ -1694,7 +1694,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&try_holey_map);
__ mov(r2, receiver);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
__ jmp(&fast_object);
__ bind(&try_holey_map);
@@ -1705,7 +1707,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&call_builtin);
__ mov(r2, receiver);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(r3, r3, &call_builtin);

View File

@@ -570,15 +570,16 @@ bool ToBooleanStub::Types::CanBeUndetectable() const {
void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
Label fail;
AllocationSiteMode mode = AllocationSiteInfo::GetMode(from_, to_);
ASSERT(!IsFastHoleyElementsKind(from_) || IsFastHoleyElementsKind(to_));
if (!FLAG_trace_elements_transitions) {
if (IsFastSmiOrObjectElementsKind(to_)) {
if (IsFastSmiOrObjectElementsKind(from_)) {
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm);
GenerateMapChangeElementsTransition(masm, mode, &fail);
} else if (IsFastDoubleElementsKind(from_)) {
ASSERT(!IsFastSmiElementsKind(to_));
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, &fail);
} else {
UNREACHABLE();
}
@@ -588,14 +589,14 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
grow_mode_);
} else if (IsFastSmiElementsKind(from_) &&
IsFastDoubleElementsKind(to_)) {
ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, &fail);
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
is_jsarray_,
grow_mode_);
} else if (IsFastDoubleElementsKind(from_)) {
ASSERT(to_ == FAST_HOLEY_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm);
GenerateMapChangeElementsTransition(masm, mode, &fail);
} else {
UNREACHABLE();
}

View File

@@ -427,14 +427,16 @@ class FastCloneShallowArrayStub : public PlatformCodeStub {
CLONE_DOUBLE_ELEMENTS,
COPY_ON_WRITE_ELEMENTS,
CLONE_ANY_ELEMENTS,
CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
};
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
FastCloneShallowArrayStub(Mode mode, int length)
FastCloneShallowArrayStub(Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
: mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedLength);
@@ -444,12 +446,21 @@ class FastCloneShallowArrayStub : public PlatformCodeStub {
private:
Mode mode_;
AllocationSiteMode allocation_site_mode_;
int length_;
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
class ModeBits: public BitField<Mode, 1, 4> {};
class LengthBits: public BitField<int, 5, 4> {};
// Ensure data fits within available bits.
STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
STATIC_ASSERT(kFastCloneModeCount < 16);
STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int MinorKey() {
ASSERT(mode_ >= 0 && mode_ <= LAST_CLONE_MODE);
return length_ * kFastCloneModeCount + mode_;
return AllocationSiteModeBits::encode(allocation_site_mode_)
| ModeBits::encode(mode_)
| LengthBits::encode(length_);
}
};
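
The new MinorKey replaces the old multiplicative encoding with bit fields: one bit of AllocationSiteMode, four bits of clone mode, four bits of length, which the STATIC_ASSERTs above guard. A standalone rendition, with a minimal stand-in for V8's BitField template:

#include <cassert>

template <class T, int shift, int size>
struct BitField {
  static const unsigned kMask = ((1u << size) - 1) << shift;
  static unsigned encode(T value) {
    return static_cast<unsigned>(value) << shift;
  }
  static T decode(unsigned value) {
    return static_cast<T>((value & kMask) >> shift);
  }
};

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS,
            CLONE_ANY_ELEMENTS };

typedef BitField<AllocationSiteMode, 0, 1> AllocationSiteModeBits;
typedef BitField<Mode, 1, 4> ModeBits;
typedef BitField<int, 5, 4> LengthBits;

int main() {
  // Round-trip a key the way the stub's MinorKey builds it.
  unsigned key = AllocationSiteModeBits::encode(TRACK_ALLOCATION_SITE)
               | ModeBits::encode(CLONE_ANY_ELEMENTS)
               | LengthBits::encode(7);
  assert(AllocationSiteModeBits::decode(key) == TRACK_ALLOCATION_SITE);
  assert(ModeBits::decode(key) == CLONE_ANY_ELEMENTS);
  assert(LengthBits::decode(key) == 7);
  return 0;
}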

View File

@@ -96,9 +96,17 @@ UnaryMathFunction CreateSqrtFunction();
class ElementsTransitionGenerator : public AllStatic {
public:
static void GenerateMapChangeElementsTransition(MacroAssembler* masm);
static void GenerateSmiToDouble(MacroAssembler* masm, Label* fail);
static void GenerateDoubleToObject(MacroAssembler* masm, Label* fail);
// If |mode| is set to DONT_TRACK_ALLOCATION_SITE,
// |allocation_site_info_found| may be NULL.
static void GenerateMapChangeElementsTransition(MacroAssembler* masm,
AllocationSiteMode mode,
Label* allocation_site_info_found);
static void GenerateSmiToDouble(MacroAssembler* masm,
AllocationSiteMode mode,
Label* fail);
static void GenerateDoubleToObject(MacroAssembler* masm,
AllocationSiteMode mode,
Label* fail);
private:
DISALLOW_COPY_AND_ASSIGN(ElementsTransitionGenerator);

View File

@@ -194,6 +194,8 @@ DEFINE_bool(trace_all_uses, false, "trace all use positions")
DEFINE_bool(trace_range, false, "trace range analysis")
DEFINE_bool(trace_gvn, false, "trace global value numbering")
DEFINE_bool(trace_representation, false, "trace representation types")
DEFINE_bool(trace_track_allocation_sites, false,
"trace the tracking of allocation sites")
DEFINE_bool(stress_pointer_maps, false, "pointer map for every instruction")
DEFINE_bool(stress_environments, false, "environment for every instruction")
DEFINE_int(deopt_every_n_times,

View File

@@ -4394,7 +4394,8 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
}
MaybeObject* Heap::CopyJSObject(JSObject* source) {
MaybeObject* Heap::CopyJSObject(JSObject* source,
AllocationSiteMode mode) {
// Never used to copy functions. If functions need to be copied we
// have to be careful to clear the literals array.
SLOW_ASSERT(!source->IsJSFunction());
@@ -4404,13 +4405,25 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
int object_size = map->instance_size();
Object* clone;
bool track_origin = mode == TRACK_ALLOCATION_SITE &&
map->CanTrackAllocationSite();
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
// If we're forced to always allocate, we use the general allocation
// functions which may leave us with an object in old space.
int adjusted_object_size = object_size;
if (always_allocate()) {
// We'll only track origin if we are certain to allocate in new space
if (track_origin) {
const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
if ((object_size + AllocationSiteInfo::kSize) < kMinFreeNewSpaceAfterGC) {
adjusted_object_size += AllocationSiteInfo::kSize;
}
}
{ MaybeObject* maybe_clone =
AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
Address clone_address = HeapObject::cast(clone)->address();
@@ -4423,7 +4436,11 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
(object_size - JSObject::kHeaderSize) / kPointerSize);
} else {
wb_mode = SKIP_WRITE_BARRIER;
{ MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
if (track_origin) {
adjusted_object_size += AllocationSiteInfo::kSize;
}
{ MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size);
if (!maybe_clone->ToObject(&clone)) return maybe_clone;
}
SLOW_ASSERT(InNewSpace(clone));
@@ -4434,6 +4451,13 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
object_size);
}
if (adjusted_object_size > object_size) {
AllocationSiteInfo* alloc_info = reinterpret_cast<AllocationSiteInfo*>(
reinterpret_cast<Address>(clone) + object_size);
alloc_info->set_map(allocation_site_info_map());
alloc_info->set_payload(source);
}
SLOW_ASSERT(
JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
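
The always_allocate() branch matters precisely under --always-opt: a clone that falls back to old space could never be recognized by the trailing-word test, so the extra words are reserved only when the clone is certain to land in new space. A sketch of that guard; kInitialSemiSpaceSize is a placeholder, and the 3/4 factor mirrors kMinFreeNewSpaceAfterGC above:

#include <cstdio>

const int kAllocationSiteInfoSize = 16;        // two pointer-sized words
const int kInitialSemiSpaceSize = 512 * 1024;  // placeholder, not V8's value

// Mirrors the always_allocate() branch of Heap::CopyJSObject: reserve the
// trailing info words only when object plus record still fit comfortably
// in new space after a GC, so the clone cannot end up in old space.
int AdjustedCloneSize(int object_size, bool track_origin) {
  int adjusted_object_size = object_size;
  if (track_origin) {
    const int kMinFreeNewSpaceAfterGC = kInitialSemiSpaceSize * 3 / 4;
    if (object_size + kAllocationSiteInfoSize < kMinFreeNewSpaceAfterGC) {
      adjusted_object_size += kAllocationSiteInfoSize;
    }
  }
  return adjusted_object_size;
}

int main() {
  std::printf("%d\n", AdjustedCloneSize(64, true));   // 80: record reserved
  std::printf("%d\n", AdjustedCloneSize(64, false));  // 64: plain copy
  return 0;
}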

View File

@@ -624,7 +624,9 @@ class Heap {
// Returns a deep copy of the JavaScript object.
// Properties and elements are copied too.
// Returns failure if allocation failed.
MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
MUST_USE_RESULT MaybeObject* CopyJSObject(
JSObject* source,
AllocationSiteMode mode = DONT_TRACK_ALLOCATION_SITE);
// Allocates the function prototype.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation

View File

@@ -4968,19 +4968,29 @@ class HAllocateObject: public HTemplateInstruction<1> {
template <int V>
class HMaterializedLiteral: public HTemplateInstruction<V> {
public:
HMaterializedLiteral<V>(int index, int depth, AllocationSiteMode mode)
: literal_index_(index), depth_(depth), allocation_site_mode_(mode) {
this->set_representation(Representation::Tagged());
}
HMaterializedLiteral<V>(int index, int depth)
: literal_index_(index), depth_(depth) {
: literal_index_(index), depth_(depth),
allocation_site_mode_(DONT_TRACK_ALLOCATION_SITE) {
this->set_representation(Representation::Tagged());
}
int literal_index() const { return literal_index_; }
int depth() const { return depth_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
private:
virtual bool IsDeletable() const { return true; }
int literal_index_;
int depth_;
AllocationSiteMode allocation_site_mode_;
};
@@ -4990,8 +5000,9 @@ class HFastLiteral: public HMaterializedLiteral<1> {
Handle<JSObject> boilerplate,
int total_size,
int literal_index,
int depth)
: HMaterializedLiteral<1>(literal_index, depth),
int depth,
AllocationSiteMode mode)
: HMaterializedLiteral<1>(literal_index, depth, mode),
boilerplate_(boilerplate),
total_size_(total_size) {
SetOperandAt(0, context);
@@ -5006,7 +5017,6 @@ class HFastLiteral: public HMaterializedLiteral<1> {
HValue* context() { return OperandAt(0); }
Handle<JSObject> boilerplate() const { return boilerplate_; }
int total_size() const { return total_size_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
@@ -5026,8 +5036,9 @@ class HArrayLiteral: public HMaterializedLiteral<1> {
Handle<HeapObject> boilerplate_object,
int length,
int literal_index,
int depth)
: HMaterializedLiteral<1>(literal_index, depth),
int depth,
AllocationSiteMode mode)
: HMaterializedLiteral<1>(literal_index, depth, mode),
length_(length),
boilerplate_object_(boilerplate_object) {
SetOperandAt(0, context);
@@ -5043,7 +5054,6 @@ class HArrayLiteral: public HMaterializedLiteral<1> {
}
Handle<HeapObject> boilerplate_object() const { return boilerplate_object_; }
int length() const { return length_; }
bool IsCopyOnWrite() const;
virtual Representation RequiredInputRepresentation(int index) {

View File

@@ -5213,7 +5213,8 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
boilerplate_object,
total_size,
expr->literal_index(),
expr->depth());
expr->depth(),
DONT_TRACK_ALLOCATION_SITE);
} else {
literal = new(zone()) HObjectLiteral(context,
expr->constant_properties(),
@@ -5323,7 +5324,13 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate);
ElementsKind boilerplate_elements_kind =
Handle<JSObject>::cast(boilerplate)->GetElementsKind();
Handle<JSObject>::cast(boilerplate)->GetElementsKind();
// TODO(mvstanton): This heuristic is only a temporary solution. In the
// end, we want to quit creating allocation site info after a certain number
// of GCs for a call site.
AllocationSiteMode mode = AllocationSiteInfo::GetMode(
boilerplate_elements_kind);
// Check whether to use fast or slow deep-copying for boilerplate.
int total_size = 0;
@@ -5332,17 +5339,22 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
HFastLiteral::kMaxLiteralDepth,
&max_properties,
&total_size)) {
if (mode == TRACK_ALLOCATION_SITE) {
total_size += AllocationSiteInfo::kSize;
}
literal = new(zone()) HFastLiteral(context,
boilerplate,
total_size,
expr->literal_index(),
expr->depth());
expr->depth(),
mode);
} else {
literal = new(zone()) HArrayLiteral(context,
boilerplate,
length,
expr->literal_index(),
expr->depth());
expr->depth(),
mode);
}
// The array is expected in the bailout environment during computation

View File

@@ -323,7 +323,7 @@ static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteInfoMode allocation_site_info_mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
@@ -339,7 +339,7 @@ static void GenerateFastCloneShallowArrayCommon(
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
@@ -352,7 +352,7 @@ static void GenerateFastCloneShallowArrayCommon(
}
__ AllocateInNewSpace(size, eax, ebx, edx, fail, flags);
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(FieldOperand(eax, allocation_info_start),
Immediate(Handle<Map>(masm->isolate()->heap()->
allocation_site_info_map())));
@@ -371,7 +371,7 @@ static void GenerateFastCloneShallowArrayCommon(
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ lea(edx, Operand(eax, JSArray::kSize));
@@ -425,30 +425,21 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
FastCloneShallowArrayStub::Mode mode = mode_;
// ecx is boilerplate object.
AllocationSiteInfoMode allocation_site_info_mode =
DONT_TRACK_ALLOCATION_SITE_INFO;
if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
mode = CLONE_ANY_ELEMENTS;
allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
}
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset));
__ CheckMap(ebx, factory->fixed_cow_array_map(),
&check_fast_elements, DONT_DO_SMI_CHECK);
GenerateFastCloneShallowArrayCommon(masm, 0,
COPY_ON_WRITE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&check_fast_elements);
__ CheckMap(ebx, factory->fixed_array_map(),
&double_elements, DONT_DO_SMI_CHECK);
GenerateFastCloneShallowArrayCommon(masm, length_,
CLONE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
@@ -479,7 +470,9 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_info_mode, &slow_case);
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ ret(3 * kPointerSize);

View File

@@ -390,7 +390,8 @@ OS::MemCopyFunction CreateMemCopyFunction() {
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
MacroAssembler* masm, AllocationSiteMode mode,
Label* allocation_site_info_found) {
// ----------- S t a t e -------------
// -- eax : value
// -- ebx : target map
@@ -398,6 +399,12 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
ASSERT(allocation_site_info_found != NULL);
masm->TestJSArrayForAllocationSiteInfo(edx, edi,
allocation_site_info_found);
}
// Set transitioned map.
__ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
__ RecordWriteField(edx,
@@ -411,7 +418,7 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- eax : value
// -- ebx : target map
@@ -421,7 +428,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
// -----------------------------------
Label loop, entry, convert_hole, gc_required, only_change_map;
if (FLAG_track_allocation_sites) {
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(edx, edi, fail);
}
@@ -550,7 +557,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
void ElementsTransitionGenerator::GenerateDoubleToObject(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- eax : value
// -- ebx : target map
@@ -560,6 +567,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -----------------------------------
Label loop, entry, convert_hole, gc_required, only_change_map, success;
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(edx, edi, fail);
}
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

View File

@@ -1664,6 +1664,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1) {
@@ -1673,21 +1674,19 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// Tracking allocation info allows us to pre-transition later if it makes
// sense.
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS &&
FLAG_track_allocation_sites) {
mode = FastCloneShallowArrayStub::
CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}

View File

@@ -835,7 +835,9 @@ static void KeyedStoreGenerateGenericHelper(
ebx,
edi,
slow);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
@@ -846,7 +848,9 @@ static void KeyedStoreGenerateGenericHelper(
ebx,
edi,
slow);
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
slow);
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -860,7 +864,8 @@ static void KeyedStoreGenerateGenericHelper(
ebx,
edi,
slow);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
}
@@ -1650,7 +1655,9 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, &fail);
__ mov(eax, edx);
__ Ret();
__ bind(&fail);
@@ -1676,7 +1683,9 @@ void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS,
FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, &fail);
__ mov(eax, edx);
__ Ret();
__ bind(&fail);

View File

@@ -5300,6 +5300,8 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
AllocationSiteMode allocation_site_mode =
instr->hydrogen()->allocation_site_mode();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
@@ -5330,7 +5332,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(instr->hydrogen()->depth() == 1);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -5339,9 +5341,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
} else {
FastCloneShallowArrayStub::Mode mode =
boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -5350,10 +5352,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset) {
int* offset,
AllocationSiteMode mode) {
ASSERT(!source.is(ecx));
ASSERT(!result.is(ecx));
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
object->map()->CanTrackAllocationSite();
if (FLAG_debug_code) {
__ LoadHeapObject(ecx, object);
__ cmp(source, ecx);
@@ -5376,8 +5382,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
// this object and its backing store.
int object_offset = *offset;
int object_size = object->map()->instance_size();
int elements_offset = *offset + object_size;
int elements_size = has_elements ? elements->Size() : 0;
int elements_offset = *offset + object_size;
if (create_allocation_site_info) {
elements_offset += AllocationSiteInfo::kSize;
*offset += AllocationSiteInfo::kSize;
}
*offset += object_size + elements_size;
// Copy object header.
@@ -5402,7 +5413,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ lea(ecx, Operand(result, *offset));
__ mov(FieldOperand(result, total_offset), ecx);
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
__ mov(FieldOperand(result, total_offset), ecx);
@@ -5411,6 +5423,14 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
}
}
// Build Allocation Site Info if desired
if (create_allocation_site_info) {
__ mov(FieldOperand(result, object_size),
Immediate(Handle<Map>(isolate()->heap()->
allocation_site_info_map())));
__ mov(FieldOperand(result, object_size + kPointerSize), source);
}
if (has_elements) {
// Copy elements backing store header.
__ LoadHeapObject(source, elements);
@@ -5443,7 +5463,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ lea(ecx, Operand(result, *offset));
__ mov(FieldOperand(result, total_offset), ecx);
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
__ mov(FieldOperand(result, total_offset), ecx);
@@ -5493,7 +5514,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
__ bind(&allocated);
int offset = 0;
__ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset);
EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset,
instr->hydrogen()->allocation_site_mode());
ASSERT_EQ(size, offset);
}

View File

@@ -350,7 +350,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset);
int* offset,
AllocationSiteMode mode);
void EnsureSpaceForLazyDeopt();
void DoLoadKeyedExternalArray(LLoadKeyed* instr);

View File

@@ -1616,7 +1616,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&try_holey_map);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
// Restore edi.
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
__ jmp(&fast_object);
@@ -1628,7 +1630,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
edi,
&call_builtin);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
// Restore edi.
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
__ bind(&fast_object);

View File

@@ -1862,6 +1862,12 @@ Handle<Code> KeyedStoreIC::ComputePolymorphicStub(
Handle<Code> cached_stub;
Handle<Map> transitioned_map =
receiver_map->FindTransitionedMap(receiver_maps);
// TODO(mvstanton): The code below is doing pessimistic elements
// transitions. I would like to stop doing that and rely on Allocation Site
// Tracking to do a better job of ensuring the data types are what they need
// to be. Not all the elements are in place yet, pessimistic elements
// transitions are still important for performance.
if (!transitioned_map.is_null()) {
cached_stub = ElementsTransitionAndStoreStub(
receiver_map->elements_kind(), // original elements_kind

View File

@@ -3378,6 +3378,11 @@ Code::Flags Code::flags() {
}
inline bool Map::CanTrackAllocationSite() {
return instance_type() == JS_ARRAY_TYPE;
}
void Map::set_owns_descriptors(bool is_shared) {
set_bit_field3(OwnsDescriptors::update(bit_field3(), is_shared));
}

View File

@@ -7510,6 +7510,31 @@ AllocationSiteInfo* AllocationSiteInfo::FindForJSObject(JSObject* object) {
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSiteInfo::GetMode(
ElementsKind boilerplate_elements_kind) {
if (FLAG_track_allocation_sites &&
IsFastSmiElementsKind(boilerplate_elements_kind)) {
return TRACK_ALLOCATION_SITE;
}
return DONT_TRACK_ALLOCATION_SITE;
}
AllocationSiteMode AllocationSiteInfo::GetMode(ElementsKind from,
ElementsKind to) {
if (FLAG_track_allocation_sites &&
IsFastSmiElementsKind(from) &&
(IsFastObjectElementsKind(to) || IsFastDoubleElementsKind(to))) {
return TRACK_ALLOCATION_SITE;
}
return DONT_TRACK_ALLOCATION_SITE;
}
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
// For array indexes mix the length into the hash as an array index could
// be zero.
@@ -9878,6 +9903,10 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
ElementsKind kind = HasFastHoleyElements()
? FAST_HOLEY_ELEMENTS
: FAST_ELEMENTS;
MaybeObject* trans = PossiblyTransitionArrayBoilerplate(kind);
if (trans->IsFailure()) return trans;
MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
kind);
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
@@ -10409,15 +10438,31 @@ Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
MaybeObject* JSObject::PossiblyTransitionArrayBoilerplate(
ElementsKind to_kind) {
MaybeObject* ret = NULL;
if (IsJSArray()) {
AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(this);
if (info != NULL) {
JSObject* payload = JSObject::cast(info->payload());
if (payload->GetElementsKind() != to_kind) {
if (IsMoreGeneralElementsKindTransition(payload->GetElementsKind(),
to_kind)) {
ret = payload->TransitionElementsKind(to_kind);
}
if (!FLAG_track_allocation_sites || !IsJSArray()) {
return ret;
}
AllocationSiteInfo* info = AllocationSiteInfo::FindForJSObject(this);
if (info == NULL) {
return ret;
}
ASSERT(info->payload()->IsJSArray());
JSArray* payload = JSArray::cast(info->payload());
ElementsKind kind = payload->GetElementsKind();
if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
// If the array is huge, it's not likely to be defined in a local
// function, so we shouldn't make new instances of it very often.
uint32_t length = 0;
CHECK(payload->length()->ToArrayIndex(&length));
if (length <= 8 * 1024) {
ret = payload->TransitionElementsKind(to_kind);
if (FLAG_trace_track_allocation_sites) {
PrintF(
"AllocationSiteInfo: JSArray %p boilerplate updated %s->%s\n",
reinterpret_cast<void*>(this),
ElementsKindToString(kind),
ElementsKindToString(to_kind));
}
}
}
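
PossiblyTransitionArrayBoilerplate is where the tracked payload pays off: a store that generalizes a cloned array's elements kind walks back to the boilerplate and pre-transitions it, but only below the 8 * 1024-element cap, since huge arrays are unlikely to come from a hot local literal. A condensed model with stand-in types; the info-record lookup is elided and IsMoreGeneral is a deliberate simplification of IsMoreGeneralElementsKindTransition:

#include <cstdint>
#include <cstdio>

enum ElementsKind { FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS };

// Stand-in for the boilerplate JSArray reached through the info record's
// payload pointer; only the shape matters here.
struct Boilerplate {
  ElementsKind kind;
  uint32_t length;
};

// Simplified stand-in for IsMoreGeneralElementsKindTransition().
bool IsMoreGeneral(ElementsKind from, ElementsKind to) {
  return from == FAST_SMI_ELEMENTS && to != FAST_SMI_ELEMENTS;
}

bool MaybeTransitionBoilerplate(Boilerplate* payload, ElementsKind to_kind) {
  if (!IsMoreGeneral(payload->kind, to_kind)) return false;
  if (payload->length > 8 * 1024) return false;  // huge arrays: leave alone
  payload->kind = to_kind;                       // pre-transition in place
  return true;
}

int main() {
  Boilerplate b = {FAST_SMI_ELEMENTS, 3};
  std::printf("%d\n", MaybeTransitionBoilerplate(&b, FAST_DOUBLE_ELEMENTS));  // 1
  return 0;
}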

View File

@@ -4981,7 +4981,7 @@ class Map: public HeapObject {
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
inline bool CanTrackAllocationSite();
inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared);
inline bool is_observed();
@@ -6905,9 +6905,10 @@ class TypeFeedbackInfo: public Struct {
};
enum AllocationSiteInfoMode {
DONT_TRACK_ALLOCATION_SITE_INFO,
TRACK_ALLOCATION_SITE_INFO
enum AllocationSiteMode {
DONT_TRACK_ALLOCATION_SITE,
TRACK_ALLOCATION_SITE,
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
@@ -6923,6 +6924,9 @@ class AllocationSiteInfo: public Struct {
// Returns NULL if no AllocationSiteInfo is available for object.
static AllocationSiteInfo* FindForJSObject(JSObject* object);
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind);
static AllocationSiteMode GetMode(ElementsKind from, ElementsKind to);
static const int kPayloadOffset = HeapObject::kHeaderSize;
static const int kSize = kPayloadOffset + kPointerSize;
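
So the record is exactly two words: a heap-object header (one map pointer) followed by the payload pointer back at the boilerplate. A standalone check of that arithmetic, assuming a one-word header:

#include <cstddef>

const int kPointerSize = static_cast<int>(sizeof(void*));
const int kHeaderSize = kPointerSize;             // assumption: one map word
const int kPayloadOffset = kHeaderSize;
const int kSize = kPayloadOffset + kPointerSize;  // header word + payload word

static_assert(kSize == 2 * kPointerSize, "map word plus payload word");

int main() { return 0; }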

View File

@@ -667,7 +667,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
isolate->heap()->fixed_cow_array_map()) {
isolate->counters()->cow_arrays_created_runtime()->Increment();
}
return isolate->heap()->CopyJSObject(JSObject::cast(*boilerplate));
JSObject* boilerplate_object = JSObject::cast(*boilerplate);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(
boilerplate_object->GetElementsKind());
return isolate->heap()->CopyJSObject(boilerplate_object, mode);
}

View File

@@ -316,7 +316,7 @@ static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteInfoMode allocation_site_info_mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
@@ -332,7 +332,7 @@ static void GenerateFastCloneShallowArrayCommon(
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
@@ -345,7 +345,7 @@ static void GenerateFastCloneShallowArrayCommon(
}
__ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
__ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
__ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
@@ -363,7 +363,7 @@ static void GenerateFastCloneShallowArrayCommon(
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ lea(rdx, Operand(rax, JSArray::kSize));
@@ -414,22 +414,14 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
FastCloneShallowArrayStub::Mode mode = mode_;
// rcx is boilerplate object.
Factory* factory = masm->isolate()->factory();
AllocationSiteInfoMode allocation_site_info_mode =
DONT_TRACK_ALLOCATION_SITE_INFO;
if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
mode = CLONE_ANY_ELEMENTS;
allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
}
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
factory->fixed_cow_array_map());
__ j(not_equal, &check_fast_elements);
GenerateFastCloneShallowArrayCommon(masm, 0,
COPY_ON_WRITE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
@@ -437,9 +429,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
factory->fixed_array_map());
__ j(not_equal, &double_elements);
GenerateFastCloneShallowArrayCommon(masm, length_,
CLONE_ELEMENTS,
allocation_site_info_mode,
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
@@ -471,7 +462,8 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_info_mode, &slow_case);
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&slow_case);

View File

@@ -251,7 +251,8 @@ ModuloFunction CreateModuloFunction() {
#define __ ACCESS_MASM(masm)
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
MacroAssembler* masm, AllocationSiteMode mode,
Label* allocation_site_info_found) {
// ----------- S t a t e -------------
// -- rax : value
// -- rbx : target map
@@ -259,6 +260,12 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
if (mode == TRACK_ALLOCATION_SITE) {
ASSERT(allocation_site_info_found != NULL);
masm->TestJSArrayForAllocationSiteInfo(rdx, rdi,
allocation_site_info_found);
}
// Set transitioned map.
__ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
__ RecordWriteField(rdx,
@@ -272,7 +279,7 @@ void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- rax : value
// -- rbx : target map
@@ -283,7 +290,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
// The fail label is not actually used since we do not allocate.
Label allocated, new_backing_store, only_change_map, done;
if (FLAG_track_allocation_sites) {
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(rdx, rdi, fail);
}
@@ -398,7 +405,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
void ElementsTransitionGenerator::GenerateDoubleToObject(
MacroAssembler* masm, Label* fail) {
MacroAssembler* masm, AllocationSiteMode mode, Label* fail) {
// ----------- S t a t e -------------
// -- rax : value
// -- rbx : target map
@@ -408,6 +415,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -----------------------------------
Label loop, entry, convert_hole, gc_required, only_change_map;
if (mode == TRACK_ALLOCATION_SITE) {
masm->TestJSArrayForAllocationSiteInfo(rdx, rdi, fail);
}
// Check for empty arrays, which only require a map transition and no changes
// to the backing store.
__ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));

View File

@@ -1689,6 +1689,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1) {
@@ -1698,21 +1699,19 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// Tracking allocation info allows us to pre-transition later if it makes
// sense.
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS &&
FLAG_track_allocation_sites) {
mode = FastCloneShallowArrayStub::
CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}

View File

@@ -709,7 +709,9 @@ static void KeyedStoreGenerateGenericHelper(
rbx,
rdi,
slow);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, slow);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
@@ -720,7 +722,9 @@ static void KeyedStoreGenerateGenericHelper(
rbx,
rdi,
slow);
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
slow);
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -734,7 +738,8 @@ static void KeyedStoreGenerateGenericHelper(
rbx,
rdi,
slow);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, slow);
mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
}
@@ -1670,7 +1675,9 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS);
ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, &fail);
__ movq(rax, rdx);
__ Ret();
__ bind(&fail);
@@ -1693,7 +1700,9 @@ void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
AllocationSiteMode mode = AllocationSiteInfo::GetMode(FAST_DOUBLE_ELEMENTS,
FAST_ELEMENTS);
ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, &fail);
__ movq(rax, rdx);
__ Ret();
__ bind(&fail);

View File

@@ -4903,6 +4903,8 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
Handle<FixedArray> literals(instr->environment()->closure()->literals());
ElementsKind boilerplate_elements_kind =
instr->hydrogen()->boilerplate_elements_kind();
AllocationSiteMode allocation_site_mode =
instr->hydrogen()->allocation_site_mode();
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
@@ -4933,7 +4935,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(instr->hydrogen()->depth() == 1);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
@@ -4942,9 +4944,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
} else {
FastCloneShallowArrayStub::Mode mode =
boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -4953,10 +4955,14 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset) {
int* offset,
AllocationSiteMode mode) {
ASSERT(!source.is(rcx));
ASSERT(!result.is(rcx));
bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
object->map()->CanTrackAllocationSite();
// Only elements backing stores for non-COW arrays need to be copied.
Handle<FixedArrayBase> elements(object->elements());
bool has_elements = elements->length() > 0 &&
@@ -4966,8 +4972,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
// this object and its backing store.
int object_offset = *offset;
int object_size = object->map()->instance_size();
int elements_offset = *offset + object_size;
int elements_size = has_elements ? elements->Size() : 0;
int elements_offset = *offset + object_size;
if (create_allocation_site_info) {
elements_offset += AllocationSiteInfo::kSize;
*offset += AllocationSiteInfo::kSize;
}
*offset += object_size + elements_size;
// Copy object header.
@@ -4992,7 +5003,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ lea(rcx, Operand(result, *offset));
__ movq(FieldOperand(result, total_offset), rcx);
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
__ movq(FieldOperand(result, total_offset), rcx);
@ -5002,6 +5014,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
}
}
// Build Allocation Site Info if desired
if (create_allocation_site_info) {
__ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
__ movq(FieldOperand(result, object_size), kScratchRegister);
__ movq(FieldOperand(result, object_size + kPointerSize), source);
}
if (has_elements) {
// Copy elements backing store header.
__ LoadHeapObject(source, elements);
@ -5032,7 +5051,8 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ lea(rcx, Operand(result, *offset));
__ movq(FieldOperand(result, total_offset), rcx);
__ LoadHeapObject(source, value_object);
EmitDeepCopy(value_object, result, source, offset);
EmitDeepCopy(value_object, result, source, offset,
DONT_TRACK_ALLOCATION_SITE);
} else if (value->IsHeapObject()) {
__ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
__ movq(FieldOperand(result, total_offset), rcx);
@ -5082,7 +5102,8 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
__ bind(&allocated);
int offset = 0;
__ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset,
instr->hydrogen()->allocation_site_mode());
ASSERT_EQ(size, offset);
}
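Net effect of the EmitDeepCopy changes: a tracked copy carries a two-word AllocationSiteInfo wedged between the object and its elements backing store, and only the outermost literal is tracked, since every recursive call passes DONT_TRACK_ALLOCATION_SITE. The layout the stores above produce, as a comment sketch (field names are descriptive, not V8 identifiers):

// Copy layout when mode == TRACK_ALLOCATION_SITE:
//
//   result + 0                            JSObject header + in-object fields
//   result + object_size                  AllocationSiteInfo map word
//   result + object_size + kPointerSize   payload: pointer to the boilerplate
//                                         (still live in the source register)
//   result + elements_offset              copied elements backing store
//
// This is why both elements_offset and *offset grow by
// AllocationSiteInfo::kSize before any elements are written.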

@ -334,7 +334,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitDeepCopy(Handle<JSObject> object,
Register result,
Register source,
int* offset);
int* offset,
AllocationSiteMode mode);
struct JumpTableEntry {
inline JumpTableEntry(Address entry, bool frame, bool is_lazy)

@ -1579,7 +1579,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
&try_holey_map);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
// Restore edi.
__ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
__ jmp(&fast_object);
@ -1591,7 +1593,9 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
rdi,
&call_builtin);
ElementsTransitionGenerator::
GenerateMapChangeElementsTransition(masm());
GenerateMapChangeElementsTransition(masm(),
DONT_TRACK_ALLOCATION_SITE,
NULL);
__ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
__ bind(&fast_object);
} else {

@ -0,0 +1,114 @@
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
// Flags: --track-allocation-sites
// Test element kind of objects.
// Since --smi-only-arrays affects builtins, its default setting at compile
// time sticks if built with snapshot. If --smi-only-arrays is deactivated
// by default, only a no-snapshot build actually has smi-only arrays enabled
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
var support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
} else {
print("Tests do NOT include smi-only arrays.");
}
var elements_kind = {
fast_smi_only : 'fast smi only elements',
fast : 'fast elements',
fast_double : 'fast double elements',
dictionary : 'dictionary elements',
external_byte : 'external byte elements',
external_unsigned_byte : 'external unsigned byte elements',
external_short : 'external short elements',
external_unsigned_short : 'external unsigned short elements',
external_int : 'external int elements',
external_unsigned_int : 'external unsigned int elements',
external_float : 'external float elements',
external_double : 'external double elements',
external_pixel : 'external pixel elements'
};
function getKind(obj) {
if (%HasFastSmiElements(obj)) return elements_kind.fast_smi_only;
if (%HasFastObjectElements(obj)) return elements_kind.fast;
if (%HasFastDoubleElements(obj)) return elements_kind.fast_double;
if (%HasDictionaryElements(obj)) return elements_kind.dictionary;
}
function assertKind(expected, obj, name_opt) {
if (!support_smi_only_arrays &&
expected == elements_kind.fast_smi_only) {
expected = elements_kind.fast;
}
assertEquals(expected, getKind(obj), name_opt);
}
if (support_smi_only_arrays) {
function fastliteralcase(value) {
var literal = [1, 2, 3];
literal[0] = value;
return literal;
}
// Case: [1,2,3] as allocation site
obj = fastliteralcase(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = fastliteralcase(1.5);
assertKind(elements_kind.fast_double, obj);
obj = fastliteralcase(2);
assertKind(elements_kind.fast_double, obj);
// Verify that we will not pretransition the double->fast path.
obj = fastliteralcase("elliot");
assertKind(elements_kind.fast, obj);
// This fails until we turn off optimistic transitions to the
// most general elements kind seen on keyed stores. It's a goal
// to turn it off, but for now we need it.
// obj = fastliteralcase(3);
// assertKind(elements_kind.fast_double, obj);
function fastliteralcase_smifast(value) {
var literal = [1, 2, 3, 4];
literal[0] = value;
return literal;
}
obj = fastliteralcase_smifast(1);
assertKind(elements_kind.fast_smi_only, obj);
obj = fastliteralcase_smifast("carter");
assertKind(elements_kind.fast, obj);
obj = fastliteralcase_smifast(2);
assertKind(elements_kind.fast, obj);
}
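For reading the test, note what the runtime side above implies about the AllocationSiteInfo object itself: the stores in EmitDeepCopy write exactly two words, a map and a back-pointer, so kSize must cover one header plus one payload slot. A hypothetical declaration consistent with those stores (the payload name is an assumption, not confirmed by this diff):

// Hypothetical sketch, inferred from the two-word writes in the hunks above.
class AllocationSiteInfo : public HeapObject {
 public:
  // Payload points back at the boilerplate (for literals) or the transitioned
  // object, so later allocations at this site can reuse the observed
  // ElementsKind.
  static const int kPayloadOffset = HeapObject::kHeaderSize;
  static const int kSize = kPayloadOffset + kPointerSize;
};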