The goal is to discover the appropriate heap space for objects created in full
code. By the time we optimize the code, we'll be able to decide on new or old
space based on the number of surviving objects after one or more GCs.

The mechanism is a "memento" placed behind objects in the heap. It's currently
done for array and object literals, with plans to use mementos for constructed
objects as well (in a later CL).

The feature is behind the flag allocation_site_pretenuring, currently off.
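
To make the counting scheme concrete, here is a minimal standalone sketch of the per-site bookkeeping and the tenuring decision. It is illustrative only: PretenureSite, DigestFeedback and the other names are simplified stand-ins, with the 0.60 ratio and the minimum of 100 created mementos mirroring kPretenureRatio and kPretenureMinimumCreated from this CL.

// Sketch: per-allocation-site pretenuring feedback with toy types.
// Counters mirror memento_create_count / memento_found_count; the decision
// mirrors AllocationSite::DigestPretenuringFeedback() in this CL.
#include <cstdio>

enum PretenureDecision { kUndecided, kDontTenure, kTenure };

struct PretenureSite {
  int mementos_created = 0;  // bumped when an object + memento is allocated
  int mementos_found = 0;    // bumped when a GC finds a surviving memento
  PretenureDecision decision = kUndecided;
};

const double kRatio = 0.60;       // mirrors AllocationSite::kPretenureRatio
const int kMinimumCreated = 100;  // mirrors kPretenureMinimumCreated

// Called at GC epilogue for each site; returns true if a decision was made.
bool DigestFeedback(PretenureSite* site) {
  bool decided = false;
  if (site->decision == kUndecided &&
      site->mementos_created >= kMinimumCreated) {
    double ratio =
        static_cast<double>(site->mementos_found) / site->mementos_created;
    site->decision = (ratio >= kRatio) ? kTenure : kDontTenure;
    decided = true;
  }
  // Feedback fields are cleared until the next GC, as in the CL.
  site->mementos_created = 0;
  site->mementos_found = 0;
  return decided;
}

int main() {
  PretenureSite site;
  site.mementos_created = 100;
  site.mementos_found = 65;  // 65% of the objects survived a collection
  DigestFeedback(&site);
  std::printf("tenure? %d\n", site.decision == kTenure);  // prints 1
}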

R=hpayer@chromium.org

Review URL: https://codereview.chromium.org/40063002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18104 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mvstanton@chromium.org 2013-11-27 14:03:40 +00:00
parent 50a4bb5084
commit f3a22f965e
17 changed files with 310 additions and 85 deletions

View File

@ -82,4 +82,21 @@ void AllocationSiteCreationContext::ExitScope(
}
}
bool AllocationSiteUsageContext::ShouldCreateMemento(Handle<JSObject> object) {
if (activated_ && AllocationSite::CanTrack(object->map()->instance_type())) {
if (FLAG_allocation_site_pretenuring ||
AllocationSite::GetMode(object->GetElementsKind()) ==
TRACK_ALLOCATION_SITE) {
if (FLAG_trace_creation_allocation_sites) {
PrintF("*** Creating Memento for %s %p\n",
object->IsJSArray() ? "JSArray" : "JSObject",
static_cast<void*>(*object));
}
return true;
}
}
return false;
}
} } // namespace v8::internal
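
The decision above collapses to a small predicate: with pretenuring enabled, every trackable object gets a memento; otherwise one is only written while element transitions are still being tracked. A hedged standalone restatement, with plain bools standing in for the V8 flags and types:

// Sketch of the memento-creation decision in ShouldCreateMemento above.
#include <cstdio>

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

bool ShouldCreateMemento(bool activated, bool can_track_instance_type,
                         bool allocation_site_pretenuring_flag,
                         AllocationSiteMode elements_transition_mode) {
  if (activated && can_track_instance_type) {
    // With pretenuring on, every trackable object gets a memento; otherwise a
    // memento is only worth writing while element transitions are tracked.
    return allocation_site_pretenuring_flag ||
           elements_transition_mode == TRACK_ALLOCATION_SITE;
  }
  return false;
}

int main() {
  // Pretenuring off, elements already in their final kind: no memento.
  std::printf("%d\n", ShouldCreateMemento(true, true, false,
                                          DONT_TRACK_ALLOCATION_SITE));  // 0
  // Pretenuring on: memento regardless of the transition mode.
  std::printf("%d\n", ShouldCreateMemento(true, true, true,
                                          DONT_TRACK_ALLOCATION_SITE));  // 1
}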

View File

@ -41,16 +41,14 @@ namespace internal {
// boilerplate with AllocationSite and AllocationMemento support.
class AllocationSiteContext {
public:
AllocationSiteContext(Isolate* isolate, bool activated) {
explicit AllocationSiteContext(Isolate* isolate) {
isolate_ = isolate;
activated_ = activated;
};
Handle<AllocationSite> top() { return top_; }
Handle<AllocationSite> current() { return current_; }
// If activated, then recursively create mementos
bool activated() const { return activated_; }
bool ShouldCreateMemento(Handle<JSObject> object) { return false; }
Isolate* isolate() { return isolate_; }
@ -68,7 +66,6 @@ class AllocationSiteContext {
Isolate* isolate_;
Handle<AllocationSite> top_;
Handle<AllocationSite> current_;
bool activated_;
};
@ -77,7 +74,7 @@ class AllocationSiteContext {
class AllocationSiteCreationContext : public AllocationSiteContext {
public:
explicit AllocationSiteCreationContext(Isolate* isolate)
: AllocationSiteContext(isolate, true) { }
: AllocationSiteContext(isolate) { }
Handle<AllocationSite> EnterNewScope();
void ExitScope(Handle<AllocationSite> site, Handle<JSObject> object);
@ -90,8 +87,9 @@ class AllocationSiteUsageContext : public AllocationSiteContext {
public:
AllocationSiteUsageContext(Isolate* isolate, Handle<AllocationSite> site,
bool activated)
: AllocationSiteContext(isolate, activated),
top_site_(site) { }
: AllocationSiteContext(isolate),
top_site_(site),
activated_(activated) { }
inline Handle<AllocationSite> EnterNewScope() {
if (top().is_null()) {
@ -113,8 +111,11 @@ class AllocationSiteUsageContext : public AllocationSiteContext {
ASSERT(object.is_null() || *object == scope_site->transition_info());
}
bool ShouldCreateMemento(Handle<JSObject> object);
private:
Handle<AllocationSite> top_site_;
bool activated_;
};

View File

@ -1784,6 +1784,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
// If the only customer of allocation sites is transitioning, then
// we can turn it off if we don't have anywhere else to transition to.
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
@ -1792,7 +1800,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
@ -1807,12 +1815,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
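
The same hoisting of the allocation-site-mode choice is repeated in each architecture's full code generator below. A standalone sketch of the selection logic, with plain bools standing in for FLAG_track_allocation_sites and FLAG_allocation_site_pretenuring:

// Sketch of the hoisted allocation-site-mode choice for array literals.
#include <cstdio>

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

AllocationSiteMode ChooseMode(bool track_allocation_sites_flag,
                              bool allocation_site_pretenuring_flag,
                              bool has_constant_fast_elements) {
  AllocationSiteMode mode = track_allocation_sites_flag
      ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
  // Previously, constant fast elements always dropped tracking (nothing left
  // to transition to). With pretenuring, the site is still wanted for its
  // memento counts, so tracking is only dropped when pretenuring is off.
  if (has_constant_fast_elements && !allocation_site_pretenuring_flag) {
    mode = DONT_TRACK_ALLOCATION_SITE;
  }
  return mode;
}

int main() {
  std::printf("%d\n", ChooseMode(true, true, true));   // 1: keep tracking
  std::printf("%d\n", ChooseMode(true, false, true));  // 0: drop tracking
}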

View File

@ -483,6 +483,18 @@ void Heap::ScavengePointer(HeapObject** p) {
}
void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
if (FLAG_allocation_site_pretenuring && object->IsJSObject()) {
AllocationMemento* memento = AllocationMemento::FindForJSObject(
JSObject::cast(object), true);
if (memento != NULL) {
ASSERT(memento->IsValid());
memento->GetAllocationSite()->IncrementMementoFoundCount();
}
}
}
void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
ASSERT(object->GetIsolate()->heap()->InFromSpace(object));
@ -501,12 +513,7 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
return;
}
if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
if (AllocationMemento::FindForJSObject(JSObject::cast(object), true) !=
NULL) {
object->GetIsolate()->heap()->allocation_mementos_found_++;
}
}
UpdateAllocationSiteFeedback(object);
// AllocationMementos are unrooted and shouldn't survive a scavenge
ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
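
The point of the new helper is that both GC paths, the scavenger here and mark-compact's promotion pass later in this CL, now credit the surviving object's allocation site instead of bumping the old per-heap allocation_mementos_found_ counter. A toy sketch with simplified stand-in types:

// Sketch of the shared feedback helper called from two GC paths.
#include <vector>
#include <cstdio>

struct Site { int memento_found_count = 0; };

struct Object {
  Site* trailing_memento_site = nullptr;  // non-null if a memento follows it
};

// Stands in for Heap::UpdateAllocationSiteFeedback().
void UpdateAllocationSiteFeedback(const Object& o) {
  if (o.trailing_memento_site != nullptr) {
    o.trailing_memento_site->memento_found_count++;
  }
}

// Called from the scavenger for each surviving new-space object.
void Scavenge(const std::vector<Object>& survivors) {
  for (const Object& o : survivors) UpdateAllocationSiteFeedback(o);
}

// Called from mark-compact when black objects on a new-space page are promoted.
void PromoteBlackObjects(const std::vector<Object>& promoted) {
  for (const Object& o : promoted) UpdateAllocationSiteFeedback(o);
}

int main() {
  Site site;
  std::vector<Object> objs(3, Object{&site});
  Scavenge(objs);
  PromoteBlackObjects(objs);
  std::printf("found = %d\n", site.memento_found_count);  // 6
}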

View File

@ -87,7 +87,6 @@ Heap::Heap()
contexts_disposed_(0),
global_ic_age_(0),
flush_monomorphic_ics_(false),
allocation_mementos_found_(0),
scan_on_scavenge_pages_(0),
new_space_(this),
old_pointer_space_(NULL),
@ -506,6 +505,40 @@ void Heap::RepairFreeListsAfterBoot() {
void Heap::GarbageCollectionEpilogue() {
if (FLAG_allocation_site_pretenuring) {
int tenure_decisions = 0;
int dont_tenure_decisions = 0;
int allocation_mementos_found = 0;
Object* cur = allocation_sites_list();
while (cur->IsAllocationSite()) {
AllocationSite* casted = AllocationSite::cast(cur);
allocation_mementos_found += casted->memento_found_count()->value();
if (casted->DigestPretenuringFeedback()) {
if (casted->GetPretenureMode() == TENURED) {
tenure_decisions++;
} else {
dont_tenure_decisions++;
}
}
cur = casted->weak_next();
}
// TODO(mvstanton): Pretenure decisions are only made once for an allocation
// site. Find a sane way to decide about revisiting the decision later.
if (FLAG_trace_track_allocation_sites &&
(allocation_mementos_found > 0 ||
tenure_decisions > 0 ||
dont_tenure_decisions > 0)) {
PrintF("GC: (#mementos, #tenure decisions, #donttenure decisions) "
"(%d, %d, %d)\n",
allocation_mementos_found,
tenure_decisions,
dont_tenure_decisions);
}
}
store_buffer()->GCEpilogue();
// In release mode, we only zap the from space under heap verification.
@ -1393,8 +1426,6 @@ class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
void Heap::Scavenge() {
RelocationLock relocation_lock(this);
allocation_mementos_found_ = 0;
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
#endif
@ -1542,11 +1573,6 @@ void Heap::Scavenge() {
gc_state_ = NOT_IN_GC;
scavenges_since_last_idle_round_++;
if (FLAG_trace_track_allocation_sites && allocation_mementos_found_ > 0) {
PrintF("AllocationMementos found during scavenge = %d\n",
allocation_mementos_found_);
}
}
@ -4359,6 +4385,17 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
}
void Heap::InitializeAllocationMemento(AllocationMemento* memento,
AllocationSite* allocation_site) {
memento->set_map_no_write_barrier(allocation_memento_map());
ASSERT(allocation_site->map() == allocation_site_map());
memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
if (FLAG_allocation_site_pretenuring) {
allocation_site->IncrementMementoCreateCount();
}
}
MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
Handle<AllocationSite> allocation_site) {
ASSERT(gc_state_ == NOT_IN_GC);
@ -4375,9 +4412,7 @@ MaybeObject* Heap::AllocateWithAllocationSite(Map* map, AllocationSpace space,
HeapObject::cast(result)->set_map_no_write_barrier(map);
AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
reinterpret_cast<Address>(result) + map->instance_size());
alloc_memento->set_map_no_write_barrier(allocation_memento_map());
ASSERT(allocation_site->map() == allocation_site_map());
alloc_memento->set_allocation_site(*allocation_site, SKIP_WRITE_BARRIER);
InitializeAllocationMemento(alloc_memento, *allocation_site);
return result;
}
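
InitializeAllocationMemento relies on the memento living directly behind the object, at the object address plus the instance size, within the same allocation. A simplified flat-memory sketch of that layout and the create-count bookkeeping (toy types, not V8's tagged heap):

// Sketch: place a trailing memento directly behind the object.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

struct Site { int memento_create_count = 0; };
struct Memento { Site* site; };  // stands in for AllocationMemento

// Allocate instance_size bytes for the object plus room for a trailing
// memento, and record the site so a later heap walk can find it.
void AllocateWithMemento(std::vector<uint8_t>& space, std::size_t instance_size,
                         Site* site) {
  const std::size_t base = space.size();
  space.resize(base + instance_size + sizeof(Memento));
  Memento memento{site};
  std::memcpy(space.data() + base + instance_size, &memento, sizeof(Memento));
  site->memento_create_count++;  // in V8, guarded by FLAG_allocation_site_pretenuring
}

int main() {
  std::vector<uint8_t> new_space;
  Site site;
  AllocateWithMemento(new_space, 32, &site);
  std::printf("created = %d\n", site.memento_create_count);  // 1
}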
@ -4810,8 +4845,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
int object_size = map->instance_size();
Object* clone;
ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) &&
map->instance_type() == JS_ARRAY_TYPE));
ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type()));
WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
@ -4850,9 +4884,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
if (site != NULL) {
AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
reinterpret_cast<Address>(clone) + object_size);
alloc_memento->set_map_no_write_barrier(allocation_memento_map());
ASSERT(site->map() == allocation_site_map());
alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER);
InitializeAllocationMemento(alloc_memento, site);
HeapProfiler* profiler = isolate()->heap_profiler();
if (profiler->is_tracking_allocations()) {
profiler->UpdateObjectSizeEvent(HeapObject::cast(clone)->address(),

View File

@ -1456,6 +1456,11 @@ class Heap {
static inline void ScavengePointer(HeapObject** p);
static inline void ScavengeObject(HeapObject** p, HeapObject* object);
// An object may have an AllocationSite associated with it through a trailing
// AllocationMemento. Its feedback should be updated when objects are found
// in the heap.
static inline void UpdateAllocationSiteFeedback(HeapObject* object);
// Support for partial snapshots. After calling this we have a linear
// space to write objects in each space.
void ReserveSpace(int *sizes, Address* addresses);
@ -1892,9 +1897,6 @@ class Heap {
bool flush_monomorphic_ics_;
// AllocationMementos found in new space.
int allocation_mementos_found_;
int scan_on_scavenge_pages_;
NewSpace new_space_;
@ -2110,6 +2112,8 @@ class Heap {
void InitializeJSObjectFromMap(JSObject* obj,
FixedArray* properties,
Map* map);
void InitializeAllocationMemento(AllocationMemento* memento,
AllocationSite* allocation_site);
bool CreateInitialMaps();
bool CreateInitialObjects();

View File

@ -2240,6 +2240,23 @@ HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
BuildCreateAllocationMemento(array,
JSArray::kSize,
allocation_site_payload);
if (FLAG_allocation_site_pretenuring) {
// TODO(mvstanton): move this code into BuildCreateAllocationMemento when
// constructed arrays also pay attention to pretenuring.
HObjectAccess access =
HObjectAccess::ForAllocationSiteOffset(
AllocationSite::kMementoCreateCountOffset);
HValue* create_info = Add<HLoadNamedField>(allocation_site_payload,
access);
HInstruction* new_create_info = HAdd::New(zone(), context(),
create_info,
graph()->GetConstant1());
new_create_info->ClearFlag(HValue::kCanOverflow);
HStoreNamedField* store = Add<HStoreNamedField>(allocation_site_payload,
access, new_create_info);
// No write barrier needed to store a smi.
store->SkipWriteBarrier();
}
}
int elements_location = JSArray::kSize;
@ -9332,8 +9349,26 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
? HType::JSArray() : HType::JSObject();
HValue* object_size_constant = Add<HConstant>(
boilerplate_object->map()->instance_size());
// We should pull the pretenure mode from the allocation site.
// For now, just see what it says, and remark on it if it says
// we should pretenure. That means the rudimentary counting in the garbage
// collector is having an effect.
PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
if (FLAG_allocation_site_pretenuring) {
pretenure_flag = site_context->current()->GetPretenureMode()
? TENURED
: NOT_TENURED;
if (FLAG_trace_track_allocation_sites) {
PrintF("Hydrogen: AllocationSite %p boilerplate %p %s\n",
static_cast<void*>(*(site_context->current())),
static_cast<void*>(*boilerplate_object),
pretenure_flag == TENURED ? "tenured" : "not tenured");
}
}
HInstruction* object = Add<HAllocate>(object_size_constant, type,
isolate()->heap()->GetPretenureMode(), instance_type);
pretenure_flag, instance_type);
BuildEmitObjectHeader(boilerplate_object, object);
@ -9347,10 +9382,10 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
HValue* object_elements_size = Add<HConstant>(elements_size);
if (boilerplate_object->HasFastDoubleElements()) {
object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
isolate()->heap()->GetPretenureMode(), FIXED_DOUBLE_ARRAY_TYPE);
pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE);
} else {
object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
isolate()->heap()->GetPretenureMode(), FIXED_ARRAY_TYPE);
pretenure_flag, FIXED_ARRAY_TYPE);
}
}
BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);
@ -9363,7 +9398,8 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
// Copy in-object properties.
if (boilerplate_object->map()->NumberOfFields() != 0) {
BuildEmitInObjectProperties(boilerplate_object, object, site_context);
BuildEmitInObjectProperties(boilerplate_object, object, site_context,
pretenure_flag);
}
return object;
}
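
The net effect in BuildFastLiteral is that a single pretenure flag is derived from the allocation site's decision and reused for the literal object, its elements backing store, and any HeapNumber boxes, rather than consulting the heap's global GetPretenureMode() for each allocation. A hedged sketch with illustrative stand-in types:

// Sketch: derive the pretenure flag once, reuse it for related allocations.
#include <cstdio>

enum PretenureFlag { NOT_TENURED, TENURED };
struct Site { bool tenure_decision = false; };

PretenureFlag PretenureModeFor(const Site& site, bool pretenuring_flag,
                               PretenureFlag heap_default) {
  return pretenuring_flag ? (site.tenure_decision ? TENURED : NOT_TENURED)
                          : heap_default;
}

void Allocate(const char* what, PretenureFlag flag) {
  std::printf("%s -> %s\n", what, flag == TENURED ? "old space" : "new space");
}

int main() {
  Site site;
  site.tenure_decision = true;
  PretenureFlag flag = PretenureModeFor(site, /*pretenuring_flag=*/true,
                                        NOT_TENURED);
  Allocate("literal object", flag);
  Allocate("elements store", flag);
  Allocate("heap number box", flag);
}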
@ -9416,7 +9452,8 @@ void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
Handle<JSObject> boilerplate_object,
HInstruction* object,
AllocationSiteUsageContext* site_context) {
AllocationSiteUsageContext* site_context,
PretenureFlag pretenure_flag) {
Handle<DescriptorArray> descriptors(
boilerplate_object->map()->instance_descriptors());
int limit = boilerplate_object->map()->NumberOfOwnDescriptors();
@ -9452,15 +9489,13 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
if (representation.IsDouble()) {
// Allocate a HeapNumber box and store the value into it.
HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
// TODO(mvstanton): This heap number alloc does not have a corresponding
// This heap number alloc does not have a corresponding
// AllocationSite. That is okay because
// 1) it's a child object of another object with a valid allocation site
// 2) we can just use the mode of the parent object for pretenuring
// The todo is replace GetPretenureMode() with
// site_context->top()->GetPretenureMode().
HInstruction* double_box =
Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
isolate()->heap()->GetPretenureMode(), HEAP_NUMBER_TYPE);
pretenure_flag, HEAP_NUMBER_TYPE);
AddStoreMapConstant(double_box,
isolate()->factory()->heap_number_map());
Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),

View File

@ -2422,7 +2422,8 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
void BuildEmitInObjectProperties(Handle<JSObject> boilerplate_object,
HInstruction* object,
AllocationSiteUsageContext* site_context);
AllocationSiteUsageContext* site_context,
PretenureFlag pretenure_flag);
void BuildEmitElements(Handle<JSObject> boilerplate_object,
Handle<FixedArrayBase> elements,

View File

@ -1720,6 +1720,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
// If the only customer of allocation sites is transitioning, then
// we can turn it off if we don't have anywhere else to transition to.
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
@ -1732,7 +1740,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled() ||
@ -1748,14 +1756,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

View File

@ -406,8 +406,6 @@ void MarkCompactCollector::CollectGarbage() {
ASSERT(state_ == PREPARE_GC);
ASSERT(encountered_weak_collections_ == Smi::FromInt(0));
heap()->allocation_mementos_found_ = 0;
MarkLiveObjects();
ASSERT(heap_->incremental_marking()->IsStopped());
@ -449,11 +447,6 @@ void MarkCompactCollector::CollectGarbage() {
marking_parity_ = EVEN_MARKING_PARITY;
}
if (FLAG_trace_track_allocation_sites &&
heap()->allocation_mementos_found_ > 0) {
PrintF("AllocationMementos found during mark-sweep = %d\n",
heap()->allocation_mementos_found_);
}
tracer_ = NULL;
}
@ -1889,6 +1882,14 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
virtual Object* RetainAs(Object* object) {
if (Marking::MarkBitFrom(HeapObject::cast(object)).Get()) {
return object;
} else if (object->IsAllocationSite() &&
!(AllocationSite::cast(object)->IsZombie())) {
// "dead" AllocationSites need to live long enough for a traversal of new
// space. These sites get a one-time reprieve.
AllocationSite* site = AllocationSite::cast(object);
site->MarkZombie();
site->GetHeap()->mark_compact_collector()->MarkAllocationSite(site);
return object;
} else {
return NULL;
}
@ -2000,12 +2001,7 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage(
int size = object->Size();
survivors_size += size;
if (FLAG_trace_track_allocation_sites && object->IsJSObject()) {
if (AllocationMemento::FindForJSObject(JSObject::cast(object), true)
!= NULL) {
heap()->allocation_mementos_found_++;
}
}
Heap::UpdateAllocationSiteFeedback(object);
offset++;
current_cell >>= 1;
@ -2098,6 +2094,12 @@ void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
}
void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
MarkBit mark_bit = Marking::MarkBitFrom(site);
SetMark(site, mark_bit);
}
void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
// Mark the heap roots including global variables, stack variables,
// etc., and all objects reachable from them.

View File

@ -739,6 +739,10 @@ class MarkCompactCollector {
// marking its contents.
void MarkWeakObjectToCodeTable();
// Special case for processing weak references in a full collection. We need
// to artificially keep AllocationSites alive for a time.
void MarkAllocationSite(AllocationSite* site);
private:
MarkCompactCollector();
~MarkCompactCollector();

View File

@ -1796,6 +1796,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
// If the only customer of allocation sites is transitioning, then
// we can turn it off if we don't have anywhere else to transition to.
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ mov(a0, result_register());
__ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
@ -1805,7 +1813,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
@ -1820,12 +1828,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);

View File

@ -1322,6 +1322,16 @@ void AllocationSite::Initialize() {
}
void AllocationSite::MarkZombie() {
ASSERT(!IsZombie());
set_pretenure_decision(Smi::FromInt(kZombie));
// Clear all non-smi fields
set_transition_info(Smi::FromInt(0));
set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
SKIP_WRITE_BARRIER);
}
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
@ -1348,6 +1358,9 @@ AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
inline bool AllocationSite::CanTrack(InstanceType type) {
if (FLAG_allocation_site_pretenuring) {
return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE;
}
return type == JS_ARRAY_TYPE;
}
@ -1367,6 +1380,45 @@ inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
}
inline void AllocationSite::IncrementMementoFoundCount() {
int value = memento_found_count()->value();
set_memento_found_count(Smi::FromInt(value + 1));
}
inline void AllocationSite::IncrementMementoCreateCount() {
ASSERT(FLAG_allocation_site_pretenuring);
int value = memento_create_count()->value();
set_memento_create_count(Smi::FromInt(value + 1));
}
inline bool AllocationSite::DigestPretenuringFeedback() {
bool decision_made = false;
if (!PretenuringDecisionMade()) {
int create_count = memento_create_count()->value();
if (create_count >= kPretenureMinimumCreated) {
int found_count = memento_found_count()->value();
double ratio = static_cast<double>(found_count) / create_count;
if (FLAG_trace_track_allocation_sites) {
PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
static_cast<void*>(this), create_count, found_count, ratio);
}
int result = ratio >= kPretenureRatio ? kTenure : kDontTenure;
set_pretenure_decision(Smi::FromInt(result));
decision_made = true;
// TODO(mvstanton): if the decision represents a change, any dependent
// code registered for pretenuring changes should be deopted.
}
}
// Clear feedback calculation fields until the next gc.
set_memento_found_count(Smi::FromInt(0));
set_memento_create_count(Smi::FromInt(0));
return decision_made;
}
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
object->ValidateElements();
ElementsKind elements_kind = object->map()->elements_kind();
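
DigestPretenuringFeedback only decides once enough mementos have been created in a cycle, and a decision, once made, is not revisited (see the TODO in heap.cc). A small sketch of that gating, with constants mirroring the CL and toy types:

// Sketch of the gating in DigestPretenuringFeedback above.
#include <cstdio>

const int kPretenureMinimumCreated = 100;
const double kPretenureRatio = 0.60;

enum Decision { kUndecided, kDontTenure, kTenure };

Decision Digest(Decision current, int created, int found) {
  if (current != kUndecided || created < kPretenureMinimumCreated) {
    return current;  // too little data, or already decided: leave it alone
  }
  double ratio = static_cast<double>(found) / created;
  return ratio >= kPretenureRatio ? kTenure : kDontTenure;
}

int main() {
  std::printf("%d\n", Digest(kUndecided, 40, 40));     // 0: below the minimum
  std::printf("%d\n", Digest(kUndecided, 200, 50));    // 1: 25% survival, don't tenure
  std::printf("%d\n", Digest(kDontTenure, 200, 190));  // 1: decision is sticky
}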

View File

@ -5718,10 +5718,7 @@ Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
Handle<JSObject> copy;
if (copying) {
Handle<AllocationSite> site_to_pass;
if (site_context()->activated() &&
AllocationSite::CanTrack(object->map()->instance_type()) &&
AllocationSite::GetMode(object->GetElementsKind()) ==
TRACK_ALLOCATION_SITE) {
if (site_context()->ShouldCreateMemento(object)) {
site_to_pass = site_context()->current();
}
CALL_AND_RETRY_OR_DIE(isolate,
@ -9181,9 +9178,10 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object,
bool in_GC) {
// Currently, AllocationMemento objects are only allocated immediately
// after JSArrays in NewSpace, and detecting whether a JSArray has one
// involves carefully checking the object immediately after the JSArray
// (if there is one) to see if it's an AllocationMemento.
// after JSArrays and some JSObjects in NewSpace. Detecting whether a
// memento is present involves carefully checking the object immediately
// after the current object (if there is one) to see if it's an
// AllocationMemento.
if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) {
Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
object->Size();
@ -9201,7 +9199,9 @@ AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object,
object->GetHeap()->allocation_memento_map()) {
AllocationMemento* memento = AllocationMemento::cast(
reinterpret_cast<Object*>(ptr_end + kHeapObjectTag));
return memento;
if (memento->IsValid()) {
return memento;
}
}
}
}
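
FindForJSObject inspects the word immediately after the object and only reports a memento if its map word is the memento map and, new in this CL, the memento is still valid, i.e. its site has not become a zombie. A simplified flat-memory sketch (not V8's tagged layout):

// Sketch of the trailing-memento lookup and the new validity check.
#include <cstdio>

struct Site { bool zombie = false; };

struct Memento {
  const void* map;  // stands in for the map word
  Site* site;
};

static const char kMementoMap = 0;  // address used as the "memento map" sentinel

const Memento* FindMemento(const void* candidate_after_object) {
  const Memento* m = static_cast<const Memento*>(candidate_after_object);
  if (m->map != &kMementoMap) return nullptr;  // something else follows the object
  if (m->site == nullptr || m->site->zombie) return nullptr;  // invalid memento
  return m;
}

int main() {
  Site site;
  Memento memento{&kMementoMap, &site};
  std::printf("%d\n", FindMemento(&memento) != nullptr);  // 1
  site.zombie = true;
  std::printf("%d\n", FindMemento(&memento) != nullptr);  // 0
}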
@ -12789,6 +12789,9 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object,
}
const double AllocationSite::kPretenureRatio = 0.60;
bool AllocationSite::IsNestedSite() {
ASSERT(FLAG_trace_track_allocation_sites);
Object* current = GetHeap()->allocation_sites_list();

View File

@ -8120,6 +8120,16 @@ enum AllocationSiteMode {
class AllocationSite: public Struct {
public:
static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;
static const double kPretenureRatio;
static const int kPretenureMinimumCreated = 100;
// Values for pretenure decision field.
enum {
kUndecided = 0,
kDontTenure = 1,
kTenure = 2,
kZombie = 3
};
DECL_ACCESSORS(transition_info, Object)
// nested_site threads a list of sites that represent nested literals
@ -8128,16 +8138,14 @@ class AllocationSite: public Struct {
DECL_ACCESSORS(nested_site, Object)
DECL_ACCESSORS(memento_found_count, Smi)
DECL_ACCESSORS(memento_create_count, Smi)
// TODO(mvstanton): we don't need a whole integer to record pretenure
// decision. Consider sharing space with memento_found_count.
DECL_ACCESSORS(pretenure_decision, Smi)
DECL_ACCESSORS(dependent_code, DependentCode)
DECL_ACCESSORS(weak_next, Object)
inline void Initialize();
bool HasNestedSites() {
return nested_site()->IsAllocationSite();
}
// This method is expensive, it should only be called for reporting.
bool IsNestedSite();
@ -8145,6 +8153,28 @@ class AllocationSite: public Struct {
class UnusedBits: public BitField<int, 15, 14> {};
class DoNotInlineBit: public BitField<bool, 29, 1> {};
inline void IncrementMementoFoundCount();
inline void IncrementMementoCreateCount();
PretenureFlag GetPretenureMode() {
int mode = pretenure_decision()->value();
// Zombie objects "decide" to be untenured.
return (mode == kTenure) ? TENURED : NOT_TENURED;
}
// The pretenuring decision is made during gc, and the zombie state allows
// us to recognize when an allocation site is just being kept alive because
// a later traversal of new space may discover AllocationMementos that point
// to this AllocationSite.
bool IsZombie() {
return pretenure_decision()->value() == kZombie;
}
inline void MarkZombie();
inline bool DigestPretenuringFeedback();
ElementsKind GetElementsKind() {
ASSERT(!SitePointsToLiteral());
int value = Smi::cast(transition_info())->value();
@ -8218,6 +8248,10 @@ class AllocationSite: public Struct {
private:
inline DependentCode::DependencyGroup ToDependencyGroup(Reason reason);
bool PretenuringDecisionMade() {
return pretenure_decision()->value() != kUndecided;
}
DISALLOW_IMPLICIT_CONSTRUCTORS(AllocationSite);
};
@ -8229,7 +8263,10 @@ class AllocationMemento: public Struct {
DECL_ACCESSORS(allocation_site, Object)
bool IsValid() { return allocation_site()->IsAllocationSite(); }
bool IsValid() {
return allocation_site()->IsAllocationSite() &&
!AllocationSite::cast(allocation_site())->IsZombie();
}
AllocationSite* GetAllocationSite() {
ASSERT(IsValid());
return AllocationSite::cast(allocation_site());

View File

@ -1741,6 +1741,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
// If the only customer of allocation sites is transitioning, then
// we can turn it off if we don't have anywhere else to transition to.
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
@ -1753,7 +1761,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled() ||
@ -1769,14 +1777,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));

View File

@ -2184,6 +2184,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
TEST(OptimizedPretenuringAllocationFolding) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2220,6 +2221,7 @@ TEST(OptimizedPretenuringAllocationFolding) {
TEST(OptimizedPretenuringAllocationFoldingBlocks) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2256,6 +2258,7 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
TEST(OptimizedPretenuringObjectArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2281,6 +2284,7 @@ TEST(OptimizedPretenuringObjectArrayLiterals) {
TEST(OptimizedPretenuringMixedInObjectProperties) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2312,6 +2316,7 @@ TEST(OptimizedPretenuringMixedInObjectProperties) {
TEST(OptimizedPretenuringDoubleArrayProperties) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2337,6 +2342,7 @@ TEST(OptimizedPretenuringDoubleArrayProperties) {
TEST(OptimizedPretenuringdoubleArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2362,6 +2368,7 @@ TEST(OptimizedPretenuringdoubleArrayLiterals) {
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2396,6 +2403,7 @@ TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
TEST(OptimizedPretenuringNestedObjectLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2430,6 +2438,7 @@ TEST(OptimizedPretenuringNestedObjectLiterals) {
TEST(OptimizedPretenuringNestedDoubleLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2493,6 +2502,7 @@ TEST(OptimizedAllocationArrayLiterals) {
TEST(OptimizedPretenuringCallNew) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_pretenuring_call_new = true;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;