Allocation site pretenuring.

Pretenuring decisions are made based on allocation site lifetime statistics.
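
A minimal sketch of the per-site decision this patch introduces, mirroring AllocationSite::DigestPretenuringFeedback in the objects-inl.h hunk below. kPretenureRatio is 0.60 in the patch; the value of kPretenureMinimumCreated is not visible in this diff, so the threshold used here is only an assumed placeholder.

#include <cstdio>

// kPretenureRatio comes from the patch (objects.cc). The minimum-created
// threshold below is an assumed placeholder, since kPretenureMinimumCreated's
// value is not shown in this diff.
static const double kPretenureRatio = 0.60;
static const int kAssumedPretenureMinimumCreated = 100;

enum Decision { kUndecided, kDontTenure, kTenure };

// Once a site has created enough allocation mementos, compare how many of
// them were found live during scavenges against how many were created.
Decision DigestFeedback(int memento_create_count, int memento_found_count) {
  if (memento_create_count < kAssumedPretenureMinimumCreated) {
    return kUndecided;  // not enough samples collected yet
  }
  double ratio =
      static_cast<double>(memento_found_count) / memento_create_count;
  // A high ratio means objects allocated at this site tend to survive the
  // scavenge, so future allocations should go straight to old space.
  return ratio >= kPretenureRatio ? kTenure : kDontTenure;
}

int main() {
  // 20000 mementos created, 15000 found alive: 0.75 >= 0.60, so tenure.
  printf("decision: %d\n", DigestFeedback(20000, 15000));
  return 0;
}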

BUG=
R=mstarzinger@chromium.org, mvstanton@chromium.org

Review URL: https://codereview.chromium.org/96783002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18532 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
hpayer@chromium.org 2014-01-10 12:11:54 +00:00
parent 967d6499d2
commit 4ef951cf43
8 changed files with 178 additions and 74 deletions


@@ -484,15 +484,16 @@ void Heap::ScavengePointer(HeapObject** p) {
void Heap::UpdateAllocationSiteFeedback(HeapObject* object) {
Heap* heap = object->GetHeap();
if (FLAG_allocation_site_pretenuring &&
heap->new_space_high_promotion_mode_active_ &&
AllocationSite::CanTrack(object->map()->instance_type())) {
AllocationMemento* memento = AllocationMemento::FindForHeapObject(
object, true);
object, heap, true);
if (memento != NULL) {
ASSERT(memento->IsValid());
bool add_to_scratchpad =
memento->GetAllocationSite()->IncrementMementoFoundCount();
Heap* heap = object->GetIsolate()->heap();
if (add_to_scratchpad && heap->allocation_sites_scratchpad_length <
kAllocationSiteScratchpadSize) {
heap->allocation_sites_scratchpad[


@@ -506,7 +506,8 @@ void Heap::RepairFreeListsAfterBoot() {
void Heap::ProcessPretenuringFeedback() {
if (FLAG_allocation_site_pretenuring) {
if (FLAG_allocation_site_pretenuring &&
new_space_high_promotion_mode_active_) {
int tenure_decisions = 0;
int dont_tenure_decisions = 0;
int allocation_mementos_found = 0;
@@ -514,7 +515,7 @@ void Heap::ProcessPretenuringFeedback() {
int active_allocation_sites = 0;
// If the scratchpad overflowed, we have to iterate over the allocation
// stites list.
// sites list.
bool use_scratchpad =
allocation_sites_scratchpad_length < kAllocationSiteScratchpadSize;
@@ -1100,12 +1101,15 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
PrintPID("Limited new space size due to high promotion rate: %d MB\n",
new_space_.InitialCapacity() / MB);
}
// Support for global pre-tenuring uses the high promotion mode as a
// heuristic indicator of whether to pretenure or not, we trigger
// deoptimization here to take advantage of pre-tenuring as soon as
// possible.
// The high promotion mode is our indicator to turn on pretenuring. We have
// to deoptimize all optimized code in global pretenuring mode and all
// code which should be tenured in local pretenuring mode.
if (FLAG_pretenuring) {
isolate_->stack_guard()->FullDeopt();
if (FLAG_allocation_site_pretenuring) {
ResetAllAllocationSitesDependentCode(NOT_TENURED);
} else {
isolate_->stack_guard()->FullDeopt();
}
}
} else if (new_space_high_promotion_mode_active_ &&
IsStableOrDecreasingSurvivalTrend() &&
@@ -1118,9 +1122,9 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
PrintPID("Unlimited new space size due to low promotion rate: %d MB\n",
new_space_.MaximumCapacity() / MB);
}
// Trigger deoptimization here to turn off pre-tenuring as soon as
// Trigger deoptimization here to turn off global pretenuring as soon as
// possible.
if (FLAG_pretenuring) {
if (FLAG_pretenuring && !FLAG_allocation_site_pretenuring) {
isolate_->stack_guard()->FullDeopt();
}
}
@@ -1212,6 +1216,8 @@ void Heap::MarkCompact(GCTracer* tracer) {
gc_state_ = MARK_COMPACT;
LOG(isolate_, ResourceEvent("markcompact", "begin"));
uint64_t size_of_objects_before_gc = SizeOfObjects();
mark_compact_collector_.Prepare(tracer);
ms_count_++;
@@ -1228,6 +1234,10 @@ void Heap::MarkCompact(GCTracer* tracer) {
isolate_->counters()->objs_since_last_full()->Set(0);
flush_monomorphic_ics_ = false;
if (FLAG_allocation_site_pretenuring) {
EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
}
}
@@ -1966,6 +1976,39 @@ void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
}
void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
Object* cur = allocation_sites_list();
while (cur->IsAllocationSite()) {
AllocationSite* casted = AllocationSite::cast(cur);
if (casted->GetPretenureMode() == flag) {
casted->ResetPretenureDecision();
}
cur = casted->weak_next();
}
}
void Heap::EvaluateOldSpaceLocalPretenuring(
uint64_t size_of_objects_before_gc) {
uint64_t size_of_objects_after_gc = SizeOfObjects();
double old_generation_survival_rate =
(static_cast<double>(size_of_objects_after_gc) * 100) /
static_cast<double>(size_of_objects_before_gc);
if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
// Too many objects died in the old generation; pretenuring of the wrong
// allocation sites may be the cause. We have to deopt all dependent code
// registered in the allocation sites to re-evaluate our pretenuring
// decisions.
ResetAllAllocationSitesDependentCode(TENURED);
if (FLAG_trace_pretenuring) {
PrintF("Deopt all allocation sites dependent code due to low survival "
"rate in the old generation %f\n", old_generation_survival_rate);
}
}
}
void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
DisallowHeapAllocation no_allocation;
// All external strings are listed in the external string table.

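For illustration, a self-contained sketch of the survival-rate check in Heap::EvaluateOldSpaceLocalPretenuring above, using hypothetical sizes; kOldSurvivalRateLowThreshold is 20 in the heap.h hunk that follows.

#include <cstdint>
#include <cstdio>

// Threshold introduced by this patch in heap.h.
static const int kOldSurvivalRateLowThreshold = 20;

int main() {
  // Hypothetical numbers, for illustration only: 200 MB of old-space
  // objects before a mark-compact collection, 30 MB survive it.
  const uint64_t MB = 1024 * 1024;
  uint64_t size_of_objects_before_gc = 200 * MB;
  uint64_t size_of_objects_after_gc = 30 * MB;

  double old_generation_survival_rate =
      (static_cast<double>(size_of_objects_after_gc) * 100) /
      static_cast<double>(size_of_objects_before_gc);  // 15.0

  if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
    // 15% < 20%: too much of the old generation died, so the patch resets
    // all TENURED allocation sites and deoptimizes their dependent code,
    // forcing fresh memento statistics before pretenuring again.
    printf("Deopt allocation site dependent code, old gen survival %.1f%%\n",
           old_generation_survival_rate);
  }
  return 0;
}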

@@ -2042,6 +2042,7 @@ class Heap {
// Pretenuring decisions are made based on feedback collected during new
// space evacuation. Note that between feedback collection and calling this
// method, objects in old space must not move.
// Right now we only process pretenuring feedback in high promotion mode.
void ProcessPretenuringFeedback();
// Checks whether a global GC is necessary
@@ -2168,6 +2169,15 @@
void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
// Deopts all code that contains allocation instructions which are tenured or
// not tenured. Moreover, it clears the pretenuring allocation site statistics.
void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
// Evaluates local pretenuring for the old space and calls
// ResetAllAllocationSitesDependentCode if too many objects died in
// the old space.
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
// Called on heap tear-down.
void TearDownArrayBuffers();
@@ -2211,6 +2221,8 @@ class Heap {
static const int kYoungSurvivalRateLowThreshold = 10;
static const int kYoungSurvivalRateAllowedDeviation = 15;
static const int kOldSurvivalRateLowThreshold = 20;
int young_survivors_after_last_gc_;
int high_survival_rate_period_length_;
int low_survival_rate_period_length_;


@@ -9499,15 +9499,11 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
HValue* object_size_constant = Add<HConstant>(
boilerplate_object->map()->instance_size());
// We should pull pre-tenure mode from the allocation site.
// For now, just see what it says, and remark on it if it sez
// we should pretenure. That means the rudimentary counting in the garbage
// collector is having an effect.
PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
if (FLAG_allocation_site_pretenuring) {
pretenure_flag = site_context->current()->GetPretenureMode()
? TENURED
: NOT_TENURED;
pretenure_flag = site_context->current()->GetPretenureMode();
site_context->current()->AddDependentCompilationInfo(
AllocationSite::TENURING, top_info());
}
HInstruction* object = Add<HAllocate>(object_size_constant, type,


@@ -1385,20 +1385,22 @@ inline void AllocationSite::IncrementMementoCreateCount() {
inline bool AllocationSite::DigestPretenuringFeedback() {
bool decision_made = false;
if (!PretenuringDecisionMade()) {
int create_count = memento_create_count()->value();
if (create_count >= kPretenureMinimumCreated) {
int found_count = memento_found_count()->value();
double ratio = static_cast<double>(found_count) / create_count;
if (FLAG_trace_track_allocation_sites) {
PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
static_cast<void*>(this), create_count, found_count, ratio);
}
int result = ratio >= kPretenureRatio ? kTenure : kDontTenure;
set_pretenure_decision(Smi::FromInt(result));
decision_made = true;
// TODO(mvstanton): if the decision represents a change, any dependent
// code registered for pretenuring changes should be deopted.
int create_count = memento_create_count()->value();
if (create_count >= kPretenureMinimumCreated) {
int found_count = memento_found_count()->value();
double ratio = static_cast<double>(found_count) / create_count;
if (FLAG_trace_track_allocation_sites) {
PrintF("AllocationSite: %p (created, found, ratio) (%d, %d, %f)\n",
static_cast<void*>(this), create_count, found_count, ratio);
}
int current_mode = GetPretenureMode();
int result = ratio >= kPretenureRatio ? kTenure : kDontTenure;
set_pretenure_decision(Smi::FromInt(result));
decision_made = true;
if (current_mode != GetPretenureMode()) {
dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(),
DependentCode::kAllocationSiteTenuringChangedGroup);
}
}


@@ -9203,19 +9203,20 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
AllocationMemento* AllocationMemento::FindForHeapObject(HeapObject* object,
Heap* heap,
bool in_GC) {
// AllocationMemento objects are only allocated immediately after objects in
// NewSpace. Detecting whether a memento is present involves carefully
// checking the object immediately after the current object (if there is one)
// to see if it's an AllocationMemento.
ASSERT(object->GetHeap()->InNewSpace(object));
ASSERT(heap->InNewSpace(object));
Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
object->Size();
Address top;
if (in_GC) {
top = object->GetHeap()->new_space()->FromSpacePageHigh();
top = heap->new_space()->FromSpacePageHigh();
} else {
top = object->GetHeap()->NewSpaceTop();
top = heap->NewSpaceTop();
}
if ((ptr_end + AllocationMemento::kSize) <= top) {
// There is room in newspace for allocation info. Do we have some?
@@ -12793,6 +12794,24 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object,
const double AllocationSite::kPretenureRatio = 0.60;
void AllocationSite::ResetPretenureDecision() {
dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(),
DependentCode::kAllocationSiteTenuringChangedGroup);
set_pretenure_decision(Smi::FromInt(kUndecided));
set_memento_found_count(Smi::FromInt(0));
set_memento_create_count(Smi::FromInt(0));
}
PretenureFlag AllocationSite::GetPretenureMode() {
int mode = pretenure_decision()->value();
// Zombie objects "decide" to be untenured.
return (mode == kTenure && GetHeap()->GetPretenureMode() == TENURED)
? TENURED : NOT_TENURED;
}
bool AllocationSite::IsNestedSite() {
ASSERT(FLAG_trace_track_allocation_sites);
Object* current = GetHeap()->allocation_sites_list();
@@ -12891,9 +12910,10 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
return this;
}
if (!GetHeap()->InNewSpace(this)) return this;
Heap* heap = GetHeap();
if (!heap->InNewSpace(this)) return this;
AllocationMemento* memento = AllocationMemento::FindForHeapObject(this);
AllocationMemento* memento = AllocationMemento::FindForHeapObject(this, heap);
if (memento == NULL || !memento->IsValid()) {
return this;
}


@@ -8157,11 +8157,9 @@ class AllocationSite: public Struct {
inline void IncrementMementoCreateCount();
PretenureFlag GetPretenureMode() {
int mode = pretenure_decision()->value();
// Zombie objects "decide" to be untenured.
return (mode == kTenure) ? TENURED : NOT_TENURED;
}
PretenureFlag GetPretenureMode();
void ResetPretenureDecision();
// The pretenuring decision is made during gc, and the zombie state allows
// us to recognize when an allocation site is just being kept alive because
@@ -8277,6 +8275,7 @@ class AllocationMemento: public Struct {
// Returns NULL if no AllocationMemento is available for object.
static AllocationMemento* FindForHeapObject(HeapObject* object,
Heap* heap,
bool in_GC = false);
static inline AllocationMemento* cast(Object* obj);


@@ -2190,7 +2190,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
TEST(OptimizedPretenuringAllocationFolding) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2206,8 +2206,13 @@ TEST(OptimizedPretenuringAllocationFolding) {
" this.e = 1.3;"
" this.f = [{}];"
"}"
"var number_elements = 20000;"
"var elements = new Array();"
"function f() {"
" return new DataObject();"
" for (var i = 0; i < 20000-1; i++) {"
" elements[i] = new DataObject();"
" }"
" return new DataObject()"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2227,7 +2232,7 @@ TEST(OptimizedPretenuringAllocationFolding) {
TEST(OptimizedPretenuringAllocationFoldingBlocks) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@@ -2235,6 +2240,8 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 3000;"
"var elements = new Array(number_elements);"
"function DataObject() {"
" this.a = [{}];"
" this.b = [{}];"
@@ -2244,7 +2251,10 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
" this.f = 1.3;"
"}"
"function f() {"
" return new DataObject();"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = new DataObject();"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2264,17 +2274,20 @@ TEST(OptimizedPretenuringAllocationFoldingBlocks) {
TEST(OptimizedPretenuringObjectArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = [{}, {}, {}];"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = [{}, {}, {}];"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2290,17 +2303,20 @@ TEST(OptimizedPretenuringObjectArrayLiterals) {
TEST(OptimizedPretenuringMixedInObjectProperties) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = {a: {c: 2.2, d: {}}, b: 1.1};"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2322,17 +2338,20 @@ TEST(OptimizedPretenuringMixedInObjectProperties) {
TEST(OptimizedPretenuringDoubleArrayProperties) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = {a: 1.1, b: 2.2};"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = {a: 1.1, b: 2.2};"
" }"
" return elements[i - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2348,17 +2367,20 @@ TEST(OptimizedPretenuringDoubleArrayProperties) {
TEST(OptimizedPretenuringdoubleArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = [1.1, 2.2, 3.3];"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = [1.1, 2.2, 3.3];"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2374,17 +2396,20 @@ TEST(OptimizedPretenuringdoubleArrayLiterals) {
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = [[{}, {}, {}],[1.1, 2.2, 3.3]];"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2409,17 +2434,20 @@ TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
TEST(OptimizedPretenuringNestedObjectLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = [[{}, {}, {}],[{}, {}, {}]];"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
@@ -2444,17 +2472,20 @@ TEST(OptimizedPretenuringNestedObjectLiterals) {
TEST(OptimizedPretenuringNestedDoubleLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_allocation_site_pretenuring = false;
i::FLAG_max_new_space_size = 2048;
CcTest::InitializeVM();
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope(CcTest::isolate());
CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
v8::Local<v8::Value> res = CompileRun(
"var number_elements = 20000;"
"var elements = new Array(number_elements);"
"function f() {"
" var numbers = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
" return numbers;"
" for (var i = 0; i < number_elements; i++) {"
" elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
" }"
" return elements[number_elements - 1];"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"