Fix store buffer ensure space heuristics.

This change results in less scanning of memory chunks during scavenge.

BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/15896037

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15037 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
hpayer@chromium.org 2013-06-10 14:22:41 +00:00
parent 29a1044409
commit 09495dfea2

View File

@@ -188,10 +188,10 @@ void StoreBuffer::EnsureSpace(intptr_t space_needed) {
     { 3, ((Page::kPageSize / kPointerSize) / 3) / 256 },
     { 1, 0}
   };
-  for (int i = kSampleFinenesses - 1; i >= 0; i--) {
+  for (int i = 0; i < kSampleFinenesses; i++) {
     ExemptPopularPages(samples[i].prime_sample_step, samples[i].threshold);
     // As a last resort we mark all pages as being exempt from the store buffer.
-    ASSERT(i != 0 || old_top_ == old_start_);
+    ASSERT(i != (kSampleFinenesses - 1) || old_top_ == old_start_);
     if (old_limit_ - old_top_ > old_top_ - old_start_) return;
   }
   UNREACHABLE();