Remove guard page mechanism from promotion queue.

BUG=chromium:411210
LOG=n
R=jarin@chromium.org

Review URL: https://codereview.chromium.org/557243002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23824 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: hpayer@chromium.org
Date:   2014-09-10 07:51:29 +00:00
Commit: ed37edc5c0
Parent: d579836b9a

5 changed files with 28 additions and 33 deletions
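
For context: the promotion queue lives at the high end of to-space and grows
downward (Initialize() below sets front_ and rear_ to ToSpaceEnd()), while
new-space allocation grows upward toward it. Before this patch, insert()
compared rear_ against limit_ only once guard_ was set, i.e. once allocation
had reached the queue's head page; after it, the comparison is unconditional
and SetNewLimit() always publishes the current allocation top. The following
stand-alone sketch models that arithmetic with invented names and no V8
types; entries are two words, matching kEntrySizeInWords == 2, hence the
(rear_ - 2) < limit_ check:

// Stand-alone model of the promotion queue arithmetic (an illustration,
// not V8's actual types or code).
#include <cassert>
#include <cstdint>

class QueueModel {
 public:
  QueueModel(intptr_t* area_start, intptr_t* area_end)
      : limit_(area_start), front_(area_end), rear_(area_end) {}

  // Returns false when a two-word entry would collide with the allocation
  // top; the real insert() relocates the queue head and falls back to the
  // emergency stack instead.
  bool Insert(intptr_t target, intptr_t size) {
    if ((rear_ - 2) < limit_) return false;
    *(--rear_) = target;
    *(--rear_) = size;
    return true;
  }

  bool IsEmpty() const { return front_ == rear_; }

  // FIFO removal: front_ chases rear_ downward from the area end.
  void Remove(intptr_t* target, intptr_t* size) {
    assert(!IsEmpty());
    *target = *(--front_);
    *size = *(--front_);
  }

 private:
  intptr_t* limit_;  // allocation top; moves upward toward the queue
  intptr_t* front_;  // dequeue end; starts at the area end
  intptr_t* rear_;   // enqueue end; grows downward
};

int main() {
  intptr_t area[6] = {0};
  QueueModel queue(area, area + 6);
  assert(queue.Insert(0x1000, 16));
  assert(queue.Insert(0x2000, 24));
  assert(queue.Insert(0x3000, 32));
  assert(!queue.Insert(0x4000, 40));  // a fourth entry would cross the limit
  intptr_t target = 0;
  intptr_t size = 0;
  queue.Remove(&target, &size);
  assert(target == 0x1000 && size == 16);
  return 0;
}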

@@ -31,18 +31,12 @@ void PromotionQueue::insert(HeapObject* target, int size) {
         NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
     DCHECK(!rear_page->prev_page()->is_anchor());
     rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
-    ActivateGuardIfOnTheSamePage();
   }
 
-  if (guard_) {
-    DCHECK(GetHeadPage() ==
-           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
-
-    if ((rear_ - 2) < limit_) {
-      RelocateQueueHead();
-      emergency_stack_->Add(Entry(target, size));
-      return;
-    }
+  if ((rear_ - 2) < limit_) {
+    RelocateQueueHead();
+    emergency_stack_->Add(Entry(target, size));
+    return;
   }
 
   *(--rear_) = reinterpret_cast<intptr_t>(target);
@@ -55,13 +49,6 @@ void PromotionQueue::insert(HeapObject* target, int size) {
 }
 
 
-void PromotionQueue::ActivateGuardIfOnTheSamePage() {
-  guard_ = guard_ ||
-           heap_->new_space()->active_space()->current_page()->address() ==
-               GetHeadPage()->address();
-}
-
-
 template <>
 bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
   // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
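
When the inline check fails, insert() falls back to the emergency stack:
RelocateQueueHead() (not shown in this diff; it moves the entries on the
queue's head page into a freshly allocated emergency_stack_) frees the head
page, and once that stack exists the early-out at the top of insert() routes
every later entry to it. A compact sketch of that control flow, with a
std::vector standing in for V8's List-based emergency stack (illustrative,
not the real implementation):

#include <cassert>
#include <cstdint>
#include <vector>

struct Entry { intptr_t target; intptr_t size; };

struct QueueModel {
  intptr_t* limit;                          // current allocation top
  intptr_t* rear;                           // grows downward
  std::vector<Entry>* emergency = nullptr;  // NULL until the first overflow

  void Insert(intptr_t target, intptr_t size) {
    if (emergency != nullptr) {  // mirrors the emergency_stack_ early-out
      emergency->push_back({target, size});
      return;
    }
    if ((rear - 2) < limit) {    // the now-unconditional limit check
      // Stands in for RelocateQueueHead(): from here on, entries live in
      // the emergency stack and the head page can be handed back.
      emergency = new std::vector<Entry>();
      emergency->push_back({target, size});
      return;
    }
    *(--rear) = target;
    *(--rear) = size;
  }
};

int main() {
  intptr_t area[4] = {0};
  QueueModel q{area, area + 4};
  q.Insert(0x1000, 16);  // fits: occupies area[2..3]
  q.Insert(0x2000, 24);  // fits: occupies area[0..1]
  q.Insert(0x3000, 32);  // overflows: goes to the emergency stack
  assert(q.emergency != nullptr && q.emergency->size() == 1);
  delete q.emergency;
  return 0;
}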

@@ -1365,7 +1365,6 @@ void PromotionQueue::Initialize() {
   front_ = rear_ =
       reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd());
   emergency_stack_ = NULL;
-  guard_ = false;
 }
@@ -1963,15 +1962,16 @@ class ScavengingVisitor : public StaticVisitorBase {
     HeapObject* target = NULL;  // Initialization to please compiler.
     if (allocation.To(&target)) {
+      // Order is important here: Set the promotion limit before storing a
+      // filler for double alignment or migrating the object. Otherwise we
+      // may end up overwriting promotion queue entries when we migrate the
+      // object.
+      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
+
       if (alignment != kObjectAlignment) {
         target = EnsureDoubleAligned(heap, target, allocation_size);
       }
 
-      // Order is important here: Set the promotion limit before migrating
-      // the object. Otherwise we may end up overwriting promotion queue
-      // entries when we migrate the object.
-      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
-
       // Order is important: slot might be inside of the target if target
       // was allocated over a dead object and slot comes from the store
       // buffer.
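
The reordering above matters because both the double-alignment filler and the
object migration write bytes at the newly allocated target address, which can
overlap promotion queue entries that have not been relocated yet. Publishing
the new allocation top first lets SetNewLimit() move those entries to safety.
A toy illustration of the hazard, with invented sizes and names (not V8
code):

#include <cassert>
#include <cstdint>
#include <vector>

struct ToyEntry { intptr_t target, size; };

int main() {
  intptr_t page[8] = {0};
  intptr_t* rear = page + 6;       // one queue entry at page[6..7]
  page[7] = 0x1000;                // entry: target word
  page[6] = 16;                    // entry: size word

  intptr_t* new_top = page + 8;    // top after allocation.To(&target)

  // Right order (what this hunk enforces): publish the new top first, so
  // the queue can move the overlapped entry to safety...
  std::vector<ToyEntry> emergency;  // stands in for emergency_stack_
  if (new_top > rear) {
    emergency.push_back({page[7], page[6]});
    rear = page + 8;               // queue on this page is now empty
  }
  // ...and only then write the filler / migrate the object into the region.
  for (intptr_t* p = page + 4; p < new_top; ++p) *p = 0;

  // The entry survived. With the stores done first, page[6..7] would have
  // been zeroed while the queue still pointed at them.
  assert(emergency.size() == 1 && emergency[0].target == 0x1000);
  assert(rear == new_top);
  return 0;
}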

@@ -383,18 +383,11 @@ class PromotionQueue {
     emergency_stack_ = NULL;
   }
 
-  inline void ActivateGuardIfOnTheSamePage();
-
   Page* GetHeadPage() {
     return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
   }
 
   void SetNewLimit(Address limit) {
-    if (!guard_) {
-      return;
-    }
-
-    DCHECK(GetHeadPage() == Page::FromAllocationTop(limit));
     limit_ = reinterpret_cast<intptr_t*>(limit);
 
     if (limit_ <= rear_) {
@@ -451,8 +444,6 @@ class PromotionQueue {
   intptr_t* rear_;
   intptr_t* limit_;
 
-  bool guard_;
-
   static const int kEntrySizeInWords = 2;
 
   struct Entry {
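
The hunk above is truncated inside SetNewLimit(); assuming its pre-existing
tail is unchanged (return early while the published top is still below
rear_, otherwise call RelocateQueueHead()), the simplified function behaves
like this sketch with stand-in types:

#include <cstdint>
#include <cstdio>

struct QueueState {
  intptr_t* limit;  // last published allocation top
  intptr_t* rear;   // enqueue end; grows downward
};

// Stand-in for PromotionQueue::RelocateQueueHead().
static void RelocateQueueHead(QueueState* q) {
  std::printf("relocating queue head\n");
}

static void SetNewLimit(QueueState* q, intptr_t* new_top) {
  // The guard_ early return and its DCHECK are gone: every caller's top is
  // recorded, so insert()'s (rear_ - 2) < limit_ check stays meaningful.
  q->limit = new_top;
  if (q->limit <= q->rear) return;  // no overlap between top and queue yet
  RelocateQueueHead(q);             // allocation top crossed the queue
}

int main() {
  intptr_t page[8];
  QueueState q = {page, page + 6};
  SetNewLimit(&q, page + 4);  // below rear_: just records the limit
  SetNewLimit(&q, page + 8);  // past rear_: triggers relocation
  return 0;
}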

@@ -1360,7 +1360,6 @@ bool NewSpace::AddFreshPage() {
   Address limit = NewSpacePage::FromLimit(top)->area_end();
   if (heap()->gc_state() == Heap::SCAVENGE) {
     heap()->promotion_queue()->SetNewLimit(limit);
-    heap()->promotion_queue()->ActivateGuardIfOnTheSamePage();
   }
 
   int remaining_in_page = static_cast<int>(limit - top);

@@ -0,0 +1,18 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --gc-interval=439 --random-seed=-423594851
+
+var __v_3;
+function __f_2() {
+  var __v_1 = new Array(3);
+  __v_1[0] = 10;
+  __v_1[1] = 15.5;
+  __v_3 = __f_2();
+  __v_1[2] = 20;
+  return __v_1;
+}
+for (var __v_2 = 0; __v_2 < 3; ++__v_2) {
+  __v_3 = __f_2();
+}