Change store buffer overflow check to not rely on the store buffer being (2*Size) aligned.
This reduces the reserved virtual memory size needed for the store buffer.

BUG=chromium:578883
LOG=NO
Review URL: https://codereview.chromium.org/1851473002
Cr-Commit-Position: refs/heads/master@{#35174}
commit 6a62857388
parent bd4fb28ecd
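Before the per-architecture diffs, a minimal self-contained C++ sketch of the two overflow tests may help; the concrete kStoreBufferSize value and the helper functions below are assumptions made for illustration, not V8's actual code. The old scheme tests a single address bit, which only marks the end of the buffer if the buffer starts at a (2 * kStoreBufferSize)-aligned address; the new scheme masks out the low address bits, which works for any kStoreBufferSize-aligned placement because the write pointer lands exactly on a size boundary when the buffer is full.

#include <cassert>
#include <cstdint>

// Illustrative constants mirroring the new store-buffer layout; the value of
// kStoreBufferSize is an assumption (1 << (14 + kPointerSizeLog2) with 8-byte
// pointers), not necessarily what every V8 build uses.
constexpr uintptr_t kStoreBufferSize = uintptr_t{1} << 17;
constexpr uintptr_t kStoreBufferMask = kStoreBufferSize - 1;
constexpr uintptr_t kStoreBufferOverflowBit = kStoreBufferSize;  // old scheme

// Old check: a single-bit test. It is only meaningful if the buffer starts at
// a (2 * kStoreBufferSize)-aligned address, so that the overflow bit is clear
// for every in-buffer address and becomes set exactly at the limit.
bool OverflowedOld(uintptr_t top) {
  return (top & kStoreBufferOverflowBit) != 0;
}

// New check: the write pointer sits on a kStoreBufferSize boundary exactly
// when the buffer is full, so kStoreBufferSize alignment of the start is
// enough.
bool OverflowedNew(uintptr_t top) {
  return (top & kStoreBufferMask) == 0;
}

int main() {
  // A start address that is kStoreBufferSize-aligned but deliberately not
  // (2 * kStoreBufferSize)-aligned.
  uintptr_t start = 3 * kStoreBufferSize;
  uintptr_t limit = start + kStoreBufferSize;

  // In the middle of the buffer the new check stays quiet, while the old
  // check would already fire for this placement; this is exactly the
  // alignment dependency the commit removes.
  assert(!OverflowedNew(start + 8 * 100));
  assert(OverflowedOld(start + 8 * 100));

  // At the limit the new check fires.
  assert(OverflowedNew(limit));
  return 0;
}

This is also why every branch condition below flips: "buffer full" is now signalled by the masked bits being zero (eq) rather than by the overflow bit being set (ne), so the "done" branches change from eq to ne and the overflow branches from ne to eq.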
@@ -738,12 +738,12 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   str(scratch, MemOperand(ip));
-  // Call stub on end of buffer.
-  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  tst(scratch, Operand(StoreBuffer::kStoreBufferMask));
   if (and_then == kFallThroughAtEnd) {
-    b(eq, &done);
+    b(ne, &done);
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    Ret(eq);
+    Ret(ne);
   }
   push(lr);
   StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
@@ -4032,13 +4032,12 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   Str(scratch1, MemOperand(scratch2));
-  // Call stub on end of buffer.
-  DCHECK(StoreBuffer::kStoreBufferOverflowBit ==
-         (1 << (14 + kPointerSizeLog2)));
+  // Check for end of buffer.
+  Tst(scratch1, StoreBuffer::kStoreBufferMask);
   if (and_then == kFallThroughAtEnd) {
-    Tbz(scratch1, (14 + kPointerSizeLog2), &done);
+    B(ne, &done);
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    Tbnz(scratch1, (14 + kPointerSizeLog2), &store_buffer_overflow);
+    B(eq, &store_buffer_overflow);
     Ret();
   }
@@ -26,11 +26,10 @@ void StoreBuffer::SetUp() {
   // Allocate 3x the buffer size, so that we can start the new store buffer
   // aligned to 2x the size. This lets us use a bit test to detect the end of
   // the area.
-  virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3);
+  virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 2);
   uintptr_t start_as_int =
       reinterpret_cast<uintptr_t>(virtual_memory_->address());
-  start_ =
-      reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2));
+  start_ = reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize));
   limit_ = start_ + (kStoreBufferSize / kPointerSize);
 
   DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address());
@@ -41,9 +40,7 @@ void StoreBuffer::SetUp() {
   DCHECK(start_ <= vm_limit);
   DCHECK(limit_ <= vm_limit);
   USE(vm_limit);
-  DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferOverflowBit) != 0);
-  DCHECK((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) ==
-         0);
+  DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferMask) == 0);
 
   if (!virtual_memory_->Commit(reinterpret_cast<Address>(start_),
                                kStoreBufferSize,
@@ -18,9 +18,8 @@ namespace internal {
 // code. On buffer overflow the slots are moved to the remembered set.
 class StoreBuffer {
  public:
-  static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2);
-  static const int kStoreBufferSize = kStoreBufferOverflowBit;
+  static const int kStoreBufferSize = 1 << (14 + kPointerSizeLog2);
+  static const int kStoreBufferMask = kStoreBufferSize - 1;
   static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
 
   static void StoreBufferOverflow(Isolate* isolate);
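To see why the reservation can shrink from 3x to 2x the buffer size, here is a hedged sketch of the placement arithmetic in StoreBuffer::SetUp after this change; it is plain C++ with the VirtualMemory reservation reduced to a base address and a size, and the example address and RoundUp helper are assumptions, not V8's real API. Rounding the base up to kStoreBufferSize alignment skips at most kStoreBufferSize - 1 bytes, so a 2 * kStoreBufferSize reservation always leaves room for one aligned buffer, whereas the old 2x-alignment requirement could skip almost 2 * kStoreBufferSize bytes and therefore needed a 3x reservation.

#include <cassert>
#include <cstdint>

// Illustrative constants and helpers; the values and names are assumptions
// made for this sketch.
constexpr uintptr_t kStoreBufferSize = uintptr_t{1} << 17;
constexpr uintptr_t kStoreBufferMask = kStoreBufferSize - 1;

constexpr uintptr_t RoundUp(uintptr_t value, uintptr_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  // Pretend the OS handed back an arbitrary page-aligned reservation.
  uintptr_t reservation_base = uintptr_t{0x12561000};
  uintptr_t reservation_size = kStoreBufferSize * 2;  // was * 3 before

  // Place the buffer at the first kStoreBufferSize-aligned address inside the
  // reservation (previously rounded up to 2 * kStoreBufferSize).
  uintptr_t start = RoundUp(reservation_base, kStoreBufferSize);
  uintptr_t limit = start + kStoreBufferSize;

  // The aligned buffer always fits: rounding up skips at most
  // kStoreBufferSize - 1 bytes of the 2 * kStoreBufferSize reservation.
  assert(start >= reservation_base);
  assert(limit <= reservation_base + reservation_size);

  // Mirrors the new DCHECK in SetUp: the limit lies exactly on a
  // kStoreBufferSize boundary, so the mask test in RememberedSetHelper
  // detects it.
  assert((limit & kStoreBufferMask) == 0);
  return 0;
}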
@@ -204,15 +204,15 @@ void MacroAssembler::RememberedSetHelper(
   mov(Operand::StaticVariable(store_buffer), scratch);
-  // Call stub on end of buffer.
-  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
   if (and_then == kReturnAtEnd) {
     Label buffer_overflowed;
-    j(not_equal, &buffer_overflowed, Label::kNear);
+    j(equal, &buffer_overflowed, Label::kNear);
     ret(0);
     bind(&buffer_overflowed);
   } else {
     DCHECK(and_then == kFallThroughAtEnd);
-    j(equal, &done, Label::kNear);
+    j(not_equal, &done, Label::kNear);
   }
   StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
   CallStub(&store_buffer_overflow);
@@ -482,12 +482,12 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   sw(scratch, MemOperand(t8));
-  // Call stub on end of buffer.
-  And(t8, scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  And(t8, scratch, Operand(StoreBuffer::kStoreBufferMask));
   if (and_then == kFallThroughAtEnd) {
-    Branch(&done, eq, t8, Operand(zero_reg));
+    Branch(&done, ne, t8, Operand(zero_reg));
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    Ret(eq, t8, Operand(zero_reg));
+    Ret(ne, t8, Operand(zero_reg));
   }
   push(ra);
   StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
@@ -485,13 +485,13 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   sd(scratch, MemOperand(t8));
-  // Call stub on end of buffer.
-  And(t8, scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  And(t8, scratch, Operand(StoreBuffer::kStoreBufferMask));
   DCHECK(!scratch.is(t8));
   if (and_then == kFallThroughAtEnd) {
-    Branch(&done, eq, t8, Operand(zero_reg));
+    Branch(&done, ne, t8, Operand(zero_reg));
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    Ret(eq, t8, Operand(zero_reg));
+    Ret(ne, t8, Operand(zero_reg));
   }
   push(ra);
   StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
@@ -570,14 +570,14 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   StoreP(scratch, MemOperand(ip));
-  // Call stub on end of buffer.
-  mov(r0, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  mov(r0, Operand(StoreBuffer::kStoreBufferMask));
   and_(r0, scratch, r0, SetRC);
 
   if (and_then == kFallThroughAtEnd) {
-    beq(&done, cr0);
+    bne(&done, cr0);
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    Ret(eq, cr0);
+    Ret(ne, cr0);
   }
   mflr(r0);
   push(r0);
@@ -540,13 +540,13 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   StoreP(scratch, MemOperand(ip));
-  // Call stub on end of buffer.
-  AndP(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  AndP(scratch, Operand(StoreBuffer::kStoreBufferMask));
 
   if (and_then == kFallThroughAtEnd) {
-    beq(&done, Label::kNear);
+    bne(&done, Label::kNear);
   } else {
     DCHECK(and_then == kReturnAtEnd);
-    beq(&done, Label::kNear);
+    bne(&done, Label::kNear);
   }
   push(r14);
   StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
@@ -227,15 +227,15 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
   // Call stub on end of buffer.
   Label done;
   // Check for end of buffer.
-  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
+  testp(scratch, Immediate(StoreBuffer::kStoreBufferMask));
   if (and_then == kReturnAtEnd) {
     Label buffer_overflowed;
-    j(not_equal, &buffer_overflowed, Label::kNear);
+    j(equal, &buffer_overflowed, Label::kNear);
     ret(0);
     bind(&buffer_overflowed);
   } else {
     DCHECK(and_then == kFallThroughAtEnd);
-    j(equal, &done, Label::kNear);
+    j(not_equal, &done, Label::kNear);
   }
   StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
   CallStub(&store_buffer_overflow);
@@ -196,15 +196,15 @@ void MacroAssembler::RememberedSetHelper(
   mov(Operand::StaticVariable(store_buffer), scratch);
-  // Call stub on end of buffer.
-  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
+  // Check for end of buffer.
+  test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
   if (and_then == kReturnAtEnd) {
     Label buffer_overflowed;
-    j(not_equal, &buffer_overflowed, Label::kNear);
+    j(equal, &buffer_overflowed, Label::kNear);
     ret(0);
     bind(&buffer_overflowed);
   } else {
     DCHECK(and_then == kFallThroughAtEnd);
-    j(equal, &done, Label::kNear);
+    j(not_equal, &done, Label::kNear);
   }
   StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
   CallStub(&store_buffer_overflow);