Revert of Provide a tagged allocation top pointer. (patchset #4 id:60001 of https://codereview.chromium.org/2028633002/ )

Reason for revert:
Seems to be causing flakiness in some wasm tests:

https://build.chromium.org/p/client.v8/builders/V8%20Linux/builds/10598
https://build.chromium.org/p/client.v8/builders/V8%20Win32%20-%20debug/builds/2528

Original issue's description:
> Provide a tagged allocation top pointer.
>
> Taking over http://crrev.com/1924223002.
>
> BUG=chromium:606711
> LOG=N
>
> Committed: https://crrev.com/f42c9e93c80fdf57e8f92bb87f6ed927d0ae4028
> Cr-Commit-Position: refs/heads/master@{#36633}

TBR=bmeurer@chromium.org,hpayer@chromium.org,machenbach@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:606711

Review-Url: https://codereview.chromium.org/2031493002
Cr-Commit-Position: refs/heads/master@{#36640}
Author: epertoso
Committed: 2016-06-01 04:02:08 -07:00 (by Commit bot)
Parent: 88ab533b32
Commit: 79f45e026b
24 changed files with 198 additions and 115 deletions
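For context on the change being reverted: V8 heap objects are referenced through tagged pointers (low bit set to kHeapObjectTag = 1), and the original CL made the allocation top pointer carry that tag permanently, so freshly bumped allocation results were already tagged. The revert returns to an untagged top that is tagged on each allocation. A minimal standalone sketch of the two schemes follows; it is illustrative only, not V8's actual classes, and the function names are invented:

#include <cstdint>

using Address = uintptr_t;
const Address kHeapObjectTag = 1;  // low pointer bit marks a heap object

// Untagged top (restored by this revert): top holds a raw address and the
// allocation result is tagged explicitly.
Address AllocateUntaggedTop(Address* top, Address limit, Address size) {
  Address raw = *top;
  if (raw + size > limit) return 0;  // caller must enter the GC slow path
  *top = raw + size;                 // top stays untagged
  return raw + kHeapObjectTag;       // tag the result
}

// Tagged top (the reverted CL): top already carries the tag, so the result
// needs no extra add, but raw-address uses must untag first.
Address AllocateTaggedTop(Address* top, Address limit, Address size) {
  Address tagged = *top;             // already tagged
  if (tagged - kHeapObjectTag + size > limit) return 0;
  *top = tagged + size;              // adding the size keeps the tag intact
  return tagged;
}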

View File

@@ -4423,7 +4423,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 Label allocate, done_allocate;
 __ ldrb(r4, FieldMemOperand(r2, Map::kInstanceSizeOffset));
 __ Allocate(r4, r0, r5, r6, &allocate, SIZE_IN_WORDS);
-__ sub(r5, r5, Operand(kHeapObjectTag)); // Untag result end.
 __ bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4564,7 +4563,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, NO_ALLOCATION_FLAGS);
-__ sub(r1, r1, Operand(kHeapObjectTag)); // Untag result end.
 __ bind(&done_allocate);
 // Setup the rest parameter array in r0.

View File

@@ -2032,7 +2032,7 @@ void MacroAssembler::Allocate(int object_size,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
-and_(result_end, result, Operand(kDoubleAlignmentMaskTagged), SetCC);
+and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
 Label aligned;
 b(eq, &aligned);
 if ((flags & PRETENURE) != 0) {
@@ -2040,8 +2040,7 @@ void MacroAssembler::Allocate(int object_size,
 b(hs, gc_required);
 }
 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-str(result_end, MemOperand(result, -kHeapObjectTag));
-add(result_end, result_end, Operand(kDoubleSize / 2));
+str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
 bind(&aligned);
 }
@@ -2074,6 +2073,9 @@ void MacroAssembler::Allocate(int object_size,
 // The top pointer is not updated for allocation folding dominators.
 str(result_end, MemOperand(top_address));
 }
+// Tag object.
+add(result, result, Operand(kHeapObjectTag));
 }
@@ -2136,7 +2138,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-and_(result_end, result, Operand(kDoubleAlignmentMaskTagged), SetCC);
+and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
 Label aligned;
 b(eq, &aligned);
 if ((flags & PRETENURE) != 0) {
@@ -2144,8 +2146,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 b(hs, gc_required);
 }
 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-str(result_end, MemOperand(result, -kHeapObjectTag));
-add(result_end, result_end, Operand(kDoubleSize / 2));
+str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
 bind(&aligned);
 }
@@ -2164,12 +2165,15 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // Update allocation top. result temporarily holds the new top.
 if (emit_debug_code()) {
 tst(result_end, Operand(kObjectAlignmentMask));
-Check(ne, kUnalignedAllocationInNewSpace);
+Check(eq, kUnalignedAllocationInNewSpace);
 }
 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
 // The top pointer is not updated for allocation folding dominators.
 str(result_end, MemOperand(top_address));
 }
+// Tag object.
+add(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -2192,12 +2196,11 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-and_(result_end, result, Operand(kDoubleAlignmentMaskTagged), SetCC);
+and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
 Label aligned;
 b(eq, &aligned);
 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-str(result_end, MemOperand(result, -kHeapObjectTag));
-add(result_end, result_end, Operand(kDoubleSize / 2));
+str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
 bind(&aligned);
 }
@@ -2212,10 +2215,12 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 // Update allocation top. result temporarily holds the new top.
 if (emit_debug_code()) {
 tst(result_end, Operand(kObjectAlignmentMask));
-Check(ne, kUnalignedAllocationInNewSpace);
+Check(eq, kUnalignedAllocationInNewSpace);
 }
 // The top pointer is not updated for allocation folding dominators.
 str(result_end, MemOperand(top_address));
+add(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(int object_size, Register result,
@@ -2243,12 +2248,11 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
-and_(result_end, result, Operand(kDoubleAlignmentMaskTagged), SetCC);
+and_(result_end, result, Operand(kDoubleAlignmentMask), SetCC);
 Label aligned;
 b(eq, &aligned);
 mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-str(result_end, MemOperand(result, -kHeapObjectTag));
-add(result_end, result_end, Operand(kDoubleSize / 2));
+str(result_end, MemOperand(result, kDoubleSize / 2, PostIndex));
 bind(&aligned);
 }
@@ -2276,6 +2280,8 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 // The top pointer is not updated for allocation folding dominators.
 str(result_end, MemOperand(top_address));
+add(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::AllocateTwoByteString(Register result,
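The double-alignment hunks in this file all restore the same trick: when DOUBLE_ALIGNMENT is requested and the untagged top is only word aligned, a one-pointer filler map is written at the top and the top is advanced by kDoubleSize / 2 (one word on 32-bit targets), keeping the heap iterable. A rough standalone sketch of that step, with illustrative names:

#include <cstdint>

// Hypothetical stand-in for the filler logic the hunks above implement.
void AlignForDouble(uintptr_t* top, uintptr_t one_pointer_filler_map) {
  const uintptr_t kDoubleAlignmentMask = 7;  // kDoubleAlignment - 1
  if (*top & kDoubleAlignmentMask) {
    // Write a filler object so the skipped word still parses as an object.
    *reinterpret_cast<uintptr_t*>(*top) = one_pointer_filler_map;
    *top += sizeof(uintptr_t);  // kDoubleSize / 2 on 32-bit targets
  }
}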

View File

@@ -4695,7 +4695,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 Label allocate, done_allocate;
 __ Ldrb(x4, FieldMemOperand(x2, Map::kInstanceSizeOffset));
 __ Allocate(x4, x0, x5, x6, &allocate, SIZE_IN_WORDS);
-__ Sub(x5, x5, Operand(kHeapObjectTag)); // Untag result end.
 __ Bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4843,7 +4842,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, x0, x1, x2, &allocate, NO_ALLOCATION_FLAGS);
-__ Sub(x1, x1, Operand(kHeapObjectTag)); // Untag result end.
 __ Bind(&done_allocate);
 // Setup the rest parameter array in x0.

View File

@@ -3097,6 +3097,9 @@ void MacroAssembler::Allocate(int object_size,
 // The top pointer is not updated for allocation folding dominators.
 Str(result_end, MemOperand(top_address));
 }
+// Tag the object.
+ObjectTag(result, result);
 }
@@ -3166,7 +3169,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 if (emit_debug_code()) {
 Tst(result_end, kObjectAlignmentMask);
-Check(ne, kUnalignedAllocationInNewSpace);
+Check(eq, kUnalignedAllocationInNewSpace);
 }
 Ccmp(result_end, alloc_limit, NoFlag, cc);
@@ -3176,6 +3179,9 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // The top pointer is not updated for allocation folding dominators.
 Str(result_end, MemOperand(top_address));
 }
+// Tag the object.
+ObjectTag(result, result);
 }
 void MacroAssembler::FastAllocate(int object_size, Register result,
@@ -3208,6 +3214,8 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 // Calculate new top and write it back.
 Adds(result_end, result, object_size);
 Str(result_end, MemOperand(top_address));
+ObjectTag(result, result);
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -3241,8 +3249,10 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 if (emit_debug_code()) {
 Tst(result_end, kObjectAlignmentMask);
-Check(ne, kUnalignedAllocationInNewSpace);
+Check(eq, kUnalignedAllocationInNewSpace);
 }
+ObjectTag(result, result);
 }
 void MacroAssembler::AllocateTwoByteString(Register result,

View File

@@ -367,7 +367,9 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
 Node* no_runtime_result = top;
 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
 new_top);
-result.Bind(BitcastWordToTagged(no_runtime_result));
+no_runtime_result = BitcastWordToTagged(
+IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
+result.Bind(no_runtime_result);
 Goto(&merge_runtime);
 Bind(&merge_runtime);
@@ -385,9 +387,8 @@ Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
 if (flags & kDoubleAlignment) {
 // TODO(epertoso): Simd128 alignment.
 Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
-Branch(WordAnd(IntPtrSub(top, IntPtrConstant(kHeapObjectTag)),
-IntPtrConstant(kDoubleAlignmentMask)),
-&not_aligned, &aligned);
+Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
+&aligned);
 Bind(&not_aligned);
 Node* not_aligned_size =

View File

@@ -146,7 +146,10 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
 top_address, jsgraph()->IntPtrConstant(0), top, effect, control);
 // Compute the effective inner allocated address.
-value = graph()->NewNode(machine()->BitcastWordToTagged(), state->top());
+value = graph()->NewNode(
+machine()->BitcastWordToTagged(),
+graph()->NewNode(machine()->IntAdd(), state->top(),
+jsgraph()->IntPtrConstant(kHeapObjectTag)));
 // Extend the allocation {group}.
 group->Add(value);
@@ -195,6 +198,8 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
 }
 vfalse = efalse = graph()->NewNode(allocate_operator_.get(), target,
 size, efalse, if_false);
+vfalse = graph()->NewNode(machine()->IntSub(), vfalse,
+jsgraph()->IntPtrConstant(kHeapObjectTag));
 }
 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
@@ -212,7 +217,10 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
 top_address, jsgraph()->IntPtrConstant(0), top, effect, control);
 // Compute the initial object address.
-value = graph()->NewNode(machine()->BitcastWordToTagged(), value);
+value = graph()->NewNode(
+machine()->BitcastWordToTagged(),
+graph()->NewNode(machine()->IntAdd(), value,
+jsgraph()->IntPtrConstant(kHeapObjectTag)));
 // Start a new allocation group.
 AllocationGroup* group =
@@ -248,7 +256,10 @@ void MemoryOptimizer::VisitAllocate(Node* node, AllocationState const* state) {
 machine()->Store(StoreRepresentation(
 MachineType::PointerRepresentation(), kNoWriteBarrier)),
 top_address, jsgraph()->IntPtrConstant(0), new_top, etrue, if_true);
-vtrue = graph()->NewNode(machine()->BitcastWordToTagged(), top);
+vtrue = graph()->NewNode(
+machine()->BitcastWordToTagged(),
+graph()->NewNode(machine()->IntAdd(), top,
+jsgraph()->IntPtrConstant(kHeapObjectTag)));
 }
 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);

View File

@@ -5172,8 +5172,10 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
 Register top_address = scratch0();
+__ sub(r0, r0, Operand(kHeapObjectTag));
 __ mov(top_address, Operand(allocation_top));
 __ str(r0, MemOperand(top_address));
+__ add(r0, r0, Operand(kHeapObjectTag));
 }
 }
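The same undo pattern is restored in every Lithium backend that follows (arm64, ia32, mips, mips64, x64): when a deferred allocation falls back to the runtime, the code rolls the top pointer back past the abandoned object; with an untagged top this means untagging the result register, storing it as the new top, then re-tagging. A standalone sketch of that sequence, assuming kHeapObjectTag = 1 and an invented helper name:

#include <cstdint>

const uintptr_t kHeapObjectTag = 1;

// result is the tagged pointer to the just-allocated, now abandoned object.
uintptr_t UndoAllocation(uintptr_t* top_address, uintptr_t result) {
  *top_address = result - kHeapObjectTag;  // write back the untagged top
  return result;  // the register keeps its tagged value for later code
}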

View File

@@ -1502,8 +1502,10 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
 Register top_address = x10;
+__ Sub(x0, x0, Operand(kHeapObjectTag));
 __ Mov(top_address, Operand(allocation_top));
 __ Str(x0, MemOperand(top_address));
+__ Add(x0, x0, Operand(kHeapObjectTag));
 }
 }

View File

@@ -4955,7 +4955,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 // undo the allocation.
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
+__ sub(eax, Immediate(kHeapObjectTag));
 __ mov(Operand::StaticVariable(allocation_top), eax);
+__ add(eax, Immediate(kHeapObjectTag));
 }
 }

View File

@@ -5141,8 +5141,10 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
 Register top_address = scratch0();
+__ Subu(v0, v0, Operand(kHeapObjectTag));
 __ li(top_address, Operand(allocation_top));
 __ sw(v0, MemOperand(top_address));
+__ Addu(v0, v0, Operand(kHeapObjectTag));
 }
 }

View File

@@ -5345,8 +5345,10 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
 Register top_address = scratch0();
+__ Dsubu(v0, v0, Operand(kHeapObjectTag));
 __ li(top_address, Operand(allocation_top));
 __ sd(v0, MemOperand(top_address));
+__ Daddu(v0, v0, Operand(kHeapObjectTag));
 }
 }

View File

@@ -5249,7 +5249,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 // undo the allocation.
 ExternalReference allocation_top =
 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
+__ subp(rax, Immediate(kHeapObjectTag));
 __ Store(allocation_top, rax);
+__ addp(rax, Immediate(kHeapObjectTag));
 }
 }

View File

@@ -304,7 +304,6 @@ const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
 // Desired alignment for double values.
 const intptr_t kDoubleAlignment = 8;
 const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
-const intptr_t kDoubleAlignmentMaskTagged = kDoubleAlignmentMask - 1;
 // Desired alignment for 128 bit SIMD values.
 const intptr_t kSimd128Alignment = 16;
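The deleted constant is worth a note: with kHeapObjectTag = 1, a tagged top always has its low bit set, so the CL tested double alignment against kDoubleAlignmentMask - 1 (binary 110) rather than 7 (binary 111) to ignore the tag bit. A quick standalone check of that arithmetic:

#include <cassert>
#include <cstdint>

int main() {
  const intptr_t kDoubleAlignmentMask = 7;        // 0b111
  const intptr_t kDoubleAlignmentMaskTagged = 6;  // 0b110: tag bit ignored
  intptr_t aligned_tagged = 0x1000 + 1;    // 8-byte aligned address + tag
  intptr_t unaligned_tagged = 0x1004 + 1;  // only 4-byte aligned + tag
  assert((aligned_tagged & kDoubleAlignmentMaskTagged) == 0);
  assert((unaligned_tagged & kDoubleAlignmentMaskTagged) != 0);
  // The plain mask would see the tag bit and misreport aligned values:
  assert((aligned_tagged & kDoubleAlignmentMask) != 0);
  return 0;
}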

View File

@@ -1635,39 +1635,51 @@ class PageIterator BASE_EMBEDDED {
 // space.
 class AllocationInfo {
 public:
-AllocationInfo() { Reset(nullptr, nullptr); }
-AllocationInfo(Address top, Address limit) { Reset(top, limit); }
+AllocationInfo() : top_(nullptr), limit_(nullptr) {}
+AllocationInfo(Address top, Address limit) : top_(top), limit_(limit) {}
 void Reset(Address top, Address limit) {
 set_top(top);
 set_limit(limit);
 }
-inline void set_top(Address top) {
-SLOW_DCHECK((reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0);
-top_ = reinterpret_cast<intptr_t>(top) + kHeapObjectTag;
+INLINE(void set_top(Address top)) {
+SLOW_DCHECK(top == NULL ||
+(reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0);
+top_ = top;
 }
-inline Address top() const {
-SLOW_DCHECK((top_ & kHeapObjectTagMask) == kHeapObjectTag);
-return reinterpret_cast<Address>(top_ - kHeapObjectTag);
+INLINE(Address top()) const {
+SLOW_DCHECK(top_ == NULL ||
+(reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0);
+return top_;
 }
-Address* top_address() { return reinterpret_cast<Address*>(&top_); }
+Address* top_address() { return &top_; }
-inline void set_limit(Address limit) {
-limit_ = reinterpret_cast<intptr_t>(limit);
+INLINE(void set_limit(Address limit)) {
+limit_ = limit;
 }
-inline Address limit() const { return reinterpret_cast<Address>(limit_); }
+INLINE(Address limit()) const {
+return limit_;
+}
-Address* limit_address() { return reinterpret_cast<Address*>(&limit_); }
+Address* limit_address() { return &limit_; }
 #ifdef DEBUG
 bool VerifyPagedAllocation() {
 return (Page::FromAllocationAreaAddress(top_) ==
 Page::FromAllocationAreaAddress(limit_)) &&
 (top_ <= limit_);
 }
 #endif
 private:
-// Current tagged allocation top.
-intptr_t top_;
-// Current untagged allocation limit.
-intptr_t limit_;
+// Current allocation top.
+Address top_;
+// Current allocation limit.
+Address limit_;
 };
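With the revert, AllocationInfo is back to plain untagged addresses, and generated code tags each allocation result itself. A hedged sketch of how the two fields drive an inline allocation (not V8's real fast path; size_in_bytes is an illustrative parameter, and HeapObject::FromAddress is the tagging helper the tests below also use):

// Illustrative only: bump-pointer allocation over the restored fields.
Address Allocate(AllocationInfo* info, int size_in_bytes) {
  Address raw = info->top();                                // untagged
  if (raw + size_in_bytes > info->limit()) return nullptr;  // GC needed
  info->set_top(raw + size_in_bytes);  // bump the untagged top
  return raw;  // callers tag via HeapObject::FromAddress(raw)
}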

View File

@@ -4572,7 +4572,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
 __ lea(ebx, Operand(ebx, times_pointer_size, 0));
 __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
-__ dec(edi); // Untag result end.
 __ bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4723,7 +4722,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
-__ dec(edx); // Untag result end.
 __ bind(&done_allocate);
 // Setup the rest parameter array in rax.

View File

@@ -1504,7 +1504,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
 AllocationFlags flags) {
 if (emit_debug_code()) {
 test(result_end, Immediate(kObjectAlignmentMask));
-Check(not_zero, kUnalignedAllocationInNewSpace);
+Check(zero, kUnalignedAllocationInNewSpace);
 }
 ExternalReference allocation_top =
@@ -1555,7 +1555,7 @@ void MacroAssembler::Allocate(int object_size,
 if ((flags & DOUBLE_ALIGNMENT) != 0) {
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
 Label aligned;
-test(result, Immediate(kDoubleAlignmentMaskTagged));
+test(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
 if ((flags & PRETENURE) != 0) {
 cmp(result, Operand::StaticVariable(allocation_limit));
@@ -1583,7 +1583,11 @@ void MacroAssembler::Allocate(int object_size,
 }
 if (top_reg.is(result)) {
-sub(result, Immediate(object_size));
+sub(result, Immediate(object_size - kHeapObjectTag));
+} else {
+// Tag the result.
+DCHECK(kHeapObjectTag == 1);
+inc(result);
 }
 }
@@ -1626,13 +1630,13 @@ void MacroAssembler::Allocate(int header_size,
 if ((flags & DOUBLE_ALIGNMENT) != 0) {
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
 Label aligned;
-test(result, Immediate(kDoubleAlignmentMaskTagged));
+test(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
 if ((flags & PRETENURE) != 0) {
 cmp(result, Operand::StaticVariable(allocation_limit));
 j(above_equal, gc_required);
 }
-mov(Operand(result, -kHeapObjectTag),
+mov(Operand(result, 0),
 Immediate(isolate()->factory()->one_pointer_filler_map()));
 add(result, Immediate(kDoubleSize / 2));
 bind(&aligned);
@@ -1657,6 +1661,10 @@ void MacroAssembler::Allocate(int header_size,
 cmp(result_end, Operand::StaticVariable(allocation_limit));
 j(above, gc_required);
+// Tag result.
+DCHECK(kHeapObjectTag == 1);
+inc(result);
 UpdateAllocationTopHelper(result_end, scratch, flags);
 }
@@ -1695,13 +1703,13 @@ void MacroAssembler::Allocate(Register object_size,
 if ((flags & DOUBLE_ALIGNMENT) != 0) {
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
 Label aligned;
-test(result, Immediate(kDoubleAlignmentMaskTagged));
+test(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
 if ((flags & PRETENURE) != 0) {
 cmp(result, Operand::StaticVariable(allocation_limit));
 j(above_equal, gc_required);
 }
-mov(Operand(result, -kHeapObjectTag),
+mov(Operand(result, 0),
 Immediate(isolate()->factory()->one_pointer_filler_map()));
 add(result, Immediate(kDoubleSize / 2));
 bind(&aligned);
@@ -1715,6 +1723,10 @@ void MacroAssembler::Allocate(Register object_size,
 cmp(result_end, Operand::StaticVariable(allocation_limit));
 j(above, gc_required);
+// Tag result.
+DCHECK(kHeapObjectTag == 1);
+inc(result);
 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
 // The top pointer is not updated for allocation folding dominators.
 UpdateAllocationTopHelper(result_end, scratch, flags);
@@ -1730,9 +1742,9 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 if ((flags & DOUBLE_ALIGNMENT) != 0) {
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
 Label aligned;
-test(result, Immediate(kDoubleAlignmentMaskTagged));
+test(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
-mov(Operand(result, -kHeapObjectTag),
+mov(Operand(result, 0),
 Immediate(isolate()->factory()->one_pointer_filler_map()));
 add(result, Immediate(kDoubleSize / 2));
 bind(&aligned);
@@ -1740,6 +1752,9 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 lea(result_end, Operand(result, object_size));
 UpdateAllocationTopHelper(result_end, no_reg, flags);
+DCHECK(kHeapObjectTag == 1);
+inc(result);
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -1751,9 +1766,9 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 if ((flags & DOUBLE_ALIGNMENT) != 0) {
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
 Label aligned;
-test(result, Immediate(kDoubleAlignmentMaskTagged));
+test(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
-mov(Operand(result, -kHeapObjectTag),
+mov(Operand(result, 0),
 Immediate(isolate()->factory()->one_pointer_filler_map()));
 add(result, Immediate(kDoubleSize / 2));
 bind(&aligned);
@@ -1761,6 +1776,9 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 lea(result_end, Operand(result, object_size, times_1, 0));
 UpdateAllocationTopHelper(result_end, no_reg, flags);
+DCHECK(kHeapObjectTag == 1);
+inc(result);
 }

View File

@@ -4610,7 +4610,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 Label allocate, done_allocate;
 __ lbu(t0, FieldMemOperand(a2, Map::kInstanceSizeOffset));
 __ Allocate(t0, v0, t1, a0, &allocate, SIZE_IN_WORDS);
-__ Subu(t1, t1, Operand(kHeapObjectTag)); // Untag result end.
 __ bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4754,7 +4753,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
-__ Subu(a0, a0, Operand(kHeapObjectTag)); // Untag result end.
 __ bind(&done_allocate);
 // Setup the rest parameter array in v0.

View File

@@ -4142,14 +4142,14 @@ void MacroAssembler::Allocate(int object_size,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-And(result_end, result, Operand(kDoubleAlignmentMaskTagged));
+And(result_end, result, Operand(kDoubleAlignmentMask));
 Label aligned;
 Branch(&aligned, eq, result_end, Operand(zero_reg));
 if ((flags & PRETENURE) != 0) {
 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
 }
 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-sw(result_end, FieldMemOperand(result, 0));
+sw(result_end, MemOperand(result));
 Addu(result, result, Operand(kDoubleSize / 2));
 bind(&aligned);
 }
@@ -4163,6 +4163,9 @@ void MacroAssembler::Allocate(int object_size,
 // The top pointer is not updated for allocation folding dominators.
 sw(result_end, MemOperand(top_address));
 }
+// Tag object.
+Addu(result, result, Operand(kHeapObjectTag));
 }
@@ -4222,14 +4225,14 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-And(result_end, result, Operand(kDoubleAlignmentMaskTagged));
+And(result_end, result, Operand(kDoubleAlignmentMask));
 Label aligned;
 Branch(&aligned, eq, result_end, Operand(zero_reg));
 if ((flags & PRETENURE) != 0) {
 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit));
 }
 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-sw(result_end, FieldMemOperand(result, 0));
+sw(result_end, MemOperand(result));
 Addu(result, result, Operand(kDoubleSize / 2));
 bind(&aligned);
 }
@@ -4248,13 +4251,16 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // Update allocation top. result temporarily holds the new top.
 if (emit_debug_code()) {
 And(alloc_limit, result_end, Operand(kObjectAlignmentMask));
-Check(ne, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
+Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
 }
 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
 // The top pointer is not updated for allocation folding dominators.
 sw(result_end, MemOperand(top_address));
 }
+// Tag object.
+Addu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(int object_size, Register result,
@@ -4283,11 +4289,11 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-And(result_end, result, Operand(kDoubleAlignmentMaskTagged));
+And(result_end, result, Operand(kDoubleAlignmentMask));
 Label aligned;
 Branch(&aligned, eq, result_end, Operand(zero_reg));
 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-sw(result_end, FieldMemOperand(result, 0));
+sw(result_end, MemOperand(result));
 Addu(result, result, Operand(kDoubleSize / 2));
 bind(&aligned);
 }
@@ -4296,6 +4302,8 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 // The top pointer is not updated for allocation folding dominators.
 sw(result_end, MemOperand(top_address));
+Addu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -4320,11 +4328,11 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 // Align the next allocation. Storing the filler map without checking top is
 // safe in new-space because the limit of the heap is aligned there.
 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
-And(result_end, result, Operand(kDoubleAlignmentMaskTagged));
+And(result_end, result, Operand(kDoubleAlignmentMask));
 Label aligned;
 Branch(&aligned, eq, result_end, Operand(zero_reg));
 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
-sw(result_end, FieldMemOperand(result, 0));
+sw(result_end, MemOperand(result));
 Addu(result, result, Operand(kDoubleSize / 2));
 bind(&aligned);
 }
@@ -4340,6 +4348,8 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 // The top pointer is not updated for allocation folding dominators.
 sw(result_end, MemOperand(top_address));
+Addu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::AllocateTwoByteString(Register result,

View File

@@ -4623,7 +4623,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 Label allocate, done_allocate;
 __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
 __ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS);
-__ Dsubu(a5, a5, Operand(kHeapObjectTag)); // Untag result end
 __ bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4769,7 +4768,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
-__ Dsubu(a0, a0, Operand(kHeapObjectTag)); // Untag result end.
 __ bind(&done_allocate);
 // Setup the rest parameter array in v0.

View File

@@ -4318,11 +4318,11 @@ void MacroAssembler::Allocate(int object_size,
 }
 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
-// the same alignment on MIPS64.
+// the same alignment on ARM64.
 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 if (emit_debug_code()) {
-And(at, result, Operand(kDoubleAlignmentMaskTagged));
+And(at, result, Operand(kDoubleAlignmentMask));
 Check(eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
 }
@@ -4335,6 +4335,9 @@ void MacroAssembler::Allocate(int object_size,
 // The top pointer is not updated for allocation folding dominators.
 sd(result_end, MemOperand(top_address));
 }
+// Tag object.
+Daddu(result, result, Operand(kHeapObjectTag));
 }
@@ -4388,11 +4391,11 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 }
 // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
-// the same alignment on MIPS64.
+// the same alignment on ARM64.
 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 if (emit_debug_code()) {
-And(at, result, Operand(kDoubleAlignmentMaskTagged));
+And(at, result, Operand(kDoubleAlignmentMask));
 Check(eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
 }
@@ -4410,13 +4413,16 @@ void MacroAssembler::Allocate(Register object_size, Register result,
 // Update allocation top. result temporarily holds the new top.
 if (emit_debug_code()) {
 And(at, result_end, Operand(kObjectAlignmentMask));
-Check(ne, kUnalignedAllocationInNewSpace, at, Operand(zero_reg));
+Check(eq, kUnalignedAllocationInNewSpace, at, Operand(zero_reg));
 }
 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
 // The top pointer is not updated for allocation folding dominators.
 sd(result_end, MemOperand(top_address));
 }
+// Tag object if.
+Daddu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(int object_size, Register result,
@@ -4444,13 +4450,15 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 if (emit_debug_code()) {
-And(at, result, Operand(kDoubleAlignmentMaskTagged));
+And(at, result, Operand(kDoubleAlignmentMask));
 Check(eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
 }
 // Calculate new top and write it back.
 Daddu(result_end, result, Operand(object_size));
 sd(result_end, MemOperand(top_address));
+Daddu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -4473,7 +4481,7 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 if (emit_debug_code()) {
-And(at, result, Operand(kDoubleAlignmentMaskTagged));
+And(at, result, Operand(kDoubleAlignmentMask));
 Check(eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
 }
@@ -4487,8 +4495,10 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 // Update allocation top. result temporarily holds the new top.
 if (emit_debug_code()) {
 And(at, result_end, Operand(kObjectAlignmentMask));
-Check(ne, kUnalignedAllocationInNewSpace, at, Operand(zero_reg));
+Check(eq, kUnalignedAllocationInNewSpace, at, Operand(zero_reg));
 }
+Daddu(result, result, Operand(kHeapObjectTag));
 }
 void MacroAssembler::AllocateTwoByteString(Register result,

View File

@@ -4308,7 +4308,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
 __ leal(rbx, Operand(rbx, times_pointer_size, 0));
 __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
-__ decp(rdi); // Untag result end.
 __ bind(&done_allocate);
 // Initialize the JSObject fields.
@@ -4457,7 +4456,6 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
 // Allocate an empty rest parameter array.
 Label allocate, done_allocate;
 __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS);
-__ decp(rdx); // Untag result end.
 __ bind(&done_allocate);
 // Setup the rest parameter array in rax.

View File

@@ -4816,7 +4816,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
 AllocationFlags flags) {
 if (kPointerSize == kDoubleSize) {
 if (FLAG_debug_code) {
-testl(result, Immediate(kDoubleAlignmentMaskTagged));
+testl(result, Immediate(kDoubleAlignmentMask));
 Check(zero, kAllocationIsNotDoubleAligned);
 }
 } else {
@@ -4828,7 +4828,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
 // used in UpdateAllocationTopHelper later.
 DCHECK(!scratch.is(kScratchRegister));
 Label aligned;
-testl(result, Immediate(kDoubleAlignmentMaskTagged));
+testl(result, Immediate(kDoubleAlignmentMask));
 j(zero, &aligned, Label::kNear);
 if (((flags & ALLOCATION_FOLDED) == 0) && ((flags & PRETENURE) != 0)) {
 ExternalReference allocation_limit =
@@ -4837,7 +4837,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
 j(above_equal, gc_required);
 }
 LoadRoot(kScratchRegister, Heap::kOnePointerFillerMapRootIndex);
-movp(Operand(result, -kHeapObjectTag), kScratchRegister);
+movp(Operand(result, 0), kScratchRegister);
 addp(result, Immediate(kDoubleSize / 2));
 bind(&aligned);
 }
@@ -4849,7 +4849,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
 AllocationFlags flags) {
 if (emit_debug_code()) {
 testp(result_end, Immediate(kObjectAlignmentMask));
-Check(not_zero, kUnalignedAllocationInNewSpace);
+Check(zero, kUnalignedAllocationInNewSpace);
 }
 ExternalReference allocation_top =
@@ -4917,7 +4917,11 @@ void MacroAssembler::Allocate(int object_size,
 }
 if (top_reg.is(result)) {
-subp(result, Immediate(object_size));
+subp(result, Immediate(object_size - kHeapObjectTag));
+} else {
+// Tag the result.
+DCHECK(kHeapObjectTag == 1);
+incp(result);
 }
 }
@@ -4982,6 +4986,9 @@ void MacroAssembler::Allocate(Register object_size,
 // The top pointer is not updated for allocation folding dominators.
 UpdateAllocationTopHelper(result_end, scratch, flags);
 }
+// Tag the result.
+addp(result, Immediate(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(int object_size, Register result,
@@ -4997,6 +5004,8 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
 leap(result_end, Operand(result, object_size));
 UpdateAllocationTopHelper(result_end, no_reg, flags);
+addp(result, Immediate(kHeapObjectTag));
 }
 void MacroAssembler::FastAllocate(Register object_size, Register result,
@@ -5012,6 +5021,8 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
 leap(result_end, Operand(result, object_size, times_1, 0));
 UpdateAllocationTopHelper(result_end, no_reg, flags);
+addp(result, Immediate(kHeapObjectTag));
 }
 void MacroAssembler::AllocateHeapNumber(Register result,

View File

@@ -45,7 +45,8 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
 } else {
 heap->new_space()->DisableInlineAllocationSteps();
 int overall_free_memory =
-static_cast<int>(heap->new_space()->limit() - heap->new_space()->top());
+static_cast<int>(*heap->new_space()->allocation_limit_address() -
+*heap->new_space()->allocation_top_address());
 CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
 }
 while (free_memory > 0) {
@@ -58,13 +59,9 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
 if (length <= 0) {
 // Not enough room to create another fixed array. Let's create a filler.
 if (free_memory > (2 * kPointerSize)) {
-if (tenure == i::TENURED) {
-heap->CreateFillerObjectAt(heap->old_space()->top(), free_memory,
+heap->CreateFillerObjectAt(
+*heap->old_space()->allocation_top_address(), free_memory,
+ClearRecordedSlots::kNo);
-} else {
-heap->CreateFillerObjectAt(heap->new_space()->top(), free_memory,
-ClearRecordedSlots::kNo);
-}
 }
 break;
 }
@@ -80,7 +77,8 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
 void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
 std::vector<Handle<FixedArray>>* out_handles) {
 space->DisableInlineAllocationSteps();
-int space_remaining = static_cast<int>(space->limit() - space->top());
+int space_remaining = static_cast<int>(*space->allocation_limit_address() -
+*space->allocation_top_address());
 CHECK(space_remaining >= extra_bytes);
 int new_linear_size = space_remaining - extra_bytes;
 if (new_linear_size == 0) return;
@@ -98,7 +96,8 @@ void FillCurrentPage(v8::internal::NewSpace* space,
 bool FillUpOnePage(v8::internal::NewSpace* space,
 std::vector<Handle<FixedArray>>* out_handles) {
 space->DisableInlineAllocationSteps();
-int space_remaining = static_cast<int>(space->limit() - space->top());
+int space_remaining = static_cast<int>(*space->allocation_limit_address() -
+*space->allocation_top_address());
 if (space_remaining == 0) return false;
 std::vector<Handle<FixedArray>> handles =
 heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);

View File

@@ -2027,18 +2027,19 @@ static HeapObject* NewSpaceAllocateAligned(int size,
 // Get new space allocation into the desired alignment.
 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
-Address top = CcTest::heap()->new_space()->top();
-int fill = Heap::GetFillToAlign(top, alignment);
+Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
+int fill = Heap::GetFillToAlign(*top_addr, alignment);
 if (fill) {
 NewSpaceAllocateAligned(fill + offset, kWordAligned);
 }
-return CcTest::heap()->new_space()->top();
+return *top_addr;
 }
 TEST(TestAlignedAllocation) {
 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
+Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
 Address start;
 HeapObject* obj;
 HeapObject* filler;
@@ -2049,7 +2050,7 @@ TEST(TestAlignedAllocation) {
 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
 // There is no filler.
-CHECK_EQ(kPointerSize, CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize, *top_addr - start);
 // Allocate a second pointer sized object that must be double aligned at an
 // unaligned address.
@@ -2060,14 +2061,13 @@ TEST(TestAlignedAllocation) {
 filler = HeapObject::FromAddress(start);
 CHECK(obj != filler && filler->IsFiller() &&
 filler->Size() == kPointerSize);
-CHECK_EQ(kPointerSize + double_misalignment,
-CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
 // Similarly for kDoubleUnaligned.
 start = AlignNewSpace(kDoubleUnaligned, 0);
 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
-CHECK_EQ(kPointerSize, CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize, *top_addr - start);
 start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
 obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
@@ -2075,8 +2075,7 @@ TEST(TestAlignedAllocation) {
 filler = HeapObject::FromAddress(start);
 CHECK(obj != filler && filler->IsFiller() &&
 filler->Size() == kPointerSize);
-CHECK_EQ(kPointerSize + double_misalignment,
-CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
 }
 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
@@ -2085,7 +2084,7 @@ TEST(TestAlignedAllocation) {
 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
 // There is no filler.
-CHECK_EQ(kPointerSize, CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize, *top_addr - start);
 start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
@@ -2093,8 +2092,7 @@ TEST(TestAlignedAllocation) {
 filler = HeapObject::FromAddress(start);
 CHECK(obj != filler && filler->IsFiller() &&
 filler->Size() == kSimd128Size - kPointerSize);
-CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize,
-CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);
 if (double_misalignment) {
 // Test the 2 other alignments possible on 32 bit platforms.
@@ -2105,8 +2103,7 @@ TEST(TestAlignedAllocation) {
 filler = HeapObject::FromAddress(start);
 CHECK(obj != filler && filler->IsFiller() &&
 filler->Size() == 2 * kPointerSize);
-CHECK_EQ(kPointerSize + 2 * kPointerSize,
-CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
 start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
 obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
@@ -2114,8 +2111,7 @@ TEST(TestAlignedAllocation) {
 filler = HeapObject::FromAddress(start);
 CHECK(obj != filler && filler->IsFiller() &&
 filler->Size() == kPointerSize);
-CHECK_EQ(kPointerSize + kPointerSize,
-CcTest::heap()->new_space()->top() - start);
+CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
 }
 }
@@ -2134,13 +2130,13 @@ static HeapObject* OldSpaceAllocateAligned(int size,
 // Get old space allocation into the desired alignment.
 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
-Address top = CcTest::heap()->old_space()->top();
-int fill = Heap::GetFillToAlign(top, alignment);
+Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
+int fill = Heap::GetFillToAlign(*top_addr, alignment);
 int allocation = fill + offset;
 if (allocation) {
 OldSpaceAllocateAligned(allocation, kWordAligned);
 }
-top = CcTest::heap()->old_space()->top();
+Address top = *top_addr;
 // Now force the remaining allocation onto the free list.
 CcTest::heap()->old_space()->EmptyAllocationInfo();
 return top;
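The alignment tests above lean on Heap::GetFillToAlign to compute how much filler brings the current top to the requested alignment. A standalone approximation of that computation follows; note V8's real signature takes an AllocationAlignment enum, so the integer form here is an assumption for illustration:

#include <cstdint>

// Assumed simplified form: bytes of fill so (address + fill) % alignment == 0.
int GetFillToAlign(uintptr_t address, int alignment) {
  int misalignment = static_cast<int>(address & (alignment - 1));
  return misalignment == 0 ? 0 : alignment - misalignment;
}

// e.g. GetFillToAlign(0x1004, 8) == 4, matching the double_misalignment
// constant the tests compute on 32-bit targets.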