Allow direct allocation in old pointer space.

BUG=

Review URL: https://codereview.chromium.org/12314155

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13940 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
hpayer@chromium.org 2013-03-14 08:32:52 +00:00
parent fbe34d4ba5
commit 05a71fc9e4
35 changed files with 331 additions and 327 deletions
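In brief, a minimal sketch of the change, assembled from the hunks below (the register names and the pretenure condition are illustrative): the fixed-size AllocateInNewSpace entry points become Allocate, and a new PRETENURE_OLD_POINTER_SPACE allocation flag points the same inline bump-pointer fast path at old pointer space instead of new space.

// Before: fixed-size inline allocation always targeted new space.
__ AllocateInNewSpace(size, result, scratch1, scratch2, &gc_required, TAG_OBJECT);

// After: one entry point; the target space is selected through the flags.
AllocationFlags flags = TAG_OBJECT;
if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
  flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
}
__ Allocate(size, result, scratch1, scratch2, &gc_required, flags);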


@ -124,12 +124,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
if (initial_capacity > 0) {
size += FixedArray::SizeFor(initial_capacity);
}
__ AllocateInNewSpace(size,
result,
scratch2,
scratch3,
gc_required,
TAG_OBJECT);
__ Allocate(size, result, scratch2, scratch3, gc_required, TAG_OBJECT);
// Allocated the JSArray. Now initialize the fields except for the elements
// array.
@ -653,12 +648,12 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// -----------------------------------
Label gc_required;
__ AllocateInNewSpace(JSValue::kSize,
r0, // Result.
r3, // Scratch.
r4, // Scratch.
&gc_required,
TAG_OBJECT);
__ Allocate(JSValue::kSize,
r0, // Result.
r3, // Scratch.
r4, // Scratch.
&gc_required,
TAG_OBJECT);
// Initialising the String Object.
Register map = r3;


@ -166,12 +166,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ pop(r3);
// Attempt to allocate new JSFunction in new space.
__ AllocateInNewSpace(JSFunction::kSize,
r0,
r1,
r2,
&gc,
TAG_OBJECT);
__ Allocate(JSFunction::kSize, r0, r1, r2, &gc, TAG_OBJECT);
__ IncrementCounter(counters->fast_new_closure_total(), 1, r6, r7);
@ -298,12 +293,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
// Attempt to allocate the context in new space.
__ AllocateInNewSpace(FixedArray::SizeFor(length),
r0,
r1,
r2,
&gc,
TAG_OBJECT);
__ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc, TAG_OBJECT);
// Load the function from the stack.
__ ldr(r3, MemOperand(sp, 0));
@ -348,8 +338,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
__ AllocateInNewSpace(FixedArray::SizeFor(length),
r0, r1, r2, &gc, TAG_OBJECT);
__ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc, TAG_OBJECT);
// Load the function from the stack.
__ ldr(r3, MemOperand(sp, 0));
@ -435,7 +424,7 @@ static void GenerateFastCloneShallowArrayCommon(
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ AllocateInNewSpace(size, r0, r1, r2, fail, flags);
__ Allocate(size, r0, r1, r2, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->


@ -1547,7 +1547,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -5588,12 +5588,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
// the constructor's prototype changes, but instance size and property
// counts remain unchanged (if slack tracking finished).
ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
__ AllocateInNewSpace(instance_size,
result,
scratch,
scratch2,
deferred->entry(),
TAG_OBJECT);
__ Allocate(instance_size, result, scratch, scratch2, deferred->entry(),
TAG_OBJECT);
__ bind(deferred->exit());
if (FLAG_debug_code) {
@ -5668,12 +5664,10 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
__ AllocateInNewSpace(size,
result,
scratch,
scratch2,
deferred->entry(),
flags);
if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
}
__ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
} else {
Register size = ToRegister(instr->size());
__ AllocateInNewSpace(size,
@ -5906,7 +5900,7 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
@ -5988,7 +5982,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -1564,12 +1564,12 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
}
void MacroAssembler::AllocateInNewSpace(int object_size,
Register result,
Register scratch1,
Register scratch2,
Label* gc_required,
AllocationFlags flags) {
void MacroAssembler::Allocate(int object_size,
Register result,
Register scratch1,
Register scratch2,
Label* gc_required,
AllocationFlags flags) {
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -1597,21 +1597,22 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
// The values must be adjacent in memory to allow the use of LDM.
// Also, assert that the registers are numbered such that the values
// are loaded in the correct order.
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address(isolate());
ExternalReference allocation_top =
AllocationUtils::GetAllocationTopReference(isolate(), flags);
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
intptr_t top =
reinterpret_cast<intptr_t>(new_space_allocation_top.address());
reinterpret_cast<intptr_t>(allocation_top.address());
intptr_t limit =
reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
reinterpret_cast<intptr_t>(allocation_limit.address());
ASSERT((limit - top) == kPointerSize);
ASSERT(result.code() < ip.code());
// Set up allocation top address and object size registers.
Register topaddr = scratch1;
Register obj_size_reg = scratch2;
mov(topaddr, Operand(new_space_allocation_top));
mov(topaddr, Operand(allocation_top));
Operand obj_size_operand = Operand(object_size);
if (!obj_size_operand.is_single_instruction(this)) {
// We are about to steal IP, so we need to load this value first
@ -1639,6 +1640,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
Label aligned;
@ -1675,6 +1677,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
Register scratch2,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -1858,12 +1861,8 @@ void MacroAssembler::AllocateTwoByteConsString(Register result,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
@ -1878,12 +1877,8 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
@ -1898,12 +1893,8 @@ void MacroAssembler::AllocateTwoByteSlicedString(Register result,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
@ -1918,12 +1909,8 @@ void MacroAssembler::AllocateAsciiSlicedString(Register result,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
@ -3231,13 +3218,8 @@ void MacroAssembler::AllocateHeapNumber(Register result,
TaggingMode tagging_mode) {
// Allocate an object in the heap for the heap number and tag it as a heap
// object.
AllocateInNewSpace(HeapNumber::kSize,
result,
scratch1,
scratch2,
gc_required,
tagging_mode == TAG_RESULT ? TAG_OBJECT :
NO_ALLOCATION_FLAGS);
Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS);
// Store heap number map in the allocated object.
AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);


@ -665,19 +665,20 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Allocation support
// Allocate an object in new space. The object_size is specified
// either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the new space is exhausted control continues at the
// gc_required label. The allocated object is returned in result. If
// the flag tag_allocated_object is true the result is tagged as
// a heap object. All registers are also clobbered when control
// continues at the gc_required label.
void AllocateInNewSpace(int object_size,
Register result,
Register scratch1,
Register scratch2,
Label* gc_required,
AllocationFlags flags);
// Allocate an object in new space or old pointer space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the space is exhausted control continues at the gc_required
// label. The allocated object is returned in result. If the flag
// tag_allocated_object is true the result is tagged as a heap object.
// All registers are also clobbered when control continues at the gc_required
// label.
void Allocate(int object_size,
Register result,
Register scratch1,
Register scratch2,
Label* gc_required,
AllocationFlags flags);
void AllocateInNewSpace(Register object_size,
Register result,
Register scratch1,
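A hedged call-site sketch of the renamed entry point with the new flag (registers and size expression are illustrative; only the fixed-size variant accepts the pretenure bit, as the asserts above show):

__ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc,
            static_cast<AllocationFlags>(TAG_OBJECT | PRETENURE_OLD_POINTER_SPACE));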


@ -3738,8 +3738,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ b(ne, &check_capacity);
int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
TAG_OBJECT);
__ Allocate(size, elements_reg, scratch, scratch2, &slow, TAG_OBJECT);
__ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
__ str(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
@ -3888,8 +3887,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ b(ne, &check_capacity);
int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
TAG_OBJECT);
__ Allocate(size, elements_reg, scratch1, scratch2, &slow, TAG_OBJECT);
// Initialize the new FixedDoubleArray.
__ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);


@ -1188,6 +1188,20 @@ ExternalReference ExternalReference::new_space_allocation_limit_address(
}
ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
Isolate* isolate) {
return ExternalReference(
isolate->heap()->OldPointerSpaceAllocationTopAddress());
}
ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
Isolate* isolate) {
return ExternalReference(
isolate->heap()->OldPointerSpaceAllocationLimitAddress());
}
ExternalReference ExternalReference::handle_scope_level_address(
Isolate* isolate) {
return ExternalReference(HandleScope::current_level_address(isolate));


@ -749,6 +749,10 @@ class ExternalReference BASE_EMBEDDED {
// Used for fast allocation in generated code.
static ExternalReference new_space_allocation_top_address(Isolate* isolate);
static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
static ExternalReference old_pointer_space_allocation_top_address(
Isolate* isolate);
static ExternalReference old_pointer_space_allocation_limit_address(
Isolate* isolate);
static ExternalReference double_fp_operation(Token::Value operation,
Isolate* isolate);


@ -189,11 +189,16 @@ HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
HValue* size_in_bytes =
AddInstruction(new(zone) HConstant(size, Representation::Integer32()));
HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
if (FLAG_pretenure_literals) {
flags = static_cast<HAllocate::Flags>(
flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
}
HInstruction* object =
AddInstruction(new(zone) HAllocate(context(),
size_in_bytes,
HType::JSObject(),
HAllocate::CAN_ALLOCATE_IN_NEW_SPACE));
flags));
for (int i = 0; i < size; i += kPointerSize) {
HInstruction* value =


@ -166,6 +166,7 @@ DEFINE_bool(compiled_transitions, false, "use optimizing compiler to "
DEFINE_bool(clever_optimizations,
true,
"Optimize object size, Array shift, DOM strings and string +")
DEFINE_bool(pretenure_literals, false, "allocate literals in old space")
// Flags for data representation optimizations
DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
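Usage note (hedged): the new flag is off by default; running, say, d8 --pretenure-literals foo.js makes the Hydrogen literal builders in this change request CAN_ALLOCATE_IN_OLD_POINTER_SPACE for object literals and for non-double array elements.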


@ -590,6 +590,13 @@ class Heap {
return new_space_.allocation_limit_address();
}
Address* OldPointerSpaceAllocationTopAddress() {
return old_pointer_space_->allocation_top_address();
}
Address* OldPointerSpaceAllocationLimitAddress() {
return old_pointer_space_->allocation_limit_address();
}
// Uncommit unused semi space.
bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }


@ -2669,6 +2669,7 @@ HType HAllocate::CalculateInferredType() {
void HAllocate::PrintDataTo(StringStream* stream) {
size()->PrintNameTo(stream);
if (!GuaranteedInNewSpace()) stream->Add(" (pretenure)");
}


@ -4655,7 +4655,6 @@ class HAllocate: public HTemplateInstruction<2> {
: type_(type),
flags_(flags) {
ASSERT((flags & CAN_ALLOCATE_IN_OLD_DATA_SPACE) == 0); // unimplemented
ASSERT((flags & CAN_ALLOCATE_IN_OLD_POINTER_SPACE) == 0); // unimplemented
SetOperandAt(0, context);
SetOperandAt(1, size);
set_representation(Representation::Tagged());


@ -1073,6 +1073,11 @@ HValue* HGraphBuilder::BuildAllocateElements(HContext* context,
total_size->ClearFlag(HValue::kCanOverflow);
HAllocate::Flags flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
// TODO(hpayer): add support for old data space
if (FLAG_pretenure_literals && !IsFastDoubleElementsKind(kind)) {
flags = static_cast<HAllocate::Flags>(
flags | HAllocate::CAN_ALLOCATE_IN_OLD_POINTER_SPACE);
}
if (IsFastDoubleElementsKind(kind)) {
flags = static_cast<HAllocate::Flags>(
flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
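A note on the guard above (hedged reading): pretenured double arrays would belong in old data space, which this change leaves unimplemented (the CAN_ALLOCATE_IN_OLD_DATA_SPACE assert remains), hence the !IsFastDoubleElementsKind(kind) condition.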


@ -1033,12 +1033,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
if (initial_capacity > 0) {
size += FixedArray::SizeFor(initial_capacity);
}
__ AllocateInNewSpace(size,
result,
scratch2,
scratch3,
gc_required,
TAG_OBJECT);
__ Allocate(size, result, scratch2, scratch3, gc_required, TAG_OBJECT);
// Allocated the JSArray. Now initialize the fields except for the elements
// array.
@ -1618,12 +1613,12 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// Allocate a JSValue and put the tagged pointer into eax.
Label gc_required;
__ AllocateInNewSpace(JSValue::kSize,
eax, // Result.
ecx, // New allocation top (we ignore it).
no_reg,
&gc_required,
TAG_OBJECT);
__ Allocate(JSValue::kSize,
eax, // Result.
ecx, // New allocation top (we ignore it).
no_reg,
&gc_required,
TAG_OBJECT);
// Set the map.
__ LoadGlobalFunctionInitialMap(edi, ecx);


@ -145,7 +145,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
Counters* counters = masm->isolate()->counters();
Label gc;
__ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
__ Allocate(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
__ IncrementCounter(counters->fast_new_closure_total(), 1);
@ -273,8 +273,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
__ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
eax, ebx, ecx, &gc, TAG_OBJECT);
__ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
eax, ebx, ecx, &gc, TAG_OBJECT);
// Get the function from the stack.
__ mov(ecx, Operand(esp, 1 * kPointerSize));
@ -321,8 +321,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
__ AllocateInNewSpace(FixedArray::SizeFor(length),
eax, ebx, ecx, &gc, TAG_OBJECT);
__ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);
// Get the function or sentinel from the stack.
__ mov(ecx, Operand(esp, 1 * kPointerSize));
@ -414,7 +413,7 @@ static void GenerateFastCloneShallowArrayCommon(
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ AllocateInNewSpace(size, eax, ebx, edx, fail, flags);
__ Allocate(size, eax, ebx, edx, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(FieldOperand(eax, allocation_info_start),


@ -1489,7 +1489,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -5468,12 +5468,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
// the constructor's prototype changes, but instance size and property
// counts remain unchanged (if slack tracking finished).
ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
__ AllocateInNewSpace(instance_size,
result,
no_reg,
scratch,
deferred->entry(),
TAG_OBJECT);
__ Allocate(instance_size, result, no_reg, scratch, deferred->entry(),
TAG_OBJECT);
__ bind(deferred->exit());
if (FLAG_debug_code) {
@ -5563,7 +5559,10 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
__ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
}
__ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
} else {
Register size = ToRegister(instr->size());
__ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
@ -5802,7 +5801,7 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
@ -5892,7 +5891,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -1214,8 +1214,8 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
void MacroAssembler::LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference allocation_top =
AllocationUtils::GetAllocationTopReference(isolate(), flags);
// Just return if allocation top is already known.
if ((flags & RESULT_CONTAINS_TOP) != 0) {
@ -1223,7 +1223,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
ASSERT(scratch.is(no_reg));
#ifdef DEBUG
// Assert that result actually contains top on entry.
cmp(result, Operand::StaticVariable(new_space_allocation_top));
cmp(result, Operand::StaticVariable(allocation_top));
Check(equal, "Unexpected allocation top");
#endif
return;
@ -1231,39 +1231,40 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
// Move address of new object to result. Use scratch register if available.
if (scratch.is(no_reg)) {
mov(result, Operand::StaticVariable(new_space_allocation_top));
mov(result, Operand::StaticVariable(allocation_top));
} else {
mov(scratch, Immediate(new_space_allocation_top));
mov(scratch, Immediate(allocation_top));
mov(result, Operand(scratch, 0));
}
}
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
Register scratch) {
Register scratch,
AllocationFlags flags) {
if (emit_debug_code()) {
test(result_end, Immediate(kObjectAlignmentMask));
Check(zero, "Unaligned allocation in new space");
}
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference allocation_top =
AllocationUtils::GetAllocationTopReference(isolate(), flags);
// Update new top. Use scratch if available.
if (scratch.is(no_reg)) {
mov(Operand::StaticVariable(new_space_allocation_top), result_end);
mov(Operand::StaticVariable(allocation_top), result_end);
} else {
mov(Operand(scratch, 0), result_end);
}
}
void MacroAssembler::AllocateInNewSpace(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags) {
void MacroAssembler::Allocate(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
@ -1287,6 +1288,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
@ -1299,20 +1301,20 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
Register top_reg = result_end.is_valid() ? result_end : result;
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address(isolate());
// Calculate new top and bail out if space is exhausted.
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
if (!top_reg.is(result)) {
mov(top_reg, result);
}
add(top_reg, Immediate(object_size));
j(carry, gc_required);
cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
cmp(top_reg, Operand::StaticVariable(allocation_limit));
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(top_reg, scratch);
UpdateAllocationTopHelper(top_reg, scratch, flags);
// Tag result if requested.
bool tag_result = (flags & TAG_OBJECT) != 0;
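For reference, the fast path those instructions implement, written out as plain C++ (a sketch with illustrative names, not V8 code); the same sequence serves new space and old pointer space, since both expose a (top, limit) pair:

// Bump-pointer allocation against an externally held (top, limit) pair.
// Returns NULL where the emitted code would jump to gc_required; if
// TAG_OBJECT is set, the caller tags the result afterwards.
static char* AllocateFast(char** top, char** limit, int object_size) {
  char* result = *top;
  char* new_top = result + object_size;
  if (new_top < result) return NULL;   // overflow, the j(carry, ...) check
  if (new_top > *limit) return NULL;   // space exhausted
  *top = new_top;                      // UpdateAllocationTopHelper
  return result;
}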
@ -1340,6 +1342,7 @@ void MacroAssembler::AllocateInNewSpace(
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & SIZE_IN_WORDS) == 0);
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -1399,7 +1402,7 @@ void MacroAssembler::AllocateInNewSpace(
}
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
UpdateAllocationTopHelper(result_end, scratch, flags);
}
@ -1410,6 +1413,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -1459,7 +1463,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
}
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
UpdateAllocationTopHelper(result_end, scratch, flags);
}
@ -1482,12 +1486,8 @@ void MacroAssembler::AllocateHeapNumber(Register result,
Register scratch2,
Label* gc_required) {
// Allocate heap number in new space.
AllocateInNewSpace(HeapNumber::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map.
mov(FieldOperand(result, HeapObject::kMapOffset),
@ -1575,12 +1575,8 @@ void MacroAssembler::AllocateAsciiString(Register result,
ASSERT(length > 0);
// Allocate ASCII string in new space.
AllocateInNewSpace(SeqOneByteString::SizeFor(length),
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
gc_required, TAG_OBJECT);
// Set the map, length and hash field.
mov(FieldOperand(result, HeapObject::kMapOffset),
@ -1597,12 +1593,8 @@ void MacroAssembler::AllocateTwoByteConsString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate a two-byte cons string in new space.
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
@ -1615,12 +1607,8 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate an ASCII cons string in new space.
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
@ -1633,12 +1621,8 @@ void MacroAssembler::AllocateTwoByteSlicedString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate a two-byte sliced string in new space.
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
@ -1651,12 +1635,8 @@ void MacroAssembler::AllocateAsciiSlicedString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate an ASCII sliced string in new space.
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),


@ -564,22 +564,22 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Allocation support
// Allocate an object in new space. If the new space is exhausted control
// continues at the gc_required label. The allocated object is returned in
// result and end of the new object is returned in result_end. The register
// scratch can be passed as no_reg in which case an additional object
// reference will be added to the reloc info. The returned pointers in result
// and result_end have not yet been tagged as heap objects. If
// result_contains_top_on_entry is true the content of result is known to be
// the allocation top on entry (could be result_end from a previous call to
// AllocateInNewSpace). If result_contains_top_on_entry is true scratch
// Allocate an object in new space or old pointer space. If the given space
// is exhausted control continues at the gc_required label. The allocated
// object is returned in result and end of the new object is returned in
// result_end. The register scratch can be passed as no_reg in which case
// an additional object reference will be added to the reloc info. The
// returned pointers in result and result_end have not yet been tagged as
// heap objects. If result_contains_top_on_entry is true the content of
// result is known to be the allocation top on entry (could be result_end
// from a previous call). If result_contains_top_on_entry is true scratch
// should be no_reg as it is never used.
void AllocateInNewSpace(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags);
void Allocate(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags);
void AllocateInNewSpace(int header_size,
ScaleFactor element_size,
@ -910,7 +910,10 @@ class MacroAssembler: public Assembler {
void LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags);
void UpdateAllocationTopHelper(Register result_end, Register scratch);
void UpdateAllocationTopHelper(Register result_end,
Register scratch,
AllocationFlags flags);
// Helper for PopHandleScope. Allowed to perform a GC and returns
// NULL if gc_allowed. Does not perform a GC if !gc_allowed, and


@ -3067,8 +3067,8 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
__ cmp(ecx, Immediate(instance_size));
__ Check(equal, "Instance size of initial map changed.");
#endif
__ AllocateInNewSpace(instance_size, edx, ecx, no_reg,
&generic_stub_call, NO_ALLOCATION_FLAGS);
__ Allocate(instance_size, edx, ecx, no_reg, &generic_stub_call,
NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
// ebx: initial map
@ -3529,7 +3529,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &check_capacity);
int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
__ Allocate(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
// Restore the key, which is known to be the array length.
// eax: value
@ -3661,7 +3661,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ j(not_equal, &check_capacity);
int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
__ Allocate(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
// Restore the key, which is known to be the array length.
__ mov(ecx, Immediate(0));


@ -49,7 +49,9 @@ enum AllocationFlags {
// words instead of bytes.
SIZE_IN_WORDS = 1 << 2,
// Align the allocation to a multiple of kDoubleSize
DOUBLE_ALIGNMENT = 1 << 3
DOUBLE_ALIGNMENT = 1 << 3,
// Directly allocate in old pointer space
PRETENURE_OLD_POINTER_SPACE = 1 << 4
};
@ -168,6 +170,26 @@ class Comment {
#endif // DEBUG
class AllocationUtils {
public:
static ExternalReference GetAllocationTopReference(
Isolate* isolate, AllocationFlags flags) {
return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
ExternalReference::old_pointer_space_allocation_top_address(isolate) :
ExternalReference::new_space_allocation_top_address(isolate);
}
static ExternalReference GetAllocationLimitReference(
Isolate* isolate, AllocationFlags flags) {
return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
ExternalReference::old_pointer_space_allocation_limit_address(isolate) :
ExternalReference::new_space_allocation_limit_address(isolate);
}
};
} } // namespace v8::internal
#endif // V8_MACRO_ASSEMBLER_H_
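How the helpers resolve in practice, as a hedged illustration (an isolate pointer is assumed in scope):

AllocationFlags flags =
    static_cast<AllocationFlags>(TAG_OBJECT | PRETENURE_OLD_POINTER_SPACE);
// With the pretenure bit set this yields the old-pointer-space top address;
// without it, the new-space top address. The limit helper works the same way.
ExternalReference top =
    AllocationUtils::GetAllocationTopReference(isolate, flags);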


@ -9094,6 +9094,27 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInOldPointerSpace) {
// Allocate a block of memory in old pointer space (filled with a filler).
// Use as fallback for allocation in generated code when old pointer space
// is full.
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(Smi, size_smi, 0);
int size = size_smi->value();
RUNTIME_ASSERT(IsAligned(size, kPointerSize));
RUNTIME_ASSERT(size > 0);
Heap* heap = isolate->heap();
Object* allocation;
{ MaybeObject* maybe_allocation =
heap->old_pointer_space()->AllocateRaw(size);
if (maybe_allocation->ToObject(&allocation)) {
heap->CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
}
return maybe_allocation;
}
}
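Like Runtime_AllocateInNewSpace above it, the new entry should also be callable from JavaScript under --allow-natives-syntax (a hedged note; no such call appears in this change): %AllocateInOldPointerSpace(64) would return a filler-initialized 64-byte block in old pointer space.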
// Push an object onto an array of objects if it is not already in the
// array. Returns true if the element was pushed onto the array and
// false otherwise.


@ -101,6 +101,7 @@ namespace internal {
F(CompileForOnStackReplacement, 1, 1) \
F(SetNewFunctionAttributes, 1, 1) \
F(AllocateInNewSpace, 1, 1) \
F(AllocateInOldPointerSpace, 1, 1) \
F(SetNativeFlag, 1, 1) \
F(StoreArrayLiteralElement, 5, 1) \
F(DebugCallbackSupportsStepping, 1, 1) \


@ -544,6 +544,20 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
UNCLASSIFIED,
55,
"Runtime::AllocateInNewSpace");
Add(ExternalReference::old_pointer_space_allocation_top_address(
isolate).address(),
UNCLASSIFIED,
56,
"Heap::OldPointerSpaceAllocationTopAddress");
Add(ExternalReference::old_pointer_space_allocation_limit_address(
isolate).address(),
UNCLASSIFIED,
57,
"Heap::OldPointerSpaceAllocationLimitAddress");
Add(ExternalReference(Runtime::kAllocateInOldPointerSpace, isolate).address(),
UNCLASSIFIED,
58,
"Runtime::AllocateInOldPointerSpace");
// Add a small set of deopt entry addresses to encoder without generating the
// deopt table code, which isn't possible at deserialization time.
@ -554,7 +568,7 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
entry,
Deoptimizer::LAZY,
Deoptimizer::CALCULATE_ENTRY_ADDRESS);
Add(address, LAZY_DEOPTIMIZATION, 56 + entry, "lazy_deopt");
Add(address, LAZY_DEOPTIMIZATION, 59 + entry, "lazy_deopt");
}
}
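The three entries above take UNCLASSIFIED indices 56 through 58, which is why the base index of the lazy-deopt entries moves from 56 to 59.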


@ -1663,6 +1663,10 @@ class PagedSpace : public Space {
Address top() { return allocation_info_.top; }
Address limit() { return allocation_info_.limit; }
// The allocation top and limit addresses.
Address* allocation_top_address() { return &allocation_info_.top; }
Address* allocation_limit_address() { return &allocation_info_.limit; }
// Allocate the requested number of bytes in the space if possible, return a
// failure object if not.
MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes);


@ -1118,12 +1118,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
if (initial_capacity > 0) {
size += FixedArray::SizeFor(initial_capacity);
}
__ AllocateInNewSpace(size,
result,
scratch2,
scratch3,
gc_required,
TAG_OBJECT);
__ Allocate(size, result, scratch2, scratch3, gc_required, TAG_OBJECT);
// Allocated the JSArray. Now initialize the fields except for the elements
// array.
@ -1646,12 +1641,12 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// Allocate a JSValue and put the tagged pointer into rax.
Label gc_required;
__ AllocateInNewSpace(JSValue::kSize,
rax, // Result.
rcx, // New allocation top (we ignore it).
no_reg,
&gc_required,
TAG_OBJECT);
__ Allocate(JSValue::kSize,
rax, // Result.
rcx, // New allocation top (we ignore it).
no_reg,
&gc_required,
TAG_OBJECT);
// Set the map.
__ LoadGlobalFunctionInitialMap(rdi, rcx);


@ -140,7 +140,7 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
Counters* counters = masm->isolate()->counters();
Label gc;
__ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
__ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
__ IncrementCounter(counters->fast_new_closure_total(), 1);
@ -274,8 +274,8 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
__ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
rax, rbx, rcx, &gc, TAG_OBJECT);
__ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
@ -320,8 +320,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Try to allocate the context in new space.
Label gc;
int length = slots_ + Context::MIN_CONTEXT_SLOTS;
__ AllocateInNewSpace(FixedArray::SizeFor(length),
rax, rbx, rcx, &gc, TAG_OBJECT);
__ Allocate(FixedArray::SizeFor(length),
rax, rbx, rcx, &gc, TAG_OBJECT);
// Get the function from the stack.
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
@ -406,7 +406,7 @@ static void GenerateFastCloneShallowArrayCommon(
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
__ Allocate(size, rax, rbx, rdx, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
@ -1316,12 +1316,8 @@ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
// Allocate heap number in new space.
// Not using AllocateHeapNumber macro in order to reuse
// already loaded heap_number_map.
__ AllocateInNewSpace(HeapNumber::kSize,
rax,
rdx,
no_reg,
&allocation_failed,
TAG_OBJECT);
__ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed,
TAG_OBJECT);
// Set the map.
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,


@ -1514,7 +1514,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -5032,12 +5032,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
// the constructor's prototype changes, but instance size and property
// counts remain unchanged (if slack tracking finished).
ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
__ AllocateInNewSpace(instance_size,
result,
no_reg,
scratch,
deferred->entry(),
TAG_OBJECT);
__ Allocate(instance_size, result, no_reg, scratch, deferred->entry(),
TAG_OBJECT);
__ bind(deferred->exit());
if (FLAG_debug_code) {
@ -5126,7 +5122,10 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
__ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
}
__ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
} else {
Register size = ToRegister(instr->size());
__ AllocateInNewSpace(size, result, temp, no_reg, deferred->entry(), flags);
@ -5347,7 +5346,7 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
@ -5434,7 +5433,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);


@ -3714,8 +3714,8 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
void MacroAssembler::LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference allocation_top =
AllocationUtils::GetAllocationTopReference(isolate(), flags);
// Just return if allocation top is already known.
if ((flags & RESULT_CONTAINS_TOP) != 0) {
@ -3723,7 +3723,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
ASSERT(!scratch.is_valid());
#ifdef DEBUG
// Assert that result actually contains top on entry.
Operand top_operand = ExternalOperand(new_space_allocation_top);
Operand top_operand = ExternalOperand(allocation_top);
cmpq(result, top_operand);
Check(equal, "Unexpected allocation top");
#endif
@ -3733,40 +3733,41 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
// Move address of new object to result. Use scratch register if available,
// and keep address in scratch until call to UpdateAllocationTopHelper.
if (scratch.is_valid()) {
LoadAddress(scratch, new_space_allocation_top);
LoadAddress(scratch, allocation_top);
movq(result, Operand(scratch, 0));
} else {
Load(result, new_space_allocation_top);
Load(result, allocation_top);
}
}
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
Register scratch) {
Register scratch,
AllocationFlags flags) {
if (emit_debug_code()) {
testq(result_end, Immediate(kObjectAlignmentMask));
Check(zero, "Unaligned allocation in new space");
}
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference allocation_top =
AllocationUtils::GetAllocationTopReference(isolate(), flags);
// Update new top.
if (scratch.is_valid()) {
// Scratch already contains address of allocation top.
movq(Operand(scratch, 0), result_end);
} else {
Store(new_space_allocation_top, result_end);
Store(allocation_top, result_end);
}
}
void MacroAssembler::AllocateInNewSpace(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags) {
void MacroAssembler::Allocate(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
@ -3795,8 +3796,8 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
}
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address(isolate());
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
Register top_reg = result_end.is_valid() ? result_end : result;
@ -3805,12 +3806,12 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
}
addq(top_reg, Immediate(object_size));
j(carry, gc_required);
Operand limit_operand = ExternalOperand(new_space_allocation_limit);
Operand limit_operand = ExternalOperand(allocation_limit);
cmpq(top_reg, limit_operand);
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(top_reg, scratch);
UpdateAllocationTopHelper(top_reg, scratch, flags);
bool tag_result = (flags & TAG_OBJECT) != 0;
if (top_reg.is(result)) {
@ -3836,6 +3837,7 @@ void MacroAssembler::AllocateInNewSpace(int header_size,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & SIZE_IN_WORDS) == 0);
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -3875,7 +3877,7 @@ void MacroAssembler::AllocateInNewSpace(int header_size,
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
UpdateAllocationTopHelper(result_end, scratch, flags);
// Tag the result if requested.
if ((flags & TAG_OBJECT) != 0) {
@ -3892,6 +3894,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
@ -3923,7 +3926,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
UpdateAllocationTopHelper(result_end, scratch, flags);
// Align the next allocation. Storing the filler map without checking top is
// always safe because the limit of the heap is always aligned.
@ -3958,12 +3961,7 @@ void MacroAssembler::AllocateHeapNumber(Register result,
Register scratch,
Label* gc_required) {
// Allocate heap number in new space.
AllocateInNewSpace(HeapNumber::kSize,
result,
scratch,
no_reg,
gc_required,
TAG_OBJECT);
Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
// Set the map.
LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
@ -4053,12 +4051,8 @@ void MacroAssembler::AllocateTwoByteConsString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate a two-byte cons string in new space.
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
@ -4071,12 +4065,8 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate an ASCII cons string in new space.
AllocateInNewSpace(ConsString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
@ -4089,12 +4079,8 @@ void MacroAssembler::AllocateTwoByteSlicedString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate a two-byte sliced string in new space.
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
@ -4107,12 +4093,8 @@ void MacroAssembler::AllocateAsciiSlicedString(Register result,
Register scratch2,
Label* gc_required) {
// Allocate an ASCII sliced string in new space.
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
TAG_OBJECT);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);


@ -1024,22 +1024,22 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Allocation support
// Allocate an object in new space. If the new space is exhausted control
// continues at the gc_required label. The allocated object is returned in
// result and end of the new object is returned in result_end. The register
// scratch can be passed as no_reg in which case an additional object
// reference will be added to the reloc info. The returned pointers in result
// and result_end have not yet been tagged as heap objects. If
// result_contains_top_on_entry is true the content of result is known to be
// the allocation top on entry (could be result_end from a previous call to
// AllocateInNewSpace). If result_contains_top_on_entry is true scratch
// Allocate an object in new space or old pointer space. If the given space
// is exhausted control continues at the gc_required label. The allocated
// object is returned in result and end of the new object is returned in
// result_end. The register scratch can be passed as no_reg in which case
// an additional object reference will be added to the reloc info. The
// returned pointers in result and result_end have not yet been tagged as
// heap objects. If result_contains_top_on_entry is true the content of
// result is known to be the allocation top on entry (could be result_end
// from a previous call). If result_contains_top_on_entry is true scratch
// should be no_reg as it is never used.
void AllocateInNewSpace(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags);
void Allocate(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
AllocationFlags flags);
void AllocateInNewSpace(int header_size,
ScaleFactor element_size,
@ -1388,9 +1388,12 @@ class MacroAssembler: public Assembler {
void LoadAllocationTopHelper(Register result,
Register scratch,
AllocationFlags flags);
// Update allocation top with value in result_end register.
// If scratch is valid, it contains the address of the allocation top.
void UpdateAllocationTopHelper(Register result_end, Register scratch);
void UpdateAllocationTopHelper(Register result_end,
Register scratch,
AllocationFlags flags);
// Helper for PopHandleScope. Allowed to perform a GC and returns
// NULL if gc_allowed. Does not perform a GC if !gc_allowed, and


@ -2896,8 +2896,8 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
__ cmpq(rcx, Immediate(instance_size));
__ Check(equal, "Instance size of initial map changed.");
#endif
__ AllocateInNewSpace(instance_size, rdx, rcx, no_reg,
&generic_stub_call, NO_ALLOCATION_FLAGS);
__ Allocate(instance_size, rdx, rcx, no_reg, &generic_stub_call,
NO_ALLOCATION_FLAGS);
// Allocated the JSObject, now initialize the fields and add the heap tag.
// rbx: initial map
@ -3319,7 +3319,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &check_capacity);
int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
__ Allocate(size, rdi, rbx, r8, &slow, TAG_OBJECT);
// rax: value
// rcx: key
@ -3449,7 +3449,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ j(not_equal, &check_capacity);
int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
__ Allocate(size, rdi, rbx, r8, &slow, TAG_OBJECT);
// rax: value
// rcx: key


@ -2051,6 +2051,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
// Test pretenuring of array literals allocated with HAllocate.
TEST(OptimizedPretenuringArrayLiterals) {
i::FLAG_allow_natives_syntax = true;
i::FLAG_pretenure_literals = true;
InitializeVM();
if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
@ -2059,23 +2060,18 @@ TEST(OptimizedPretenuringArrayLiterals) {
AlwaysAllocateScope always_allocate;
v8::Local<v8::Value> res = CompileRun(
"function f() {"
" var numbers = new Array(1, 2, 3);"
" numbers[0] = 3.14;"
" var numbers = [1, 2, 3];"
" numbers[0] = {};"
" return numbers;"
"};"
"f(); f(); f();"
"%OptimizeFunctionOnNextCall(f);"
"f();");
CHECK_EQ(static_cast<int>(3.14),
v8::Object::Cast(*res)->Get(v8_str("0"))->Int32Value());
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
// TODO(hpayer): remove InNewSpace check and test if object was allocated
// in old pointer space.
CHECK(!HEAP->InOldPointerSpace(*o));
CHECK(HEAP->InNewSpace(*o));
CHECK(HEAP->InOldPointerSpace(o->elements()));
}
@ -2103,7 +2099,7 @@ TEST(OptimizedAllocationArrayLiterals) {
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
CHECK(HEAP->InNewSpace(*o));
CHECK(HEAP->InNewSpace(o->elements()));
}