Fast allocation of block contexts.

Review URL: http://codereview.chromium.org/8066002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9542 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: keuchel@chromium.org
Date:   2011-10-06 15:59:02 +00:00
Parent: 80048c14b1
Commit: c1cf622fe9
5 changed files with 233 additions and 3 deletions
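In short: this commit lets block contexts be allocated inline in new space by a new FastNewBlockContextStub (ARM, ia32 and x64), falling back to Runtime::kPushBlockContext when allocation fails or the context is too large. As a rough, self-contained sketch of the size computation the stubs perform, assuming a 32-bit target with kPointerSize == 4, a two-word FixedArray header (map + length) and MIN_CONTEXT_SLOTS == 4; these constants are restated here as assumptions, not taken from the diff:

#include <cstdio>

const int kPointerSize = 4;                // assumed 32-bit target
const int kHeaderSize = 2 * kPointerSize;  // FixedArray header: map + length
const int kMinContextSlots = 4;            // closure, previous, extension, global

// Mirrors FixedArray::SizeFor(length): header plus one word per slot.
int SizeFor(int length) { return kHeaderSize + length * kPointerSize; }

int main() {
  int slots = 2;                           // slots the block itself needs
  int length = slots + kMinContextSlots;   // total context slots
  printf("allocate %d bytes for a %d-slot block context\n",
         SizeFor(length), length);         // 32 bytes in this example
  return 0;
}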

src/arm/code-stubs-arm.cc

@@ -189,6 +189,72 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [sp]: function.
  // [sp + kPointerSize]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        r0, r1, r2, &gc, TAG_OBJECT);
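  // (AllocateInNewSpace leaves the tagged result in r0 and may clobber the
  // scratch registers r1 and r2; on allocation failure it branches to gc.)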

  // Load the function from the stack.
  __ ldr(r3, MemOperand(sp, 0));

  // Load the serialized scope info from the stack.
  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

  // Setup the object header.
  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ mov(r2, Operand(Smi::FromInt(length)));
  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));

  // If this block context is nested in the global context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the global context as its closure, which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(r3, &after_sentinel);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmp(r3, Operand::Zero());
    __ Assert(eq, message);
  }
  __ ldr(r3, GlobalObjectOperand());
  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
  __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Setup the fixed slots.
  __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
  __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
  __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));

  // Copy the global object from the previous context.
  __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ str(r1, ContextOperand(r0, Context::GLOBAL_INDEX));

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS));
  }

  // Remove the on-stack arguments and return.
  __ mov(cp, r0);
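  // (cp is the dedicated context register on ARM; callers expect the new
  // context there.)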
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //

src/code-stubs.h

@@ -55,6 +55,7 @@ namespace internal {
  V(StackCheck)                          \
  V(FastNewClosure)                      \
  V(FastNewContext)                      \
  V(FastNewBlockContext)                 \
  V(FastCloneShallowArray)               \
  V(RevertToNumber)                      \
  V(ToBoolean)                           \
@@ -323,7 +324,7 @@ class FastNewContextStub : public CodeStub {
   static const int kMaximumSlots = 64;
 
   explicit FastNewContextStub(int slots) : slots_(slots) {
-    ASSERT(slots_ > 0 && slots <= kMaximumSlots);
+    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
   }
 
   void Generate(MacroAssembler* masm);
@@ -336,6 +337,24 @@ class FastNewContextStub : public CodeStub {
};


class FastNewBlockContextStub : public CodeStub {
 public:
  static const int kMaximumSlots = 64;
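  // (Same limit as FastNewContextStub above; contexts with more slots fall
  // back to Runtime::kPushBlockContext.)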

  explicit FastNewBlockContextStub(int slots) : slots_(slots) {
    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
  }

  void Generate(MacroAssembler* masm);

 private:
  int slots_;

  Major MajorKey() { return FastNewBlockContext; }
  int MinorKey() { return slots_; }
};


class FastCloneShallowArrayStub : public CodeStub {
 public:
  // Maximum length of copied elements array.
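Because MinorKey() is just slots_, every distinct slot count compiles to its own specialized stub, with the hole-initialization loop in Generate unrolled at that fixed count. A minimal sketch of the per-key caching idea, using stand-in types; V8's real machinery (Handle<Code>, the key packing in CodeStub::GetCode) differs in detail:

#include <map>
#include <utility>

typedef int CodeObject;  // stand-in for a compiled Handle<Code>

// Hypothetical cache keyed by (major, minor); the effect is one specialized
// code object per (stub kind, slot count) pair, reused across compilations.
static std::map<std::pair<int, int>, CodeObject> stub_cache;

CodeObject GetOrCompile(int major_key, int minor_key) {
  std::pair<int, int> key(major_key, minor_key);
  std::map<std::pair<int, int>, CodeObject>::iterator it = stub_cache.find(key);
  if (it != stub_cache.end()) return it->second;  // reuse the compiled stub
  CodeObject code = 0;  // ... would call Stub::Generate(masm) here ...
  stub_cache[key] = code;
  return code;
}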

src/full-codegen.cc

@@ -820,9 +820,19 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
   if (stmt->block_scope() != NULL) {
     { Comment cmnt(masm_, "[ Extend block context");
       scope_ = stmt->block_scope();
-      __ Push(scope_->GetSerializedScopeInfo());
+      Handle<SerializedScopeInfo> scope_info = scope_->GetSerializedScopeInfo();
+      int heap_slots =
+          scope_info->NumberOfContextSlots() - Context::MIN_CONTEXT_SLOTS;
+      __ Push(scope_info);
       PushFunctionArgumentForContextAllocation();
-      __ CallRuntime(Runtime::kPushBlockContext, 2);
+      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
+        FastNewBlockContextStub stub(heap_slots);
+        __ CallStub(&stub);
+      } else {
+        __ CallRuntime(Runtime::kPushBlockContext, 2);
+      }
 
       // Replace the context stored in the frame.
       StoreToFrameField(StandardFrameConstants::kContextOffset,
                         context_register());
     }
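NumberOfContextSlots() counts the fixed slots too, so subtracting Context::MIN_CONTEXT_SLOTS leaves only the block's own slots. A tiny self-contained illustration of the dispatch, assuming MIN_CONTEXT_SLOTS == 4 (an assumption, matching the four fixed slots the stubs initialize):

#include <cstdio>

const int kMinContextSlots = 4;  // assumed: closure/previous/extension/global
const int kMaximumSlots = 64;    // FastNewBlockContextStub::kMaximumSlots

int main() {
  // e.g. a block scope whose scope info reports 6 total context slots:
  int number_of_context_slots = 6;
  int heap_slots = number_of_context_slots - kMinContextSlots;  // 2
  printf(heap_slots <= kMaximumSlots ? "stub fast path\n"
                                     : "Runtime::kPushBlockContext\n");
  return 0;
}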

src/ia32/code-stubs-ia32.cc

@@ -159,6 +159,77 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + (1 * kPointerSize)]: function
  // [esp + (2 * kPointerSize)]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function or sentinel from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Get the serialized scope info from the stack.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));

  // Setup the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->block_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // If this block context is nested in the global context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the global context as its closure, which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
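  // (Label::kNear requests a short jump with an 8-bit displacement; the
  // label is bound only a few instructions below.)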
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmp(ecx, 0);
    __ Assert(equal, message);
  }
  __ mov(ecx, GlobalObjectOperand());
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Setup the fixed slots.
  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx);

  // Initialize the rest of the slots to the hole value.
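  // (slots_ is a compile-time constant for each stub instance, so this
  // expands to straight-line stores; the single-slot case below skips
  // staging the hole value in ebx.)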
  if (slots_ == 1) {
    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
           factory->the_hole_value());
  } else {
    __ mov(ebx, factory->the_hole_value());
    for (int i = 0; i < slots_; i++) {
      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ mov(esi, eax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //

src/x64/code-stubs-x64.cc

@@ -155,6 +155,70 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + (1 * kPointerSize)]: function
  // [rsp + (2 * kPointerSize)]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Get the serialized scope info from the stack.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));

  // Setup the object header.
  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
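  // (kScratchRegister is r10, reserved for the macro assembler on x64, so
  // the map load does not consume any of the registers used above.)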
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // If this block context is nested in the global context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the global context as its closure, which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    const char* message = "Expected 0 as a Smi sentinel";
    __ cmpq(rcx, Immediate(0));
    __ Assert(equal, message);
  }
  __ movq(rcx, GlobalObjectOperand());
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Setup the fixed slots.
  __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
  __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
  }

  // Return and remove the on-stack parameters.
  __ movq(rsi, rax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //