Drop the native FastNewBlockContextStub.
This code is almost never executed in real-world code or in benchmarks, so there is no need to keep this native code around.

R=svenpanne@chromium.org

Review URL: https://codereview.chromium.org/148873002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18880 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Commit a2d1f8b8f6 (parent 371d6f6a98)
src/arm/code-stubs-arm.cc
@@ -486,68 +486,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }
 
 
-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [sp]: function.
-  // [sp + kPointerSize]: serialized scope info
-
-  // Try to allocate the context in new space.
-  Label gc;
-  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
-  __ Allocate(FixedArray::SizeFor(length), r0, r1, r2, &gc, TAG_OBJECT);
-
-  // Load the function from the stack.
-  __ ldr(r3, MemOperand(sp, 0));
-
-  // Load the serialized scope info from the stack.
-  __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
-
-  // Set up the object header.
-  __ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
-  __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
-  __ mov(r2, Operand(Smi::FromInt(length)));
-  __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
-
-  // If this block context is nested in the native context we get a smi
-  // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
-  // we still have to look up.
-  Label after_sentinel;
-  __ JumpIfNotSmi(r3, &after_sentinel);
-  if (FLAG_debug_code) {
-    __ cmp(r3, Operand::Zero());
-    __ Assert(eq, kExpected0AsASmiSentinel);
-  }
-  __ ldr(r3, GlobalObjectOperand());
-  __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
-  __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
-  __ bind(&after_sentinel);
-
-  // Set up the fixed slots, copy the global object from the previous context.
-  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
-  __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
-  __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
-  __ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
-  __ str(r2, ContextOperand(r0, Context::GLOBAL_OBJECT_INDEX));
-
-  // Initialize the rest of the slots to the hole value.
-  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
-  for (int i = 0; i < slots_; i++) {
-    __ str(r1, ContextOperand(r0, i + Context::MIN_CONTEXT_SLOTS));
-  }
-
-  // Remove the on-stack argument and return.
-  __ mov(cp, r0);
-  __ add(sp, sp, Operand(2 * kPointerSize));
-  __ Ret();
-
-  // Need to collect. Call into runtime system.
-  __ bind(&gc);
-  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
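As a side note on what the removed fast path computed: below is a minimal, standalone sketch of the allocation-size arithmetic behind FixedArray::SizeFor(length) in the hunk above. The concrete constants (4-byte pointers, a two-field object header, Context::MIN_CONTEXT_SLOTS == 4) are assumptions about the era's 32-bit layout, not taken from this diff.

// Hedged sketch: mirrors "int length = slots_ + Context::MIN_CONTEXT_SLOTS"
// and the byte size passed to __ Allocate(...) in the removed stub.
#include <cstdio>

int main() {
  const int kPointerSize = 4;                // assumption: 32-bit ARM
  const int kHeaderSize = 2 * kPointerSize;  // assumption: map + length fields
  const int kMinContextSlots = 4;            // assumption: closure, previous,
                                             // extension, global object
  for (int slots = 1; slots <= 3; ++slots) {
    int length = slots + kMinContextSlots;
    int bytes = kHeaderSize + length * kPointerSize;  // FixedArray::SizeFor
    std::printf("%d user slot(s) -> %d total slots, %d bytes\n",
                slots, length, bytes);
  }
  return 0;
}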
src/code-stubs.h
@@ -64,7 +64,6 @@ namespace internal {
   V(Interrupt)              \
   V(FastNewClosure)         \
   V(FastNewContext)         \
-  V(FastNewBlockContext)    \
   V(FastCloneShallowArray)  \
   V(FastCloneShallowObject) \
   V(CreateAllocationSite)   \
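The list above is V8's X-macro stub registry: each V(Name) entry expands into, among other things, the Major enum value that a stub's MajorKey() returns, which is why deleting one line here retires the FastNewBlockContext key everywhere. A minimal sketch of the pattern; STUB_LIST and DEF_ENUM are illustrative names, not V8's actual macros.

// Hedged sketch of the X-macro pattern used by the stub list.
#include <cstdio>

#define STUB_LIST(V) \
  V(FastNewClosure)  \
  V(FastNewContext)  \
  V(FastCloneShallowArray)

enum Major {
#define DEF_ENUM(name) name,
  STUB_LIST(DEF_ENUM)
#undef DEF_ENUM
  NUMBER_OF_STUBS
};

int main() {
  // Removing an entry from the list removes its enum value everywhere.
  std::printf("number of stubs: %d\n", NUMBER_OF_STUBS);
  return 0;
}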
src/code-stubs.h
@@ -586,23 +585,6 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
 };
 
 
-class FastNewBlockContextStub : public PlatformCodeStub {
- public:
-  static const int kMaximumSlots = 64;
-
-  explicit FastNewBlockContextStub(int slots) : slots_(slots) {
-    ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
-  }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  int slots_;
-
-  Major MajorKey() { return FastNewBlockContext; }
-  int MinorKey() { return slots_; }
-};
-
-
 class FastCloneShallowArrayStub : public HydrogenCodeStub {
  public:
   // Maximum length of copied elements array.
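Note that MinorKey() returned the slot count, so the stub code cache could hold a separately compiled copy of this stub for every slot count from 1 to kMaximumSlots (64). A hedged sketch of how major and minor keys combine into a cache key; the bit-field widths and enum value are illustrative, and the real packing lives elsewhere in src/code-stubs.h rather than in this diff.

// Hedged sketch: illustrative key packing, not V8's exact bit layout.
#include <cstdint>
#include <cstdio>

enum Major { FastNewBlockContext = 17 };  // illustrative enum value

uint32_t StubKey(Major major, int minor) {
  const int kMajorBits = 7;  // assumption: enough bits for all major keys
  return (static_cast<uint32_t>(minor) << kMajorBits) |
         static_cast<uint32_t>(major);
}

int main() {
  // One cache entry per slot count: up to 64 compiled variants of one stub.
  for (int slots = 1; slots <= 64; slots *= 4) {
    std::printf("slots=%d -> key=0x%x\n", slots,
                StubKey(FastNewBlockContext, slots));
  }
  return 0;
}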
src/full-codegen.cc
@@ -1098,16 +1098,9 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
     scope_ = stmt->scope();
     ASSERT(!scope_->is_module_scope());
     { Comment cmnt(masm_, "[ Extend block context");
-      Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
-      int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
-      __ Push(scope_info);
+      __ Push(scope_->GetScopeInfo());
       PushFunctionArgumentForContextAllocation();
-      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
-        FastNewBlockContextStub stub(heap_slots);
-        __ CallStub(&stub);
-      } else {
-        __ CallRuntime(Runtime::kPushBlockContext, 2);
-      }
+      __ CallRuntime(Runtime::kPushBlockContext, 2);
 
       // Replace the context stored in the frame.
       StoreToFrameField(StandardFrameConstants::kContextOffset,
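This hunk is the behavioral core of the change: full-codegen previously dispatched between the stub and the runtime based on the context size, and now pushes its two arguments and calls Runtime::kPushBlockContext unconditionally, the same runtime entry the stubs' gc fallback already tail-called. One detail worth spelling out: the stack-layout comments in the removed stubs ([sp]: function, [sp + kPointerSize]: serialized scope info) follow from the push order here, since the scope info is pushed first and the function second. A toy model of a downward-growing stack makes this concrete; the names are illustrative, not V8 API.

// Hedged sketch: push order vs. the stub's documented stack layout.
#include <cstdio>

int main() {
  const char* stack[8];
  int sp = 8;                   // stack grows toward lower indices
  stack[--sp] = "scope info";   // __ Push(scope_->GetScopeInfo())
  stack[--sp] = "function";     // PushFunctionArgumentForContextAllocation()
  std::printf("[sp]: %s\n[sp + kPointerSize]: %s\n", stack[sp], stack[sp + 1]);
  return 0;
}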
src/ia32/code-stubs-ia32.cc
@@ -465,75 +465,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }
 
 
-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [esp + (1 * kPointerSize)]: function
-  // [esp + (2 * kPointerSize)]: serialized scope info
-
-  // Try to allocate the context in new space.
-  Label gc;
-  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
-  __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);
-
-  // Get the function or sentinel from the stack.
-  __ mov(ecx, Operand(esp, 1 * kPointerSize));
-
-  // Get the serialized scope info from the stack.
-  __ mov(ebx, Operand(esp, 2 * kPointerSize));
-
-  // Set up the object header.
-  Factory* factory = masm->isolate()->factory();
-  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
-         factory->block_context_map());
-  __ mov(FieldOperand(eax, Context::kLengthOffset),
-         Immediate(Smi::FromInt(length)));
-
-  // If this block context is nested in the native context we get a smi
-  // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
-  // we still have to look up.
-  Label after_sentinel;
-  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
-  if (FLAG_debug_code) {
-    __ cmp(ecx, 0);
-    __ Assert(equal, kExpected0AsASmiSentinel);
-  }
-  __ mov(ecx, GlobalObjectOperand());
-  __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
-  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
-  __ bind(&after_sentinel);
-
-  // Set up the fixed slots.
-  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
-  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
-  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
-
-  // Copy the global object from the previous context.
-  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
-  __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
-
-  // Initialize the rest of the slots to the hole value.
-  if (slots_ == 1) {
-    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
-           factory->the_hole_value());
-  } else {
-    __ mov(ebx, factory->the_hole_value());
-    for (int i = 0; i < slots_; i++) {
-      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
-    }
-  }
-
-  // Return and remove the on-stack parameters.
-  __ mov(esi, eax);
-  __ ret(2 * kPointerSize);
-
-  // Need to collect. Call into runtime system.
-  __ bind(&gc);
-  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   // We don't allow a GC during a store buffer overflow so there is no need to
   // store the registers in any particular way, but we do have to store and
src/mips/code-stubs-mips.cc
@@ -477,66 +477,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }
 
 
-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [sp]: function.
-  // [sp + kPointerSize]: serialized scope info
-
-  // Try to allocate the context in new space.
-  Label gc;
-  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
-  __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT);
-
-  // Load the function from the stack.
-  __ lw(a3, MemOperand(sp, 0));
-
-  // Load the serialized scope info from the stack.
-  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
-
-  // Set up the object header.
-  __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
-  __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
-  __ li(a2, Operand(Smi::FromInt(length)));
-  __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
-
-  // If this block context is nested in the native context we get a smi
-  // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
-  // we still have to look up.
-  Label after_sentinel;
-  __ JumpIfNotSmi(a3, &after_sentinel);
-  if (FLAG_debug_code) {
-    __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
-  }
-  __ lw(a3, GlobalObjectOperand());
-  __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
-  __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
-  __ bind(&after_sentinel);
-
-  // Set up the fixed slots, copy the global object from the previous context.
-  __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
-  __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
-  __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
-  __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
-  __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX));
-
-  // Initialize the rest of the slots to the hole value.
-  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
-  for (int i = 0; i < slots_; i++) {
-    __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS));
-  }
-
-  // Remove the on-stack argument and return.
-  __ mov(cp, v0);
-  __ DropAndRet(2);
-
-  // Need to collect. Call into runtime system.
-  __ bind(&gc);
-  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
 // Takes a Smi and converts to an IEEE 64 bit floating point value in two
 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
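The trailing context comments here and in the ARM hunk describe the Smi-to-double helper that follows the removed code. As a worked illustration of that IEEE 754 split, here is a small standalone sketch; the 32-bit Smi encoding (value in the upper 31 bits, tag bit 0 in the lowest bit) is an assumption about the era's layout, and the real helper does this in assembly across two registers.

// Hedged sketch: untag a 32-bit Smi, convert to double, and show the
// 1/11/52-bit sign/exponent/fraction split described above.
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  int32_t smi = 42 << 1;                 // Smi encoding: value << 1, tag bit 0
  int32_t value = smi >> 1;              // untag (arithmetic shift)
  double d = static_cast<double>(value);

  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  uint64_t sign = bits >> 63;                     // 1 sign bit
  uint64_t exponent = (bits >> 52) & 0x7FF;       // 11 exponent bits, bias 1023
  uint64_t fraction = bits & 0xFFFFFFFFFFFFFull;  // 52 fraction bits
  std::printf("sign=%llu exponent=%llu (unbiased %lld) fraction=0x%llx\n",
              (unsigned long long)sign, (unsigned long long)exponent,
              (long long)exponent - 1023, (unsigned long long)fraction);
  return 0;
}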
src/x64/code-stubs-x64.cc
@@ -462,69 +462,6 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 }
 
 
-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [rsp + (1 * kPointerSize)] : function
-  // [rsp + (2 * kPointerSize)] : serialized scope info
-
-  // Try to allocate the context in new space.
-  Label gc;
-  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
-  __ Allocate(FixedArray::SizeFor(length),
-              rax, rbx, rcx, &gc, TAG_OBJECT);
-
-  // Get the function from the stack.
-  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
-  __ movp(rcx, args.GetArgumentOperand(1));
-  // Get the serialized scope info from the stack.
-  __ movp(rbx, args.GetArgumentOperand(0));
-
-  // Set up the object header.
-  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
-  __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
-  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
-
-  // If this block context is nested in the native context we get a smi
-  // sentinel instead of a function. The block context should get the
-  // canonical empty function of the native context as its closure which
-  // we still have to look up.
-  Label after_sentinel;
-  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
-  if (FLAG_debug_code) {
-    __ cmpq(rcx, Immediate(0));
-    __ Assert(equal, kExpected0AsASmiSentinel);
-  }
-  __ movp(rcx, GlobalObjectOperand());
-  __ movp(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
-  __ movp(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
-  __ bind(&after_sentinel);
-
-  // Set up the fixed slots.
-  __ movp(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
-  __ movp(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
-  __ movp(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
-
-  // Copy the global object from the previous context.
-  __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
-  __ movp(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);
-
-  // Initialize the rest of the slots to the hole value.
-  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
-  for (int i = 0; i < slots_; i++) {
-    __ movp(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
-  }
-
-  // Return and remove the on-stack parameter.
-  __ movp(rsi, rax);
-  __ ret(2 * kPointerSize);
-
-  // Need to collect. Call into runtime system.
-  __ bind(&gc);
-  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   __ PushCallerSaved(save_doubles_);
   const int argument_count = 1;