Implement fast path for strict closure creation.

Review URL: http://codereview.chromium.org/6677036/

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7251 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
mmaly@chromium.org 2011-03-17 20:28:30 +00:00
parent 1d1018aec7
commit 55906eb46e
13 changed files with 45 additions and 31 deletions
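
In outline: FastNewClosureStub previously handled only non-strict functions, so every strict-mode closure was created through the slow runtime path. This commit threads a StrictModeFlag through the stub instead: the constructor takes the flag, MinorKey() encodes it so the strict and non-strict variants are cached as distinct code objects, and the generated code selects the strict-mode function map when the flag is set. Every call site now constructs the stub the same way:

    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);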


@@ -91,11 +91,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
                         &gc,
                         TAG_OBJECT);
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
-  __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ ldr(r2, MemOperand(r2, Context::SlotOffset(map_index)));
   __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
 
   // Initialize the rest of the function. We don't have to update the
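
For orientation, here is a rough C++ equivalent of what the fast path above now computes. This is an illustrative sketch, not V8 source; the heap accessors (global(), global_context(), get()) are assumptions about the internal API. The strict-mode function map exists as a separate map because strict functions expose different properties; in particular, ES5 requires their caller and arguments accessors to throw.

    // Sketch: pick the function map out of the global context, mirroring the
    // loads through r2 above and the store to HeapObject::kMapOffset.
    int map_index = strict_mode == kStrictMode
        ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
        : Context::FUNCTION_MAP_INDEX;
    Context* global_context = context->global()->global_context();  // assumed accessors
    result->set_map(Map::cast(global_context->get(map_index)));     // new closure's map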


@@ -3116,9 +3116,9 @@ void CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->EmitPush(Operand(function_info));
     frame_->SpillAll();
     frame_->CallStub(&stub, 1);


@@ -1086,9 +1086,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ mov(r0, Operand(info));
     __ push(r0);
     __ CallStub(&stub);
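
When any of the guards above fails (always-opt flags, pretenuring, a non-function scope, or literals to clone), EmitNewClosure still falls back to the runtime; this commit only widens the fast-path condition and leaves the slow path alone. For contrast, a sketch of the slow path's assumed shape on ARM (the root-loading sequence and argument count are assumptions, not part of this diff):

    } else {
      // Slow path sketch: allocate the closure in the runtime, passing the
      // shared function info and the pretenure decision.
      __ mov(r0, Operand(info));
      __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                                : Heap::kFalseValueRootIndex);
      __ Push(cp, r0, r1);
      __ CallRuntime(Runtime::kNewClosure, 3);
    }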


@@ -3726,9 +3726,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ mov(r1, Operand(shared_info));
     __ push(r1);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);


@@ -277,12 +277,17 @@ class ToNumberStub: public CodeStub {
 class FastNewClosureStub : public CodeStub {
  public:
+  explicit FastNewClosureStub(StrictModeFlag strict_mode)
+      : strict_mode_(strict_mode) { }
+
   void Generate(MacroAssembler* masm);
 
  private:
   const char* GetName() { return "FastNewClosureStub"; }
   Major MajorKey() { return FastNewClosure; }
-  int MinorKey() { return 0; }
+  int MinorKey() { return strict_mode_; }
+
+  StrictModeFlag strict_mode_;
 };
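
The MinorKey() change is what makes the parameterization safe: compiled stubs are cached by a key derived from MajorKey and MinorKey, so the strict variant must differ in MinorKey or the first-compiled flavor would be reused for both modes. A minimal sketch of that keying, assuming kNonStrictMode == 0 and kStrictMode == 1 and a simplified packing (V8's actual encoding uses bit-field helpers):

    #include <cstdint>

    // Sketch: how a (MajorKey, MinorKey) pair might collapse into one stub
    // cache key. The packing and bit width here are assumptions.
    static uint32_t StubCacheKey(uint32_t major_key, uint32_t minor_key) {
      const uint32_t kMajorBits = 6;                  // assumed width
      return (minor_key << kMajorBits) | major_key;   // strict flag => distinct key
    }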


@@ -69,11 +69,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Get the function info from the stack.
   __ mov(edx, Operand(esp, 1 * kPointerSize));
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
-  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
   __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
 
   // Initialize the rest of the function. We don't have to update the


@@ -4918,9 +4918,9 @@ Result CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame()->EmitPush(Immediate(function_info));
     return frame()->CallStub(&stub, 1);
   } else {


@@ -1019,9 +1019,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions go through slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(info));
     __ CallStub(&stub);
   } else {


@@ -3739,9 +3739,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(shared_info));
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
   } else {


@@ -68,11 +68,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
-  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
   __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
 
   // Initialize the rest of the function. We don't have to update the


@@ -4262,9 +4262,9 @@ void CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->Push(function_info);
     Result answer = frame_->CallStub(&stub, 1);
     frame_->Push(&answer);


@@ -1041,9 +1041,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ Push(info);
     __ CallStub(&stub);
   } else {


@@ -3539,9 +3539,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ Push(shared_info);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else {