diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index aa70dcb16b..c1ef22edcf 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1477,10 +1477,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
-#if DEBUG
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-#endif
+  bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
@@ -1488,7 +1487,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(expr->literal_index())));
   __ push(Immediate(constant_elements));
-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1496,13 +1505,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
            FLAG_smi_only_arrays);
-    if (constant_elements_values->map() ==
-        isolate()->heap()->fixed_cow_array_map()) {
-      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
-                          1);
-    }
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
 
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 45fb7557fc..a30d62f965 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -226,7 +226,7 @@ static void GenerateFastCloneShallowArrayCommon(
     Label* fail) {
   // Registers on entry:
   //
-  // rcx: boilerplate array.
+  // rcx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
 
   // All sizes here are multiples of kPointerSize.
@@ -315,7 +315,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_array_map());
     __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
+    GenerateFastCloneShallowArrayCommon(masm, length_,
                                         CLONE_ELEMENTS, &slow_case);
     __ ret(3 * kPointerSize);
 
@@ -346,8 +346,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ pop(rcx);
   }
 
-  GenerateFastCloneShallowArrayCommon(masm, 0,
-                                      CLONE_DOUBLE_ELEMENTS, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 2dc765bcba..35b902d7ae 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1480,10 +1480,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
-#if DEBUG
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-#endif
+  bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
@@ -1491,7 +1490,17 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(expr->literal_index()));
   __ Push(constant_elements);
-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1499,13 +1508,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
            FLAG_smi_only_arrays);
-    if (constant_elements_values->map() ==
-        isolate()->heap()->fixed_cow_array_map()) {
-      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
-                          1);
-    }
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
+    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }
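Note: for readers following the two VisitArrayLiteral hunks, here is a minimal, self-contained sketch of the stub-selection policy they implement. Everything in it is a simplified stand-in: the enums and kMaximumClonedLength mirror ElementsKind and FastCloneShallowArrayStub but are not the real V8 declarations, CALL_RUNTIME collapses the two distinct runtime fallbacks (Runtime::kCreateArrayLiteral and Runtime::kCreateArrayLiteralShallow), and SelectCloneStrategy is a hypothetical helper, not a function in this patch.

#include <cstdio>

// Simplified stand-ins for the V8 enums; names follow ElementsKind and
// FastCloneShallowArrayStub::Mode, but these are not the real declarations.
enum ElementsKind {
  FAST_SMI_ONLY_ELEMENTS,
  FAST_ELEMENTS,
  FAST_DOUBLE_ELEMENTS
};

enum CloneStrategy {
  CLONE_ELEMENTS,          // elements kind fixed at FAST_ELEMENTS
  COPY_ON_WRITE_ELEMENTS,  // boilerplate is COW: share it, don't copy
  CLONE_ANY_ELEMENTS,      // kind not known statically: dispatch at runtime
  CALL_RUNTIME             // stand-in for both Runtime::kCreateArrayLiteral* calls
};

// Assumed to match FastCloneShallowArrayStub::kMaximumClonedLength.
const int kMaximumClonedLength = 8;

// Hypothetical helper mirroring the if/else chain in VisitArrayLiteral:
// the COW check runs first, nested or oversized literals fall back to the
// runtime, and the rest specialize only when the elements kind is already
// known to be FAST_ELEMENTS.
CloneStrategy SelectCloneStrategy(ElementsKind kind, bool is_cow_backed,
                                  int depth, int length) {
  bool has_constant_fast_elements = kind == FAST_ELEMENTS;
  if (has_constant_fast_elements && is_cow_backed) {
    // The boilerplate cannot change, so specialize in advance.
    return COPY_ON_WRITE_ELEMENTS;
  }
  if (depth > 1) return CALL_RUNTIME;                      // kCreateArrayLiteral
  if (length > kMaximumClonedLength) return CALL_RUNTIME;  // kCreateArrayLiteralShallow
  return has_constant_fast_elements ? CLONE_ELEMENTS
                                    : CLONE_ANY_ELEMENTS;
}

int main() {
  // e.g. var a = [1, 2, 3];  constant, COW-backed, shallow.
  printf("%d\n", SelectCloneStrategy(FAST_ELEMENTS, true, 1, 3));
  // A nested literal (depth > 1, non-COW) always goes to the runtime.
  printf("%d\n", SelectCloneStrategy(FAST_ELEMENTS, false, 2, 2));
  // A shallow smi-only literal may transition kind later: generic stub.
  printf("%d\n", SelectCloneStrategy(FAST_SMI_ONLY_ELEMENTS, false, 1, 4));
  return 0;
}

The key design point, as far as the patch shows, is the ordering: the copy-on-write check now precedes the depth and length fallbacks, so a COW-backed FAST_ELEMENTS boilerplate always takes the specialized COPY_ON_WRITE_ELEMENTS stub (sharing the elements array rather than copying it), while the old code always built a CLONE_ANY_ELEMENTS stub and dispatched on the map at runtime.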