diff --git a/src/code-stubs.h b/src/code-stubs.h
index 83b0f49140..16826db9e8 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -355,7 +355,8 @@ class FastCloneShallowArrayStub : public CodeStub {
   enum Mode {
     CLONE_ELEMENTS,
     CLONE_DOUBLE_ELEMENTS,
-    COPY_ON_WRITE_ELEMENTS
+    COPY_ON_WRITE_ELEMENTS,
+    CLONE_ANY_ELEMENTS
   };
 
   FastCloneShallowArrayStub(Mode mode, int length)
@@ -373,8 +374,8 @@ class FastCloneShallowArrayStub : public CodeStub {
 
   Major MajorKey() { return FastCloneShallowArray; }
   int MinorKey() {
-    ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2);
-    return length_ * 3 + mode_;
+    ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2 || mode_ == 3);
+    return length_ * 4 + mode_;
   }
 };
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 8d30447b94..9ccbc9ce7d 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -231,70 +231,42 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
 }
 
 
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+    MacroAssembler* masm,
+    int length,
+    FastCloneShallowArrayStub::Mode mode,
+    Label* fail) {
+  // Stack and register layout on entry:
   //
   // [esp + kPointerSize]: constant elements.
   // [esp + (2 * kPointerSize)]: literal index.
   // [esp + (3 * kPointerSize)]: literals array.
+  // eax: literal index.
+  // ecx: literals array.
+  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
 
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
-  if (length_ > 0) {
-    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
-        ? FixedDoubleArray::SizeFor(length_)
-        : FixedArray::SizeFor(length_);
+  if (length > 0) {
+    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length)
+        : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize + elements_size;
 
-  // Load boilerplate object into ecx and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ mov(ecx, Operand(esp, 3 * kPointerSize));
-  __ mov(eax, Operand(esp, 2 * kPointerSize));
-  STATIC_ASSERT(kPointerSize == 4);
-  STATIC_ASSERT(kSmiTagSize == 1);
-  STATIC_ASSERT(kSmiTag == 0);
-  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
-                           FixedArray::kHeaderSize));
-  Factory* factory = masm->isolate()->factory();
-  __ cmp(ecx, factory->undefined_value());
-  __ j(equal, &slow_case);
-
-  if (FLAG_debug_code) {
-    const char* message;
-    Handle<Map> expected_map;
-    if (mode_ == CLONE_ELEMENTS) {
-      message = "Expected (writable) fixed array";
-      expected_map = factory->fixed_array_map();
-    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
-      message = "Expected (writable) fixed double array";
-      expected_map = factory->fixed_double_array_map();
-    } else {
-      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
-      message = "Expected copy-on-write fixed array";
-      expected_map = factory->fixed_cow_array_map();
-    }
-    __ push(ecx);
-    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
-    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
-    __ Assert(equal, message);
-    __ pop(ecx);
-  }
-
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
-  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
+  __ AllocateInNewSpace(size, eax, ebx, edx, fail, TAG_OBJECT);
 
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+    if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ mov(ebx, FieldOperand(ecx, i));
       __ mov(FieldOperand(eax, i), ebx);
     }
   }
 
-  if (length_ > 0) {
+  if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
@@ -302,13 +274,13 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
 
     // Copy the elements array.
-    if (mode_ == CLONE_ELEMENTS) {
+    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ mov(ebx, FieldOperand(ecx, i));
         __ mov(FieldOperand(edx, i), ebx);
       }
     } else {
-      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
       int i;
       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
         __ mov(ebx, FieldOperand(ecx, i));
@@ -322,7 +294,75 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
       ASSERT(i == elements_size);
     }
   }
+}
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [esp + kPointerSize]: constant elements.
+  // [esp + (2 * kPointerSize)]: literal index.
+  // [esp + (3 * kPointerSize)]: literals array.
+
+  // Load boilerplate object into ecx and check if we need to create a
+  // boilerplate.
+  __ mov(ecx, Operand(esp, 3 * kPointerSize));
+  __ mov(eax, Operand(esp, 2 * kPointerSize));
+  STATIC_ASSERT(kPointerSize == 4);
+  STATIC_ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
+  __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  Factory* factory = masm->isolate()->factory();
+  __ cmp(ecx, factory->undefined_value());
+  Label slow_case;
+  __ j(equal, &slow_case);
+
+  FastCloneShallowArrayStub::Mode mode = mode_;
+  // ecx is boilerplate object.
+  if (mode == CLONE_ANY_ELEMENTS) {
+    Label double_elements, check_fast_elements;
+    __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset));
+    __ CheckMap(ebx, factory->fixed_cow_array_map(),
+                &check_fast_elements, DONT_DO_SMI_CHECK);
+    GenerateFastCloneShallowArrayCommon(masm, 0,
+                                        COPY_ON_WRITE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&check_fast_elements);
+    __ CheckMap(ebx, factory->fixed_array_map(),
+                &double_elements, DONT_DO_SMI_CHECK);
+    GenerateFastCloneShallowArrayCommon(masm, length_,
+                                        CLONE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&double_elements);
+    mode = CLONE_DOUBLE_ELEMENTS;
+    // Fall through to generate the code to handle double elements.
+  }
+
+  if (FLAG_debug_code) {
+    const char* message;
+    Handle<Map> expected_map;
+    if (mode == CLONE_ELEMENTS) {
+      message = "Expected (writable) fixed array";
+      expected_map = factory->fixed_array_map();
+    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map = factory->fixed_double_array_map();
+    } else {
+      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      message = "Expected copy-on-write fixed array";
+      expected_map = factory->fixed_cow_array_map();
+    }
+    __ push(ecx);
+    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
+    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
+    __ Assert(equal, message);
+    __ pop(ecx);
+  }
+
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
 
   // Return and remove the on-stack parameters.
   __ ret(3 * kPointerSize);
 
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 74616d0756..10a3422ea1 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1477,8 +1477,6 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
-  ElementsKind constant_elements_kind =
-      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
 
@@ -1486,14 +1484,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
   __ push(Immediate(Smi::FromInt(expr->literal_index())));
   __ push(Immediate(constant_elements));
-  if (constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    ASSERT(expr->depth() == 1);
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
-    __ CallStub(&stub);
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
-  } else if (expr->depth() > 1) {
+  if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1501,11 +1492,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
            FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        constant_elements_kind == FAST_DOUBLE_ELEMENTS
-            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+    if (constant_elements_values->map() ==
+        isolate()->heap()->fixed_cow_array_map()) {
+      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
+                          1);
+    }
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
     __ CallStub(&stub);
   }
 