Add version of x64 FastCloneShallowArrayStub that copies all boilerplate kinds

R=jkummerow@chromium.org
BUG=none
TEST=none

Review URL: http://codereview.chromium.org/8574058

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10022 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: danno@chromium.org
Date:   2011-11-17 15:08:59 +00:00
Parent: 371841b270
Commit: be923eed32

4 changed files with 102 additions and 66 deletions
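
For orientation, these are the stub's cloning modes as they appear in the V8 sources of this period; the point of the commit is that a single stub compiled with CLONE_ANY_ELEMENTS can now serve every boilerplate kind on x64, as it already could on ia32. A minimal sketch, not the actual header (enumerator names are from the source; the surrounding class and any extra members are elided):

// Sketch of FastCloneShallowArrayStub's Mode enum (class elided).
enum Mode {
  CLONE_ELEMENTS,          // writable FixedArray: copy the elements
  CLONE_DOUBLE_ELEMENTS,   // FixedDoubleArray: copy the unboxed doubles
  COPY_ON_WRITE_ELEMENTS,  // COW FixedArray: share it; the copy is deferred
  CLONE_ANY_ELEMENTS       // inspect the boilerplate at run time
};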

src/ia32/code-stubs-ia32.cc

@@ -236,13 +236,9 @@ static void GenerateFastCloneShallowArrayCommon(
     int length,
     FastCloneShallowArrayStub::Mode mode,
     Label* fail) {
-  // Stack and register layout on entry:
+  // Registers on entry:
   //
-  // [esp + kPointerSize]: constant elements.
-  // [esp + (2 * kPointerSize)]: literal index.
-  // [esp + (3 * kPointerSize)]: literals array.
-  // eax: literal index.
-  // ecx: literals array.
+  // ecx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
   // All sizes here are multiples of kPointerSize.

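To make the helper's contract concrete, here is a loose C++ model of what GenerateFastCloneShallowArrayCommon emits (simplified types and invented names, not V8 code). The key detail is the length == 0 case: the header copy carries the boilerplate's elements pointer along, so the clone shares the elements array, which is only correct for copy-on-write elements.

#include <cstdlib>
#include <cstring>

// Simplified stand-ins for V8's heap objects.
struct FixedArray { int length; void** slots; };
struct JSArray { FixedArray* elements; int length; };

// Model of the emitted code: the boilerplate arrives "in a register"
// (here: a parameter); length and mode are compile-time constants baked
// into the generated stub.
JSArray* CloneShallowArray(const JSArray* boilerplate, int length) {
  JSArray* clone = static_cast<JSArray*>(std::malloc(sizeof(JSArray)));
  *clone = *boilerplate;  // copy header; elements pointer is shared so far
  if (length > 0) {       // writable elements: give the clone its own copy
    FixedArray* copy =
        static_cast<FixedArray*>(std::malloc(sizeof(FixedArray)));
    copy->length = length;
    copy->slots = static_cast<void**>(std::malloc(length * sizeof(void*)));
    std::memcpy(copy->slots, boilerplate->elements->slots,
                length * sizeof(void*));
    clone->elements = copy;
  }
  return clone;  // length == 0 leaves the elements shared (the COW case)
}
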
src/ia32/full-codegen-ia32.cc

@@ -1477,6 +1477,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
+#if DEBUG
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+#endif
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));

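The #if DEBUG guard is there because constant_elements_kind is now consumed only by assertions, which compile to nothing in release builds and would leave the variable unused. The same pattern in portable C++, with the standard assert/NDEBUG machinery standing in for V8's ASSERT/DEBUG:

#include <cassert>
#include <cstddef>

int sum(const int* xs, std::size_t n) {
#ifndef NDEBUG
  std::size_t original_n = n;  // consumed only by the assert below
#endif
  int total = 0;
  const int* end = xs + n;
  for (const int* p = xs; p != end; ++p) total += *p;
  // Compiled out (along with original_n) when NDEBUG is defined.
  assert(static_cast<std::size_t>(end - xs) == original_n);
  return total;
}
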
src/x64/code-stubs-x64.cc

@@ -219,68 +219,38 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
 }
 
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+    MacroAssembler* masm,
+    int length,
+    FastCloneShallowArrayStub::Mode mode,
+    Label* fail) {
+  // Registers on entry:
   //
-  // [rsp + kPointerSize]: constant elements.
-  // [rsp + (2 * kPointerSize)]: literal index.
-  // [rsp + (3 * kPointerSize)]: literals array.
+  // rcx: boilerplate array.
+  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
-  if (length_ > 0) {
-    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
-        ? FixedDoubleArray::SizeFor(length_)
-        : FixedArray::SizeFor(length_);
+  if (length > 0) {
+    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length)
+        : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize + elements_size;
 
-  // Load boilerplate object into rcx and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
-  __ movq(rax, Operand(rsp, 2 * kPointerSize));
-  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
-  __ movq(rcx,
-          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
-  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
-  __ j(equal, &slow_case);
-
-  if (FLAG_debug_code) {
-    const char* message;
-    Heap::RootListIndex expected_map_index;
-    if (mode_ == CLONE_ELEMENTS) {
-      message = "Expected (writable) fixed array";
-      expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
-      message = "Expected (writable) fixed double array";
-      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
-    } else {
-      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
-      message = "Expected copy-on-write fixed array";
-      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
-    }
-    __ push(rcx);
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
-                   expected_map_index);
-    __ Assert(equal, message);
-    __ pop(rcx);
-  }
-
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
-  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
+  __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT);
 
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+    if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ movq(rbx, FieldOperand(rcx, i));
       __ movq(FieldOperand(rax, i), rbx);
     }
   }
 
-  if (length_ > 0) {
+  if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
@@ -288,13 +258,13 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
 
     // Copy the elements array.
-    if (mode_ == CLONE_ELEMENTS) {
+    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
     } else {
-      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
       int i;
       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
@@ -308,8 +278,76 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
       ASSERT(i == elements_size);
     }
   }
+}
 
-  // Return and remove the on-stack parameters.
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [rsp + kPointerSize]: constant elements.
+  // [rsp + (2 * kPointerSize)]: literal index.
+  // [rsp + (3 * kPointerSize)]: literals array.
+
+  // Load boilerplate object into rcx and check if we need to create a
+  // boilerplate.
+  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
+  __ movq(rax, Operand(rsp, 2 * kPointerSize));
+  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
+  __ movq(rcx,
+          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
+  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
+  Label slow_case;
+  __ j(equal, &slow_case);
+
+  FastCloneShallowArrayStub::Mode mode = mode_;
+  // rcx is boilerplate object.
+  Factory* factory = masm->isolate()->factory();
+  if (mode == CLONE_ANY_ELEMENTS) {
+    Label double_elements, check_fast_elements;
+    __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
+    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+           factory->fixed_cow_array_map());
+    __ j(not_equal, &check_fast_elements);
+    GenerateFastCloneShallowArrayCommon(masm, 0,
+                                        COPY_ON_WRITE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&check_fast_elements);
+    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+           factory->fixed_array_map());
+    __ j(not_equal, &double_elements);
+    GenerateFastCloneShallowArrayCommon(masm, length_,
+                                        CLONE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&double_elements);
+    mode = CLONE_DOUBLE_ELEMENTS;
+    // Fall through to generate the code to handle double elements.
+  }
+
+  if (FLAG_debug_code) {
+    const char* message;
+    Heap::RootListIndex expected_map_index;
+    if (mode == CLONE_ELEMENTS) {
+      message = "Expected (writable) fixed array";
+      expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
+    } else {
+      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      message = "Expected copy-on-write fixed array";
+      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
+    }
+    __ push(rcx);
+    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
+    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
+                   expected_map_index);
+    __ Assert(equal, message);
+    __ pop(rcx);
+  }
+
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);

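Restated as ordinary C++ (invented names and simplified control flow, not V8 API): the rewritten Generate checks the boilerplate's elements map once per invocation and routes to a specialized clone path; the copy-on-write case passes length 0 so the elements are shared rather than copied. Note the two writable paths pass the stub's length, matching the ia32 version this mirrors.

enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS,
            COPY_ON_WRITE_ELEMENTS, CLONE_ANY_ELEMENTS };
enum ElementsMap { FIXED_ARRAY_MAP, FIXED_DOUBLE_ARRAY_MAP,
                   FIXED_COW_ARRAY_MAP };

struct Boilerplate { ElementsMap elements_map; };

// Stand-in for GenerateFastCloneShallowArrayCommon (see the model after
// the ia32 hunk above).
void CloneCommon(const Boilerplate& b, int length, Mode mode) {
  (void)b; (void)length; (void)mode;  // allocate, copy header + elements
}

void FastCloneShallowArray(const Boilerplate& b, int length, Mode mode) {
  if (mode == CLONE_ANY_ELEMENTS) {
    switch (b.elements_map) {
      case FIXED_COW_ARRAY_MAP:                    // share, don't copy
        CloneCommon(b, 0, COPY_ON_WRITE_ELEMENTS);
        return;
      case FIXED_ARRAY_MAP:                        // copy tagged slots
        CloneCommon(b, length, CLONE_ELEMENTS);
        return;
      case FIXED_DOUBLE_ARRAY_MAP:                 // handled below
        mode = CLONE_DOUBLE_ELEMENTS;
        break;
    }
  }
  CloneCommon(b, length, mode);  // direct modes and the double case
}
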
src/x64/full-codegen-x64.cc

@@ -1480,8 +1480,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   int length = subexprs->length();
   Handle<FixedArray> constant_elements = expr->constant_elements();
   ASSERT_EQ(2, constant_elements->length());
+#if DEBUG
   ElementsKind constant_elements_kind =
       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+#endif
   Handle<FixedArrayBase> constant_elements_values(
       FixedArrayBase::cast(constant_elements->get(1)));
@@ -1489,13 +1491,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
   __ Push(Smi::FromInt(expr->literal_index()));
   __ Push(constant_elements);
-  if (constant_elements_values->map() ==
-      isolate()->heap()->fixed_cow_array_map()) {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
-    __ CallStub(&stub);
-    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
-  } else if (expr->depth() > 1) {
+  if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
@@ -1503,11 +1499,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
-    FastCloneShallowArrayStub::Mode mode =
-        constant_elements_kind == FAST_DOUBLE_ELEMENTS
-        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
-    FastCloneShallowArrayStub stub(mode, length);
+    if (constant_elements_values->map() ==
+        isolate()->heap()->fixed_cow_array_map()) {
+      __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
+                          1);
+    }
+    FastCloneShallowArrayStub stub(
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS, length);
     __ CallStub(&stub);
   }
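
The net effect on the full-codegen side, sketched as plain C++ (hypothetical helper, simplified): the copy-on-write special case disappears from call-site selection; only literal depth and length still force the runtime paths, and everything else funnels into the single CLONE_ANY_ELEMENTS stub, with kMaximumClonedLength playing the role of max_cloned below.

enum Path {
  RUNTIME_CREATE_LITERAL,    // Runtime::kCreateArrayLiteral
  RUNTIME_CREATE_SHALLOW,    // Runtime::kCreateArrayLiteralShallow
  STUB_CLONE_ANY_ELEMENTS    // FastCloneShallowArrayStub(CLONE_ANY_ELEMENTS)
};

// Mirrors the if/else chain in VisitArrayLiteral after this change.
Path SelectArrayLiteralPath(int depth, int length, int max_cloned) {
  if (depth > 1) return RUNTIME_CREATE_LITERAL;        // nested literals
  if (length > max_cloned) return RUNTIME_CREATE_SHALLOW;
  return STUB_CLONE_ANY_ELEMENTS;  // the stub dispatches on elements kind
}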