Compile FastCloneShallowArrayStub using Crankshaft.

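This replaces the hand-written ARM, ia32 and x64 versions of
FastCloneShallowArrayStub with a single Hydrogen-generated implementation
(HGraphBuilder::BuildCloneShallowArray), turning the stub into a
HydrogenCodeStub with a register-based interface descriptor. Callers in
full-codegen and lithium now pass the literals array, literal index and
constant elements in registers instead of on the stack, and fall back to
the runtime when the serializer is enabled. StubFailureTrampolineStub is
now pregenerated ahead of time, with its FP-register mode encoded in the
minor key.
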
R=danno@chromium.org

Review URL: https://codereview.chromium.org/12521011

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14143 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: mstarzinger@chromium.org
Date:   2013-04-04 17:55:43 +00:00
Commit: af25102f41
Parent: f995f77cee
17 changed files with 364 additions and 523 deletions

src/arm/code-stubs-arm.cc

@@ -38,6 +38,18 @@ namespace v8 {
namespace internal {
void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { r3, r2, r1 };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
descriptor->stack_parameter_count_ = NULL;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
}
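// Judging from the full-codegen callers in this change, the three register
// parameters carry what used to live on the stack: r3 is the literals array,
// r2 the literal index (a Smi), and r1 the constant elements.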
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -403,153 +415,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
// r3: boilerplate literal array.
ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
if (length > 0) {
elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
AllocationFlags flags = TAG_OBJECT;
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ Allocate(size, r0, r1, r2, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->
allocation_site_info_map())));
__ str(r2, FieldMemOperand(r0, allocation_info_start));
__ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize));
}
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
__ ldr(r1, FieldMemOperand(r3, i));
__ str(r1, FieldMemOperand(r0, i));
}
}
if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ add(r2, r0, Operand(JSArray::kSize));
}
__ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
// Copy the elements array.
ASSERT((elements_size % kPointerSize) == 0);
__ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
}
}
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
// Stack layout on entry:
//
// [sp]: constant elements.
// [sp + kPointerSize]: literal index.
// [sp + (2 * kPointerSize)]: literals array.
// Load boilerplate object into r3 and check if we need to create a
// boilerplate.
Label slow_case;
__ ldr(r3, MemOperand(sp, 2 * kPointerSize));
__ ldr(r0, MemOperand(sp, 1 * kPointerSize));
__ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
__ b(eq, &slow_case);
FastCloneShallowArrayStub::Mode mode = mode_;
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
__ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
__ b(ne, &check_fast_elements);
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ bind(&check_fast_elements);
__ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
__ b(ne, &double_elements);
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ bind(&double_elements);
mode = CLONE_DOUBLE_ELEMENTS;
// Fall through to generate the code to handle double elements.
}
if (FLAG_debug_code) {
const char* message;
Heap::RootListIndex expected_map_index;
if (mode == CLONE_ELEMENTS) {
message = "Expected (writable) fixed array";
expected_map_index = Heap::kFixedArrayMapRootIndex;
} else if (mode == CLONE_DOUBLE_ELEMENTS) {
message = "Expected (writable) fixed double array";
expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
} else {
ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
message = "Expected copy-on-write fixed array";
expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
}
__ push(r3);
__ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
__ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
__ CompareRoot(r3, expected_map_index);
__ Assert(eq, message);
__ pop(r3);
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ bind(&slow_case);
__ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
// Takes a Smi and converts to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
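// (A quick illustration, not from this commit, of the two-word IEEE 754
// layout described above, assuming 64-bit little-endian doubles; the value
// 42 and the printed field names are only for demonstration.)
#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  int32_t smi_value = 42;                   // untagged Smi payload
  double d = static_cast<double>(smi_value);
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);      // reinterpret the double's bit pattern
  uint32_t hi = static_cast<uint32_t>(bits >> 32);  // sign + 11 exponent + 20 fraction bits
  uint32_t lo = static_cast<uint32_t>(bits);        // remaining 32 fraction bits
  std::printf("sign=%u exponent=%u (bias 1023) fraction_hi=0x%05x fraction_lo=0x%08x\n",
              hi >> 31, (hi >> 20) & 0x7FF, hi & 0xFFFFF, lo);
  // For 42 = 1.3125 * 2^5 this prints exponent=1028, i.e. 5 + 1023.
  return 0;
}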
@@ -3879,6 +3744,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}
@@ -3895,11 +3761,13 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
Code* save_doubles_code;
if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
save_doubles_code = *save_doubles.GetCode(isolate);
save_doubles_code->set_is_pregenerated(true);
Code* store_buffer_overflow_code = *stub.GetCode(isolate);
store_buffer_overflow_code->set_is_pregenerated(true);
}
Code* store_buffer_overflow_code;
if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) {
store_buffer_overflow_code = *stub.GetCode(isolate);
}
save_doubles_code->set_is_pregenerated(true);
store_buffer_overflow_code->set_is_pregenerated(true);
isolate->set_fp_stubs_generated(true);
}
@@ -7662,11 +7530,6 @@ bool RecordWriteStub::IsPregenerated() {
}
bool StoreBufferOverflowStub::IsPregenerated() {
return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
}
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -7955,9 +7818,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
ASSERT(!Serializer::enabled());
bool save_fp_regs = CpuFeatures::IsSupported(VFP2);
CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
__ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;

src/arm/code-stubs-arm.h

@@ -61,11 +61,13 @@ class TranscendentalCacheStub: public PlatformCodeStub {
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
: save_doubles_(save_fp) { }
: save_doubles_(save_fp) {
ASSERT(CpuFeatures::IsSafeForSnapshot(VFP2) || save_fp == kDontSaveFPRegs);
}
void Generate(MacroAssembler* masm);
virtual bool IsPregenerated();
virtual bool IsPregenerated() { return true; }
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }

src/arm/full-codegen-arm.cc

@@ -1727,7 +1727,6 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
__ Push(r3, r2, r1);
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
@@ -1738,8 +1737,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
} else if (expr->depth() > 1) {
__ Push(r3, r2, r1);
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
} else if (Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ Push(r3, r2, r1);
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||

src/arm/lithium-codegen-arm.cc

@@ -5734,7 +5734,6 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
__ mov(r1, Operand(isolate()->factory()->empty_fixed_array()));
__ Push(r3, r2, r1);
// Pick the right runtime function or stub to call.
int length = instr->hydrogen()->length();
@@ -5745,8 +5744,10 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
__ Push(r3, r2, r1);
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ Push(r3, r2, r1);
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
FastCloneShallowArrayStub::Mode mode =

src/code-stubs-hydrogen.cc

@@ -188,6 +188,74 @@ static Handle<Code> DoGenerateCode(Stub* stub) {
}
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
Zone* zone = this->zone();
Factory* factory = isolate()->factory();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
int length = casted_stub()->length();
HInstruction* boilerplate =
AddInstruction(new(zone) HLoadKeyed(GetParameter(0),
GetParameter(1),
NULL,
FAST_ELEMENTS));
CheckBuilder builder(this);
builder.CheckNotUndefined(boilerplate);
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements =
AddInstruction(new(zone) HLoadElements(boilerplate, NULL));
IfBuilder if_fixed_cow(this);
if_fixed_cow.BeginIfMapEquals(elements, factory->fixed_cow_array_map());
environment()->Push(BuildCloneShallowArray(context(),
boilerplate,
alloc_site_mode,
FAST_ELEMENTS,
BailoutId::StubEntry(),
0/*copy-on-write*/));
if_fixed_cow.BeginElse();
IfBuilder if_fixed(this);
if_fixed.BeginIfMapEquals(elements, factory->fixed_array_map());
environment()->Push(BuildCloneShallowArray(context(),
boilerplate,
alloc_site_mode,
FAST_ELEMENTS,
BailoutId::StubEntry(),
length));
if_fixed.BeginElse();
environment()->Push(BuildCloneShallowArray(context(),
boilerplate,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS,
BailoutId::StubEntry(),
length));
} else {
ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
environment()->Push(BuildCloneShallowArray(context(),
boilerplate,
alloc_site_mode,
elements_kind,
BailoutId::StubEntry(),
length));
}
return environment()->Pop();
}
Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
CodeStubGraphBuilder<FastCloneShallowArrayStub> builder(this);
LChunk* chunk = OptimizeGraph(builder.CreateGraph());
return chunk->Codegen(Code::COMPILED_STUB);
}
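// This OptimizeGraph()/Codegen() pair is the "compile using Crankshaft" step
// from the commit title: the stub's Hydrogen graph runs through the
// optimizing pipeline and is emitted as a COMPILED_STUB.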
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
Zone* zone = this->zone();

src/code-stubs.cc

@@ -619,8 +619,10 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).GetCode(isolate);
StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).GetCode(isolate);
StubFailureTrampolineStub stub1(NOT_JS_FUNCTION_STUB_MODE);
StubFailureTrampolineStub stub2(JS_FUNCTION_STUB_MODE);
stub1.GetCode(isolate)->set_is_pregenerated(true);
stub2.GetCode(isolate)->set_is_pregenerated(true);
}

src/code-stubs.h

@@ -443,7 +443,7 @@ class FastNewBlockContextStub : public PlatformCodeStub {
};
class FastCloneShallowArrayStub : public PlatformCodeStub {
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
static const int kMaximumClonedLength = 8;
@@ -467,7 +467,31 @@ class FastCloneShallowArrayStub : public PlatformCodeStub {
ASSERT_LE(length_, kMaximumClonedLength);
}
void Generate(MacroAssembler* masm);
Mode mode() const { return mode_; }
int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
ElementsKind ComputeElementsKind() const {
switch (mode()) {
case CLONE_ELEMENTS:
case COPY_ON_WRITE_ELEMENTS:
return FAST_ELEMENTS;
case CLONE_DOUBLE_ELEMENTS:
return FAST_DOUBLE_ELEMENTS;
case CLONE_ANY_ELEMENTS:
/*fall-through*/;
}
UNREACHABLE();
return LAST_ELEMENTS_KIND;
}
virtual Handle<Code> GenerateCode();
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
private:
Mode mode_;
@@ -1604,18 +1628,25 @@ class StoreArrayLiteralElementStub : public PlatformCodeStub {
class StubFailureTrampolineStub : public PlatformCodeStub {
public:
explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
: function_mode_(function_mode) {}
: fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
virtual bool IsPregenerated() { return true; }
static void GenerateAheadOfTime(Isolate* isolate);
private:
class FPRegisters: public BitField<bool, 0, 1> {};
class FunctionModeField: public BitField<StubFunctionMode, 1, 1> {};
Major MajorKey() { return StubFailureTrampoline; }
int MinorKey() { return static_cast<int>(function_mode_); }
int MinorKey() {
return FPRegisters::encode(fp_registers_) |
FunctionModeField::encode(function_mode_);
}
void Generate(MacroAssembler* masm);
bool fp_registers_;
StubFunctionMode function_mode_;
DISALLOW_COPY_AND_ASSIGN(StubFailureTrampolineStub);

src/deoptimizer.cc

@@ -1204,6 +1204,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
// and the standard stack frame slots. Include space for an argument
// object to the callee and optionally the space to pass the argument
// object to the stub failure handler.
ASSERT(descriptor->register_param_count_ >= 0);
int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
sizeof(Arguments) + kPointerSize;
int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;

src/hydrogen.cc

@@ -1462,6 +1462,123 @@ void HGraphBuilder::BuildCopyElements(HValue* context,
}
HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
HValue* boilerplate,
AllocationSiteMode mode,
ElementsKind kind,
BailoutId id,
int length) {
Zone* zone = this->zone();
Factory* factory = isolate()->factory();
// All sizes here are multiples of kPointerSize.
int size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
int elems_offset = size;
if (length > 0) {
size += IsFastDoubleElementsKind(kind)
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
HAllocate::Flags allocate_flags = HAllocate::CAN_ALLOCATE_IN_NEW_SPACE;
if (IsFastDoubleElementsKind(kind)) {
allocate_flags = static_cast<HAllocate::Flags>(
allocate_flags | HAllocate::ALLOCATE_DOUBLE_ALIGNED);
}
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
HValue* size_in_bytes =
AddInstruction(new(zone) HConstant(size, Representation::Integer32()));
HInstruction* object =
AddInstruction(new(zone) HAllocate(context,
size_in_bytes,
HType::JSObject(),
allocate_flags));
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
HInstruction* value =
AddInstruction(new(zone) HLoadNamedField(boilerplate, true, i));
if (i != JSArray::kMapOffset) {
AddInstruction(new(zone) HStoreNamedField(object,
factory->empty_string(),
value,
true, i));
AddSimulate(id);
} else {
BuildStoreMap(object, value, id);
}
}
}
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
HValue* alloc_site =
AddInstruction(new(zone) HInnerAllocatedObject(object, JSArray::kSize));
Handle<Map> alloc_site_map(isolate()->heap()->allocation_site_info_map());
BuildStoreMap(alloc_site, alloc_site_map, id);
int alloc_payload_offset = AllocationSiteInfo::kPayloadOffset;
AddInstruction(new(zone) HStoreNamedField(alloc_site,
factory->empty_string(),
boilerplate,
true, alloc_payload_offset));
AddSimulate(id);
}
if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
HValue* boilerplate_elements =
AddInstruction(new(zone) HLoadElements(boilerplate, NULL));
HValue* object_elements =
AddInstruction(new(zone) HInnerAllocatedObject(object, elems_offset));
AddInstruction(new(zone) HStoreNamedField(object,
factory->elements_field_string(),
object_elements,
true, JSObject::kElementsOffset));
AddSimulate(id);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HInstruction* value =
AddInstruction(new(zone) HLoadNamedField(boilerplate_elements,
true, i));
AddInstruction(new(zone) HStoreNamedField(object_elements,
factory->empty_string(),
value,
true, i));
AddSimulate(id);
}
// Copy the elements array contents.
// TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
// copying loops with constant length up to a given boundary and use this
// helper here instead.
for (int i = 0; i < length; i++) {
HValue* key_constant =
AddInstruction(new(zone) HConstant(i, Representation::Integer32()));
HInstruction* value =
AddInstruction(new(zone) HLoadKeyed(boilerplate_elements,
key_constant,
NULL,
kind));
AddInstruction(new(zone) HStoreKeyed(object_elements,
key_constant,
value,
kind));
AddSimulate(id);
}
}
return object;
}
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
TypeFeedbackOracle* oracle)
: HGraphBuilder(info),

src/hydrogen.h

@@ -1062,6 +1062,13 @@ class HGraphBuilder {
HValue* length,
HValue* capacity);
HValue* BuildCloneShallowArray(HContext* context,
HValue* boilerplate,
AllocationSiteMode mode,
ElementsKind kind,
BailoutId id,
int length);
private:
HGraphBuilder();
CompilationInfo* info_;

src/ia32/code-stubs-ia32.cc

@@ -43,6 +43,18 @@ namespace v8 {
namespace internal {
void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { eax, ebx, ecx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
descriptor->stack_parameter_count_ = NULL;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
}
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -393,168 +405,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
// ecx: boilerplate literal array.
ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
if (length > 0) {
elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
AllocationFlags flags = TAG_OBJECT;
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ Allocate(size, eax, ebx, edx, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ mov(FieldOperand(eax, allocation_info_start),
Immediate(Handle<Map>(masm->isolate()->heap()->
allocation_site_info_map())));
__ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx);
}
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
__ mov(ebx, FieldOperand(ecx, i));
__ mov(FieldOperand(eax, i), ebx);
}
}
if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ lea(edx, Operand(eax, JSArray::kSize));
}
__ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
// Copy the elements array.
if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
for (int i = 0; i < elements_size; i += kPointerSize) {
__ mov(ebx, FieldOperand(ecx, i));
__ mov(FieldOperand(edx, i), ebx);
}
} else {
ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
int i;
for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
__ mov(ebx, FieldOperand(ecx, i));
__ mov(FieldOperand(edx, i), ebx);
}
while (i < elements_size) {
__ fld_d(FieldOperand(ecx, i));
__ fstp_d(FieldOperand(edx, i));
i += kDoubleSize;
}
ASSERT(i == elements_size);
}
}
}
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
// Stack layout on entry:
//
// [esp + kPointerSize]: constant elements.
// [esp + (2 * kPointerSize)]: literal index.
// [esp + (3 * kPointerSize)]: literals array.
// Load boilerplate object into ecx and check if we need to create a
// boilerplate.
__ mov(ecx, Operand(esp, 3 * kPointerSize));
__ mov(eax, Operand(esp, 2 * kPointerSize));
STATIC_ASSERT(kPointerSize == 4);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
FixedArray::kHeaderSize));
Factory* factory = masm->isolate()->factory();
__ cmp(ecx, factory->undefined_value());
Label slow_case;
__ j(equal, &slow_case);
FastCloneShallowArrayStub::Mode mode = mode_;
// ecx is boilerplate object.
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset));
__ CheckMap(ebx, factory->fixed_cow_array_map(),
&check_fast_elements, DONT_DO_SMI_CHECK);
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&check_fast_elements);
__ CheckMap(ebx, factory->fixed_array_map(),
&double_elements, DONT_DO_SMI_CHECK);
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&double_elements);
mode = CLONE_DOUBLE_ELEMENTS;
// Fall through to generate the code to handle double elements.
}
if (FLAG_debug_code) {
const char* message;
Handle<Map> expected_map;
if (mode == CLONE_ELEMENTS) {
message = "Expected (writable) fixed array";
expected_map = factory->fixed_array_map();
} else if (mode == CLONE_DOUBLE_ELEMENTS) {
message = "Expected (writable) fixed double array";
expected_map = factory->fixed_double_array_map();
} else {
ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
message = "Expected copy-on-write fixed array";
expected_map = factory->fixed_cow_array_map();
}
__ push(ecx);
__ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
__ Assert(equal, message);
__ pop(ecx);
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_mode_,
&slow_case);
// Return and remove the on-stack parameters.
__ ret(3 * kPointerSize);
__ bind(&slow_case);
__ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
@@ -5076,6 +4926,7 @@ bool CEntryStub::IsPregenerated() {
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}
@@ -7786,9 +7637,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
ASSERT(!Serializer::enabled());
bool save_fp_regs = CpuFeatures::IsSupported(SSE2);
CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
__ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;

src/ia32/full-codegen-ia32.cc

@@ -1670,24 +1670,33 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(constant_elements));
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(constant_elements));
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
} else if (Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(constant_elements));
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
@@ -1704,6 +1713,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}

src/ia32/lithium-codegen-ia32.cc

@@ -5609,26 +5609,33 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
DeoptimizeIf(not_equal, instr->environment());
}
// Set up the parameters to the stub/runtime call.
__ PushHeapObject(literals);
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
__ push(Immediate(isolate()->factory()->empty_fixed_array()));
// Pick the right runtime function or stub to call.
// Set up the parameters to the stub/runtime call and pick the right
// runtime function or stub to call. Boilerplate already exists,
// constant elements are never accessed, pass an empty fixed array.
int length = instr->hydrogen()->length();
if (instr->hydrogen()->IsCopyOnWrite()) {
ASSERT(instr->hydrogen()->depth() == 1);
__ LoadHeapObject(eax, literals);
__ mov(ebx, Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ mov(ecx, Immediate(isolate()->factory()->empty_fixed_array()));
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
__ PushHeapObject(literals);
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ push(Immediate(isolate()->factory()->empty_fixed_array()));
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ PushHeapObject(literals);
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ push(Immediate(isolate()->factory()->empty_fixed_array()));
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
__ LoadHeapObject(eax, literals);
__ mov(ebx, Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ mov(ecx, Immediate(isolate()->factory()->empty_fixed_array()));
FastCloneShallowArrayStub::Mode mode =
boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS

src/isolate.cc

@@ -2177,9 +2177,16 @@ bool Isolate::Init(Deserializer* des) {
// Ensure that all stubs which need to be generated ahead of time, but
// cannot be serialized into the snapshot have been generated.
HandleScope scope(this);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
CodeStub::GenerateFPStubs(this);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
StubFailureTrampolineStub::GenerateAheadOfTime(this);
// TODO(mstarzinger): The following is an ugly hack to make sure the
// interface descriptor is initialized even when stubs have been
// deserialized out of the snapshot without the graph builder.
FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE, 0);
stub.InitializeInterfaceDescriptor(
this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
}
if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();

src/x64/code-stubs-x64.cc

@@ -39,6 +39,18 @@ namespace v8 {
namespace internal {
void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { rax, rbx, rcx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
descriptor->stack_parameter_count_ = NULL;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
}
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -386,165 +398,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
static void GenerateFastCloneShallowArrayCommon(
MacroAssembler* masm,
int length,
FastCloneShallowArrayStub::Mode mode,
AllocationSiteMode allocation_site_mode,
Label* fail) {
// Registers on entry:
//
// rcx: boilerplate literal array.
ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
if (length > 0) {
elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
? FixedDoubleArray::SizeFor(length)
: FixedArray::SizeFor(length);
}
int size = JSArray::kSize;
int allocation_info_start = size;
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
size += AllocationSiteInfo::kSize;
}
size += elements_size;
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
AllocationFlags flags = TAG_OBJECT;
if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
}
__ Allocate(size, rax, rbx, rdx, fail, flags);
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
__ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
__ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
}
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
__ movq(rbx, FieldOperand(rcx, i));
__ movq(FieldOperand(rax, i), rbx);
}
}
if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
__ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
} else {
__ lea(rdx, Operand(rax, JSArray::kSize));
}
__ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
// Copy the elements array.
if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
for (int i = 0; i < elements_size; i += kPointerSize) {
__ movq(rbx, FieldOperand(rcx, i));
__ movq(FieldOperand(rdx, i), rbx);
}
} else {
ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
int i;
for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
__ movq(rbx, FieldOperand(rcx, i));
__ movq(FieldOperand(rdx, i), rbx);
}
while (i < elements_size) {
__ movsd(xmm0, FieldOperand(rcx, i));
__ movsd(FieldOperand(rdx, i), xmm0);
i += kDoubleSize;
}
ASSERT(i == elements_size);
}
}
}
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
// Stack layout on entry:
//
// [rsp + kPointerSize]: constant elements.
// [rsp + (2 * kPointerSize)]: literal index.
// [rsp + (3 * kPointerSize)]: literals array.
// Load boilerplate object into rcx and check if we need to create a
// boilerplate.
__ movq(rcx, Operand(rsp, 3 * kPointerSize));
__ movq(rax, Operand(rsp, 2 * kPointerSize));
SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
__ movq(rcx,
FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
__ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
Label slow_case;
__ j(equal, &slow_case);
FastCloneShallowArrayStub::Mode mode = mode_;
// rcx is boilerplate object.
Factory* factory = masm->isolate()->factory();
if (mode == CLONE_ANY_ELEMENTS) {
Label double_elements, check_fast_elements;
__ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
factory->fixed_cow_array_map());
__ j(not_equal, &check_fast_elements);
GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&check_fast_elements);
__ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
factory->fixed_array_map());
__ j(not_equal, &double_elements);
GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&double_elements);
mode = CLONE_DOUBLE_ELEMENTS;
// Fall through to generate the code to handle double elements.
}
if (FLAG_debug_code) {
const char* message;
Heap::RootListIndex expected_map_index;
if (mode == CLONE_ELEMENTS) {
message = "Expected (writable) fixed array";
expected_map_index = Heap::kFixedArrayMapRootIndex;
} else if (mode == CLONE_DOUBLE_ELEMENTS) {
message = "Expected (writable) fixed double array";
expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
} else {
ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
message = "Expected copy-on-write fixed array";
expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
}
__ push(rcx);
__ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
expected_map_index);
__ Assert(equal, message);
__ pop(rcx);
}
GenerateFastCloneShallowArrayCommon(masm, length_, mode,
allocation_site_mode_,
&slow_case);
__ ret(3 * kPointerSize);
__ bind(&slow_case);
__ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
@@ -4154,6 +4007,7 @@ bool CEntryStub::IsPregenerated() {
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}
@@ -6775,8 +6629,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
ASSERT(!Serializer::enabled());
CEntryStub ces(1, kSaveFPRegs);
CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
__ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;

src/x64/full-codegen-x64.cc

@@ -1695,24 +1695,33 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(constant_elements);
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1) {
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(constant_elements);
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
} else if (Serializer::enabled() ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(constant_elements);
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
@@ -1729,6 +1738,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
__ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}

src/x64/lithium-codegen-x64.cc

@@ -5167,26 +5167,33 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
DeoptimizeIf(not_equal, instr->environment());
}
// Set up the parameters to the stub/runtime call.
__ PushHeapObject(literals);
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
// Boilerplate already exists, constant elements are never accessed.
// Pass an empty fixed array.
__ Push(isolate()->factory()->empty_fixed_array());
// Pick the right runtime function or stub to call.
// Set up the parameters to the stub/runtime call and pick the right
// runtime function or stub to call. Boilerplate already exists,
// constant elements are never accessed, pass an empty fixed array.
int length = instr->hydrogen()->length();
if (instr->hydrogen()->IsCopyOnWrite()) {
ASSERT(instr->hydrogen()->depth() == 1);
__ LoadHeapObject(rax, literals);
__ Move(rbx, Smi::FromInt(instr->hydrogen()->literal_index()));
__ Move(rcx, isolate()->factory()->empty_fixed_array());
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
__ PushHeapObject(literals);
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
__ Push(isolate()->factory()->empty_fixed_array());
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ PushHeapObject(literals);
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
__ Push(isolate()->factory()->empty_fixed_array());
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
__ LoadHeapObject(rax, literals);
__ Move(rbx, Smi::FromInt(instr->hydrogen()->literal_index()));
__ Move(rcx, isolate()->factory()->empty_fixed_array());
FastCloneShallowArrayStub::Mode mode =
boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS