Port FastCloneShallowArrayStub to Turbofan

Refactors CodeStubAssembler::AllocateJSArray to share code.

BUG=chromium:608675

Review-Url: https://codereview.chromium.org/2304573004
Cr-Commit-Position: refs/heads/master@{#39550}
This commit is contained in:
klaasb 2016-09-20 05:35:34 -07:00 committed by Commit bot
parent 05eb56798e
commit e2455873e6
19 changed files with 344 additions and 245 deletions

View File

@ -64,6 +64,8 @@ namespace internal {
V(kEval, "eval") \
V(kExpectedAllocationSite, "Expected allocation site") \
V(kExpectedBooleanValue, "Expected boolean value") \
V(kExpectedFixedDoubleArrayMap, \
"Expected a fixed double array map in fast shallow clone array literal") \
V(kExpectedFunctionObject, "Expected function object in register") \
V(kExpectedHeapNumber, "Expected HeapNumber") \
V(kExpectedJSReceiver, "Expected object to have receiver type") \

View File

@ -46,6 +46,18 @@ Node* CodeStubAssembler::EmptyStringConstant() {
return LoadRoot(Heap::kempty_stringRootIndex);
}
// Loads the FixedArray map from the isolate's roots list.
Node* CodeStubAssembler::FixedArrayMapConstant() {
  return LoadRoot(Heap::kFixedArrayMapRootIndex);
}

// Loads the map used for copy-on-write FixedArrays from the roots list.
Node* CodeStubAssembler::FixedCowArrayMapConstant() {
  return LoadRoot(Heap::kFixedCOWArrayMapRootIndex);
}

// Loads the FixedDoubleArray map from the roots list.
Node* CodeStubAssembler::FixedDoubleArrayMapConstant() {
  return LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
}

// Loads the HeapNumber map from the roots list.
Node* CodeStubAssembler::HeapNumberMapConstant() {
  return LoadRoot(Heap::kHeapNumberMapRootIndex);
}
@ -1000,6 +1012,10 @@ Node* CodeStubAssembler::LoadElements(Node* object) {
return LoadObjectField(object, JSObject::kElementsOffset);
}
// Loads the length field of a JSArray instance.
Node* CodeStubAssembler::LoadJSArrayLength(compiler::Node* array) {
  return LoadObjectField(array, JSArray::kLengthOffset);
}

// Loads the length field of a FixedArrayBase instance.
Node* CodeStubAssembler::LoadFixedArrayBaseLength(compiler::Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}
@ -1366,50 +1382,89 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length) {
return var_result.value();
}
Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
Node* capacity_node, Node* length_node,
compiler::Node* allocation_site,
ParameterMode mode) {
bool is_double = IsFastDoubleElementsKind(kind);
int base_size = JSArray::kSize + FixedArray::kHeaderSize;
int elements_offset = JSArray::kSize;
Comment("begin allocation of JSArray");
// Allocates a JSArray (plus an AllocationMemento directly after it when
// |allocation_site| is non-null) with its header fields initialized but no
// elements backing store installed; the caller must store an elements
// pointer afterwards.
//
// Fix: the hunk contained stale removed-diff lines referencing
// |elements_offset|, |capacity_node| and |mode|, none of which are declared
// in this function — they belonged to the old AllocateJSArray and would not
// compile here.
Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
  Comment("begin allocation of JSArray without elements");
  int base_size = JSArray::kSize;
  if (allocation_site != nullptr) {
    // Reserve room for the memento that trails the array.
    base_size += AllocationMemento::kSize;
  }

  Node* size = IntPtrConstant(base_size);
  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
                                             allocation_site, size);
  return array;
}
// Allocate both array and elements object, and initialize the JSArray.
Heap* heap = isolate()->heap();
Node* array = Allocate(total_size);
// Allocates a JSArray together with its elements backing store in a single
// contiguous allocation laid out as
// [JSArray][optional AllocationMemento][FixedArray(Base) header + slots].
// The JSArray's header fields are initialized and its elements pointer is
// wired to the trailing store, but the element slots themselves are left
// uninitialized. |capacity_mode| describes the encoding of |capacity|.
// Returns the (array, elements) pair.
std::pair<Node*, Node*>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
    Node* capacity, ParameterMode capacity_mode) {
  Comment("begin allocation of JSArray with elements");
  int base_size = JSArray::kSize;

  if (allocation_site != nullptr) {
    base_size += AllocationMemento::kSize;
  }

  // The elements object starts right after the array (and memento, if any).
  int elements_offset = base_size;

  // Compute space for elements
  base_size += FixedArray::kHeaderSize;
  Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
                                             allocation_site, size);

  // Carve the elements object out of the tail of the same allocation.
  Node* elements = InnerAllocate(array, elements_offset);
  StoreObjectField(array, JSObject::kElementsOffset, elements);

  return {array, elements};
}
// Allocates |size_in_bytes| for a JSArray and writes its map, length and
// (empty) properties fields; the elements field is not touched. When
// |allocation_site| is non-null, an AllocationMemento is initialized
// directly after the JSArray proper (at offset JSArray::kSize).
//
// Fix: the hunk contained stale removed-diff lines that stored the
// properties and length fields a second time via the pre-refactor code path
// (`TagParameter(length_node, mode)` — neither identifier is a parameter of
// this function, so the block would not compile as shown).
Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
                                                      Node* array_map,
                                                      Node* length,
                                                      Node* allocation_site,
                                                      Node* size_in_bytes) {
  Node* array = Allocate(size_in_bytes);

  Comment("write JSArray headers");
  StoreMapNoWriteBarrier(array, array_map);

  // |length| is stored as-is; call sites pass an already-tagged value
  // (e.g. SmiConstant / LoadJSArrayLength).
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);

  StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);

  if (allocation_site != nullptr) {
    InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
  }
  return array;
}
// Allocates a JSArray with a backing store of |capacity| element slots and
// fills those slots with the hole. The backing-store map is chosen from
// |kind| (FixedDoubleArray for double kinds, FixedArray otherwise).
// |capacity_mode| describes the encoding of |capacity|.
//
// Fix: the hunk contained stale removed-diff lines — a second declaration of
// |elements| via InnerAllocate plus a duplicate elements-pointer store, and
// duplicated call arguments using the old |capacity_node|/|mode| identifiers
// that no longer exist in this signature.
Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
                                         Node* capacity, Node* length,
                                         Node* allocation_site,
                                         ParameterMode capacity_mode) {
  bool is_double = IsFastDoubleElementsKind(kind);

  // Allocate both array and elements object, and initialize the JSArray.
  Node *array, *elements;
  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, array_map, length, allocation_site, capacity, capacity_mode);

  // Setup elements object.
  Heap* heap = isolate()->heap();
  Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
                                     : heap->fixed_array_map());
  StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
                                 TagParameter(capacity, capacity_mode));

  // Fill in the elements with holes.
  FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity,
                          Heap::kTheHoleValueRootIndex, capacity_mode);

  return array;
}
@ -3859,7 +3914,7 @@ void CodeStubAssembler::EmitFastElementsBoundsCheck(Node* object,
}
Bind(&if_array);
{
var_length.Bind(SmiUntag(LoadObjectField(object, JSArray::kLengthOffset)));
var_length.Bind(SmiUntag(LoadJSArrayLength(object)));
Goto(&length_loaded);
}
Bind(&length_loaded);
@ -4201,9 +4256,8 @@ void CodeStubAssembler::LoadIC(const LoadICParameters* p) {
{
// Check polymorphic case.
Comment("LoadIC_try_polymorphic");
GotoUnless(
WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
&try_megamorphic);
GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
&try_megamorphic);
HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
&miss, 2);
}
@ -4247,9 +4301,8 @@ void CodeStubAssembler::KeyedLoadIC(const LoadICParameters* p) {
{
// Check polymorphic case.
Comment("KeyedLoadIC_try_polymorphic");
GotoUnless(
WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
&try_megamorphic);
GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
&try_megamorphic);
HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
&miss, 2);
}
@ -4632,8 +4685,7 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
{
Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0,
INTPTR_PARAMETERS);
GotoIf(WordNotEqual(LoadMap(backing_store),
LoadRoot(Heap::kFixedArrayMapRootIndex)),
GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
bailout);
Node* backing_store_length =

View File

@ -72,6 +72,9 @@ class CodeStubAssembler : public compiler::CodeAssembler {
compiler::Node* BooleanMapConstant();
compiler::Node* EmptyStringConstant();
compiler::Node* FixedArrayMapConstant();
compiler::Node* FixedCowArrayMapConstant();
compiler::Node* FixedDoubleArrayMapConstant();
compiler::Node* HeapNumberMapConstant();
compiler::Node* NoContextConstant();
compiler::Node* NanConstant();
@ -210,6 +213,8 @@ class CodeStubAssembler : public compiler::CodeAssembler {
compiler::Node* LoadProperties(compiler::Node* object);
// Load the elements backing store of a JSObject.
compiler::Node* LoadElements(compiler::Node* object);
// Load the length of a JSArray instance.
compiler::Node* LoadJSArrayLength(compiler::Node* array);
// Load the length of a fixed array base instance.
compiler::Node* LoadFixedArrayBaseLength(compiler::Node* array);
// Load the length of a fixed array base instance.
@ -317,12 +322,24 @@ class CodeStubAssembler : public compiler::CodeAssembler {
compiler::Node* AllocateSeqTwoByteString(int length);
compiler::Node* AllocateSeqTwoByteString(compiler::Node* context,
compiler::Node* length);
// Allocate a JSArray
compiler::Node* AllocateJSArray(ElementsKind kind, compiler::Node* array_map,
compiler::Node* capacity,
compiler::Node* length,
compiler::Node* allocation_site = nullptr,
ParameterMode mode = INTEGER_PARAMETERS);
// Allocate a JSArray without elements and initialize the header fields.
compiler::Node* AllocateUninitializedJSArrayWithoutElements(
ElementsKind kind, compiler::Node* array_map, compiler::Node* length,
compiler::Node* allocation_site);
// Allocate and return a JSArray with initialized header fields and its
// uninitialized elements.
// The ParameterMode argument is only used for the capacity parameter.
std::pair<compiler::Node*, compiler::Node*>
AllocateUninitializedJSArrayWithElements(
ElementsKind kind, compiler::Node* array_map, compiler::Node* length,
compiler::Node* allocation_site, compiler::Node* capacity,
ParameterMode capacity_mode = INTEGER_PARAMETERS);
// Allocate a JSArray and fill elements with the hole.
// The ParameterMode argument is only used for the capacity parameter.
compiler::Node* AllocateJSArray(
ElementsKind kind, compiler::Node* array_map, compiler::Node* capacity,
compiler::Node* length, compiler::Node* allocation_site = nullptr,
ParameterMode capacity_mode = INTEGER_PARAMETERS);
compiler::Node* AllocateFixedArray(ElementsKind kind,
compiler::Node* capacity,
@ -762,6 +779,13 @@ class CodeStubAssembler : public compiler::CodeAssembler {
AllocationFlags flags,
compiler::Node* top_adddress,
compiler::Node* limit_address);
// Allocate and return a JSArray of given total size in bytes with header
// fields initialized.
compiler::Node* AllocateUninitializedJSArray(ElementsKind kind,
compiler::Node* array_map,
compiler::Node* length,
compiler::Node* allocation_site,
compiler::Node* size_in_bytes);
compiler::Node* SmiShiftBitsConstant();

View File

@ -339,85 +339,6 @@ Handle<Code> NumberToStringStub::GenerateCode() {
return DoGenerateCode(this);
}
// Hydrogen graph for FastCloneShallowArrayStub (removed by this commit in
// favor of the TurboFan implementation below): looks up the literal's
// allocation site, then dispatches on the boilerplate's elements — empty,
// copy-on-write, plain FixedArray, or (fall-through) FixedDoubleArray — and
// clones accordingly; deopts if the boilerplate is uninitialized.
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(Descriptor::kClosure);
  HValue* literal_index = GetParameter(Descriptor::kLiteralIndex);

  // TODO(turbofan): This codestub has regressed to need a frame on ia32 at some
  // point and wasn't caught since it wasn't built in the snapshot. We should
  // probably just replace with a TurboFan stub rather than fixing it.
#if !(V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87)
  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();
#endif

  // Fetch the allocation site for this literal from the closure's literals
  // array.
  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  // An undefined slot means the boilerplate was never created — deopt below.
  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  // Zero-capacity boilerplates get a clone with the empty fixed array.
  IfBuilder zero_capacity(this);
  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
                                             Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  // COW elements can be shared with the clone instead of copied.
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  // Plain FixedArray vs. (else) FixedDoubleArray: both require a deep copy
  // of the elements store.
  IfBuilder if_fixed(this);
  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));
  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();
  checker.ElseDeopt(DeoptimizeReason::kUninitializedBoilerplateLiterals);
  checker.End();
  return environment()->Pop();
}
// Standard Hydrogen stub entry point: compiles the graph built above.
Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}
HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
HValue* argument_elements,
ElementsKind kind) {
@ -468,6 +389,7 @@ template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
// TODO(verwaest): Fix deoptimizer messages.
HValue* argc = GetArgumentsLength();
HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
graph()->GetConstantMinus1());

View File

@ -5042,13 +5042,13 @@ compiler::Node* FastCloneShallowObjectStub::GenerateFastPath(
typedef compiler::CodeAssembler::Label Label;
typedef compiler::CodeAssembler::Variable Variable;
Node* undefined = assembler->UndefinedConstant();
Node* literals_array =
assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site = assembler->LoadFixedArrayElement(
literals_array, literals_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
Node* undefined = assembler->UndefinedConstant();
assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
call_runtime);
@ -5229,15 +5229,6 @@ void NumberToStringStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
descriptor->SetMissHandler(Runtime::kNumberToString);
}
// Descriptor for the (removed) Hydrogen FastCloneShallowArrayStub: on a miss
// it bails out to the %CreateArrayLiteralStubBailout runtime function.
void FastCloneShallowArrayStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  FastCloneShallowArrayDescriptor call_descriptor(isolate());
  descriptor->Initialize(
      Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry);
  descriptor->SetMissHandler(Runtime::kCreateArrayLiteralStubBailout);
}
void RegExpConstructResultStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
descriptor->Initialize(
@ -5746,6 +5737,201 @@ void FastCloneRegExpStub::GenerateAssembly(CodeStubAssembler* assembler) const {
Generate(assembler, closure, literal_index, pattern, flags, context));
}
namespace {

// Clones a boilerplate JSArray whose elements store is non-empty and not
// copy-on-write: allocates a fresh array + elements pair sharing the
// boilerplate's map, copies the elements header words, then copies the
// elements themselves. |kind| selects FixedArray vs. FixedDoubleArray
// treatment for the copy.
compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
                                     compiler::Node* boilerplate,
                                     compiler::Node* boilerplate_map,
                                     compiler::Node* boilerplate_elements,
                                     compiler::Node* allocation_site,
                                     compiler::Node* capacity,
                                     ElementsKind kind) {
  typedef compiler::Node Node;
  typedef CodeStubAssembler::ParameterMode ParameterMode;
  ParameterMode param_mode = CodeStubAssembler::SMI_PARAMETERS;

  Node* length = assembler->LoadJSArrayLength(boilerplate);

  // On 64-bit targets, switch to untagged (INTEGER_PARAMETERS) values —
  // presumably to avoid repeated Smi tag/untag in the copy loop; TODO(review)
  // confirm the rationale.
  if (assembler->Is64()) {
    capacity = assembler->SmiUntag(capacity);
    param_mode = CodeStubAssembler::INTEGER_PARAMETERS;
  }

  Node *array, *elements;
  std::tie(array, elements) =
      assembler->AllocateUninitializedJSArrayWithElements(
          kind, boilerplate_map, length, allocation_site, capacity, param_mode);

  // Copy the FixedArrayBase header (map, length, ...) word by word.
  assembler->Comment("copy elements header");
  for (int offset = 0; offset < FixedArrayBase::kHeaderSize;
       offset += kPointerSize) {
    Node* value = assembler->LoadObjectField(boilerplate_elements, offset);
    assembler->StoreObjectField(elements, offset, value);
  }

  if (assembler->Is64()) {
    length = assembler->SmiUntag(length);
  }

  // Write barriers can be skipped: the clone is freshly allocated.
  assembler->Comment("copy boilerplate elements");
  assembler->CopyFixedArrayElements(kind, boilerplate_elements, elements,
                                    length, SKIP_WRITE_BARRIER, param_mode);
  assembler->IncrementCounter(
      assembler->isolate()->counters()->inlined_copied_elements(), 1);

  return array;
}
}  // namespace
// static
// TurboFan replacement for the Hydrogen fast-clone stub: clones a shallow
// array literal from its boilerplate. Dispatches on the boilerplate's
// elements — zero capacity, copy-on-write (share the elements), plain
// FixedArray, or (fall-through) FixedDoubleArray (deep-copy the elements) —
// and falls back to %CreateArrayLiteral when no allocation site exists yet.
compiler::Node* FastCloneShallowArrayStub::Generate(
    CodeStubAssembler* assembler, compiler::Node* closure,
    compiler::Node* literal_index, compiler::Node* constant_elements,
    compiler::Node* context, AllocationSiteMode allocation_site_mode) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label call_runtime(assembler, Label::kDeferred), zero_capacity(assembler),
      cow_elements(assembler), fast_elements(assembler),
      return_result(assembler);
  Variable result(assembler, MachineRepresentation::kTagged);

  // Fetch the allocation site for this literal from the closure's literals
  // array.
  Node* literals_array =
      assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
  Node* allocation_site = assembler->LoadFixedArrayElement(
      literals_array, literal_index,
      LiteralsArray::kFirstLiteralIndex * kPointerSize,
      CodeStubAssembler::SMI_PARAMETERS);

  // An undefined slot means the boilerplate has not been created yet.
  Node* undefined = assembler->UndefinedConstant();
  assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
                    &call_runtime);
  // NOTE(review): the allocation site is re-loaded after the check; the first
  // load is only consumed by the undefined comparison — confirm this is
  // intentional (e.g. live-range shortening) rather than a leftover.
  allocation_site = assembler->LoadFixedArrayElement(
      literals_array, literal_index,
      LiteralsArray::kFirstLiteralIndex * kPointerSize,
      CodeStubAssembler::SMI_PARAMETERS);

  Node* boilerplate = assembler->LoadObjectField(
      allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = assembler->LoadMap(boilerplate);
  Node* boilerplate_elements = assembler->LoadElements(boilerplate);
  Node* capacity = assembler->LoadFixedArrayBaseLength(boilerplate_elements);
  // Only pass the allocation site on (for memento initialization) when
  // tracking is requested.
  allocation_site =
      allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;

  Node* zero = assembler->SmiConstant(Smi::FromInt(0));
  assembler->GotoIf(assembler->SmiEqual(capacity, zero), &zero_capacity);

  Node* elements_map = assembler->LoadMap(boilerplate_elements);
  assembler->GotoIf(
      assembler->WordEqual(elements_map, assembler->FixedCowArrayMapConstant()),
      &cow_elements);
  assembler->GotoIf(
      assembler->WordEqual(elements_map, assembler->FixedArrayMapConstant()),
      &fast_elements);
  // Fall-through: neither COW nor plain FixedArray, so the elements must be
  // a FixedDoubleArray (verified below under --debug-code).
  {
    assembler->Comment("fast double elements path");
    if (FLAG_debug_code) {
      Label correct_elements_map(assembler), abort(assembler, Label::kDeferred);
      assembler->BranchIf(
          assembler->WordEqual(elements_map,
                               assembler->FixedDoubleArrayMapConstant()),
          &correct_elements_map, &abort);
      assembler->Bind(&abort);
      {
        Node* abort_id = assembler->SmiConstant(
            Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
        assembler->TailCallRuntime(Runtime::kAbort, context, abort_id);
      }
      assembler->Bind(&correct_elements_map);
    }

    Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
                                       boilerplate_elements, allocation_site,
                                       capacity, FAST_DOUBLE_ELEMENTS);
    result.Bind(array);
    assembler->Goto(&return_result);
  }

  assembler->Bind(&fast_elements);
  {
    assembler->Comment("fast elements path");
    Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
                                       boilerplate_elements, allocation_site,
                                       capacity, FAST_ELEMENTS);
    result.Bind(array);
    assembler->Goto(&return_result);
  }

  // The COW and zero-capacity paths share the tail below: both allocate the
  // array header only and install an existing elements store.
  Variable length(assembler, MachineRepresentation::kTagged),
      elements(assembler, MachineRepresentation::kTagged);
  Label allocate_without_elements(assembler);

  assembler->Bind(&cow_elements);
  {
    assembler->Comment("fixed cow path");
    // COW elements can be shared with the clone instead of copied.
    length.Bind(assembler->LoadJSArrayLength(boilerplate));
    elements.Bind(boilerplate_elements);

    assembler->Goto(&allocate_without_elements);
  }

  assembler->Bind(&zero_capacity);
  {
    assembler->Comment("zero capacity path");
    length.Bind(zero);
    elements.Bind(assembler->LoadRoot(Heap::kEmptyFixedArrayRootIndex));

    assembler->Goto(&allocate_without_elements);
  }

  assembler->Bind(&allocate_without_elements);
  {
    Node* array = assembler->AllocateUninitializedJSArrayWithoutElements(
        FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
    assembler->StoreObjectField(array, JSObject::kElementsOffset,
                                elements.value());
    result.Bind(array);
    assembler->Goto(&return_result);
  }

  assembler->Bind(&call_runtime);
  {
    assembler->Comment("call runtime");
    Node* flags = assembler->SmiConstant(
        Smi::FromInt(ArrayLiteral::kShallowElements |
                     (allocation_site_mode == TRACK_ALLOCATION_SITE
                          ? 0
                          : ArrayLiteral::kDisableMementos)));
    Node* array =
        assembler->CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                               literal_index, constant_elements, flags);
    result.Bind(array);
    assembler->Goto(&return_result);
  }

  assembler->Bind(&return_result);
  return result.value();
}
// TurboFan stub entry point: unpacks the descriptor parameters and returns
// the result of the shared Generate() helper above.
void FastCloneShallowArrayStub::GenerateAssembly(
    CodeStubAssembler* assembler) const {
  typedef compiler::Node Node;
  Node* closure = assembler->Parameter(Descriptor::kClosure);
  Node* literal_index = assembler->Parameter(Descriptor::kLiteralIndex);
  Node* constant_elements = assembler->Parameter(Descriptor::kConstantElements);
  Node* context = assembler->Parameter(Descriptor::kContext);

  assembler->Return(Generate(assembler, closure, literal_index,
                             constant_elements, context,
                             allocation_site_mode()));
}
void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
CreateAllocationSiteStub stub(isolate);
stub.GetCode();
@ -5951,7 +6137,7 @@ void ArrayNoArgumentConstructorStub::GenerateAssembly(
Node* array = assembler->AllocateJSArray(
elements_kind(), array_map,
assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
assembler->IntPtrConstant(0), allocation_site);
assembler->SmiConstant(Smi::FromInt(0)), allocation_site);
assembler->Return(array);
}
@ -5964,7 +6150,7 @@ void InternalArrayNoArgumentConstructorStub::GenerateAssembly(
Node* array = assembler->AllocateJSArray(
elements_kind(), array_map,
assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
assembler->IntPtrConstant(0), nullptr);
assembler->SmiConstant(Smi::FromInt(0)), nullptr);
assembler->Return(array);
}

View File

@ -67,7 +67,6 @@ class ObjectLiteral;
V(KeyedStoreICTrampoline) \
V(StoreICTrampoline) \
/* --- HydrogenCodeStubs --- */ \
V(FastCloneShallowArray) \
V(NumberToString) \
V(StringAdd) \
V(ToObject) \
@ -133,8 +132,9 @@ class ObjectLiteral;
V(InternalArraySingleArgumentConstructor) \
V(Dec) \
V(ElementsTransitionAndStore) \
V(FastCloneShallowObject) \
V(FastCloneRegExp) \
V(FastCloneShallowArray) \
V(FastCloneShallowObject) \
V(FastNewClosure) \
V(FastNewFunctionContext) \
V(InstanceOf) \
@ -1263,24 +1263,30 @@ class FastCloneRegExpStub final : public TurboFanCodeStub {
DEFINE_TURBOFAN_CODE_STUB(FastCloneRegExp, TurboFanCodeStub);
};
class FastCloneShallowArrayStub : public HydrogenCodeStub {
class FastCloneShallowArrayStub : public TurboFanCodeStub {
public:
FastCloneShallowArrayStub(Isolate* isolate,
AllocationSiteMode allocation_site_mode)
: HydrogenCodeStub(isolate) {
set_sub_minor_key(AllocationSiteModeBits::encode(allocation_site_mode));
: TurboFanCodeStub(isolate) {
minor_key_ = AllocationSiteModeBits::encode(allocation_site_mode);
}
static compiler::Node* Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* constant_elements,
compiler::Node* context,
AllocationSiteMode allocation_site_mode);
AllocationSiteMode allocation_site_mode() const {
return AllocationSiteModeBits::decode(sub_minor_key());
return AllocationSiteModeBits::decode(minor_key_);
}
private:
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowArray);
DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowArray, HydrogenCodeStub);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowArray, TurboFanCodeStub);
};
class FastCloneShallowObjectStub : public TurboFanCodeStub {

View File

@ -3227,93 +3227,6 @@ void HGraphBuilder::BuildCopyElements(HValue* from_elements,
AddIncrementCounter(counters->inlined_copied_elements());
}
// (Removed Hydrogen helper.) Clones a boilerplate array whose elements are
// copy-on-write: the clone shares the boilerplate's elements pointer rather
// than copying the backing store.
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
                                                 HValue* allocation_site,
                                                 AllocationSiteMode mode,
                                                 ElementsKind kind) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* length = AddLoadArrayLength(boilerplate, kind);

  BuildJSArrayHeader(array,
                     map,
                     elements,
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     length);
  return array;
}
// (Removed Hydrogen helper.) Clones a boilerplate array with no elements:
// builds a JSArray header with length 0 and the empty fixed array as its
// backing store.
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
                                                   HValue* allocation_site,
                                                   AllocationSiteMode mode) {
  HAllocate* array = AllocateJSArrayObject(mode);

  HValue* map = AddLoadMap(boilerplate);

  BuildJSArrayHeader(array,
                     map,
                     NULL,  // set elements to empty fixed array
                     mode,
                     FAST_ELEMENTS,
                     allocation_site,
                     graph()->GetConstant0());
  return array;
}
// (Removed Hydrogen helper.) Clones a boilerplate array whose elements must
// be deep-copied: allocates the array header and a fresh elements store of
// the same capacity, then copies the elements header and contents.
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
                                                      HValue* allocation_site,
                                                      AllocationSiteMode mode,
                                                      ElementsKind kind) {
  HValue* boilerplate_elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);

  // Generate size calculation code here in order to make it dominate
  // the JSArray allocation.
  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);

  // Create empty JSArray object for now, store elimination should remove
  // redundant initialization of elements and length fields and at the same
  // time the object will be fully prepared for GC if it happens during
  // elements allocation.
  HValue* result = BuildCloneShallowArrayEmpty(
      boilerplate, allocation_site, mode);

  HAllocate* elements = BuildAllocateElements(kind, elements_size);
  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);

  // The allocation for the cloned array above causes register pressure on
  // machines with low register counts. Force a reload of the boilerplate
  // elements here to free up a register for the allocation to avoid unnecessary
  // spillage.
  boilerplate_elements = AddLoadElements(boilerplate);
  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);

  // Copy the elements array header.
  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
    Add<HStoreNamedField>(
        elements, access,
        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
  }

  // And the result of the length
  HValue* length = AddLoadArrayLength(boilerplate, kind);
  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);

  BuildCopyElements(boilerplate_elements, kind, elements,
                    kind, length, NULL);
  return result;
}
void HGraphBuilder::BuildCreateAllocationMemento(
HValue* previous_object,
HValue* previous_object_size,

View File

@ -1842,20 +1842,6 @@ class HGraphBuilder {
HValue* length,
HValue* capacity);
HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildElementIndexHash(HValue* index);
void BuildCreateAllocationMemento(HValue* previous_object,

View File

@ -68,7 +68,6 @@ namespace internal {
"Unexpected cell contents in global store") \
V(UnexpectedObject, "unexpected object") \
V(UnexpectedRHSOfBinaryOperation, "Unexpected RHS of binary operation") \
V(UninitializedBoilerplateLiterals, "Uninitialized boilerplate literals") \
V(UnknownMapInPolymorphicAccess, "Unknown map in polymorphic access") \
V(UnknownMapInPolymorphicCall, "Unknown map in polymorphic call") \
V(UnknownMapInPolymorphicElementAccess, \

View File

@ -1597,6 +1597,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1580,6 +1580,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1511,6 +1511,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1592,6 +1592,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1593,6 +1593,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1560,6 +1560,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1519,6 +1519,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1539,6 +1539,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -1503,6 +1503,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@ -23,7 +23,7 @@ bytecodes: [
B(LdaZero),
B(TestEqualStrict), R(1), U8(0),
B(JumpIfTrue), U8(61),
B(LdaSmi), U8(76),
B(LdaSmi), U8(77),
B(Star), R(2),
B(CallRuntime), U16(Runtime::kAbort), R(2), U8(1),
B(LdaSmi), U8(-2),
@ -131,7 +131,7 @@ bytecodes: [
B(LdaSmi), U8(1),
B(TestEqualStrict), R(1), U8(0),
B(JumpIfTrueConstant), U8(0),
B(LdaSmi), U8(76),
B(LdaSmi), U8(77),
B(Star), R(2),
B(CallRuntime), U16(Runtime::kAbort), R(2), U8(1),
B(LdaSmi), U8(-2),
@ -279,7 +279,7 @@ bytecodes: [
B(LdaSmi), U8(1),
B(TestEqualStrict), R(4), U8(0),
B(JumpIfTrueConstant), U8(3),
B(LdaSmi), U8(76),
B(LdaSmi), U8(77),
B(Star), R(5),
B(CallRuntime), U16(Runtime::kAbort), R(5), U8(1),
B(LdaSmi), U8(-2),
@ -345,7 +345,7 @@ bytecodes: [
B(LdaSmi), U8(1),
B(TestEqualStrict), R(4), U8(0),
B(JumpIfTrueConstant), U8(9),
B(LdaSmi), U8(76),
B(LdaSmi), U8(77),
B(Star), R(12),
B(CallRuntime), U16(Runtime::kAbort), R(12), U8(1),
/* 27 S> */ B(LdrContextSlot), R(1), U8(7), U8(0), R(14),