[builtins] More stubs to the builtin-o-sphere.

The following were ported to builtins:
FastCloneRegExp
FastCloneShallowArray
FastCloneShallowObject

BUG=
TBR=rmcilroy@chromium.org, rossberg@chromium.org

Review-Url: https://codereview.chromium.org/2605893002
Cr-Commit-Position: refs/heads/master@{#41989}
mvstanton 2016-12-29 05:02:08 -08:00 committed by Commit bot
parent f2e8c9786f
commit 05873add85
21 changed files with 518 additions and 507 deletions

View File

@@ -10,6 +10,7 @@
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
#include "src/base/hashmap.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins.h"
#include "src/code-stubs.h"
#include "src/contexts.h"
@@ -577,12 +578,12 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
}
bool ObjectLiteral::IsFastCloningSupported() const {
// FastCloneShallowObjectStub doesn't copy elements, and object literals don't
// support copy-on-write (COW) elements for now.
// The FastCloneShallowObject builtin doesn't copy elements, and object
// literals don't support copy-on-write (COW) elements for now.
// TODO(mvstanton): make object literals support COW elements.
return fast_elements() && has_shallow_properties() &&
properties_count() <=
FastCloneShallowObjectStub::kMaximumClonedProperties;
properties_count() <= ConstructorBuiltinsAssembler::
kMaximumClonedShallowObjectProperties;
}
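// For intuition, a standalone sketch of the predicate above (illustrative
// only; the limit 6 mirrors kMaximumClonedShallowObjectProperties, defined in
// builtins-constructor.h below):
constexpr int kMaxClonedProps = 6;
constexpr bool ObjectIsFastCloningSupported(bool fast_elements,
                                            bool shallow_properties,
                                            int properties_count) {
  return fast_elements && shallow_properties &&
         properties_count <= kMaxClonedProps;
}
static_assert(ObjectIsFastCloningSupported(true, true, 6),
              "at the limit: fast path");
static_assert(!ObjectIsFastCloningSupported(true, true, 7),
              "over the limit: runtime call");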
void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
@@ -659,7 +660,7 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
bool ArrayLiteral::IsFastCloningSupported() const {
return depth() <= 1 &&
values()->length() <=
FastCloneShallowArrayStub::kMaximumClonedElements;
ConstructorBuiltinsAssembler::kMaximumClonedShallowArrayElements;
}
void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate,

View File

@@ -1416,7 +1416,7 @@ class ObjectLiteral final : public MaterializedLiteral {
// marked expressions, no store code is emitted.
void CalculateEmitStore(Zone* zone);
// Determines whether the {FastCloneShallowObjectStub} can be used.
// Determines whether the {FastCloneShallowObject} builtin can be used.
bool IsFastCloningSupported() const;
// Assemble bitfield of flags for the CreateObjectLiteral helper.
@@ -1562,7 +1562,7 @@ class ArrayLiteral final : public MaterializedLiteral {
// Populate the constant elements fixed array.
void BuildConstantElements(Isolate* isolate);
// Determines whether the {FastCloneShallowArrayStub} can be used.
// Determines whether the {FastCloneShallowArray} builtin can be used.
bool IsFastCloningSupported() const;
// Assemble bitfield of flags for the CreateArrayLiteral helper.

View File

@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "src/builtins/builtins-constructor.h"
#include "src/ast/ast.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
@@ -356,5 +357,386 @@ Handle<Code> Builtins::NewFunctionContext(ScopeType scope_type) {
return Handle<Code>::null();
}
Node* ConstructorBuiltinsAssembler::EmitFastCloneRegExp(Node* closure,
Node* literal_index,
Node* pattern,
Node* flags,
Node* context) {
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
typedef compiler::Node Node;
Label call_runtime(this, Label::kDeferred), end(this);
Variable result(this, MachineRepresentation::kTagged);
Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* boilerplate =
LoadFixedArrayElement(literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
GotoIf(IsUndefined(boilerplate), &call_runtime);
{
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Node* copy = Allocate(size);
for (int offset = 0; offset < size; offset += kPointerSize) {
Node* value = LoadObjectField(boilerplate, offset);
StoreObjectFieldNoWriteBarrier(copy, offset, value);
}
result.Bind(copy);
Goto(&end);
}
Bind(&call_runtime);
{
result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context, closure,
literal_index, pattern, flags));
Goto(&end);
}
Bind(&end);
return result.value();
}
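// For intuition: the fast path above is a plain word-by-word shallow copy of
// the boilerplate JSRegExp. A standalone sketch of that loop follows; the
// sizes are assumed stand-ins for JSRegExp::kSize and kInObjectFieldCount.
#include <cstring>
#include <iostream>
int main() {
  constexpr int kPointerSize = 8;                // assumed 64-bit target
  constexpr int kRegExpSize = 7 * kPointerSize;  // assumed, not the real value
  constexpr int kInObjectFieldCount = 1;         // assumed, not the real value
  constexpr int kTotal = kRegExpSize + kInObjectFieldCount * kPointerSize;
  alignas(8) unsigned char boilerplate[kTotal];
  std::memset(boilerplate, 0xAB, sizeof boilerplate);
  alignas(8) unsigned char copy[kTotal];
  // Mirrors the LoadObjectField/StoreObjectFieldNoWriteBarrier loop: one
  // pointer-sized field at a time; no write barrier is needed because the
  // copy is freshly allocated.
  for (int offset = 0; offset < kTotal; offset += kPointerSize) {
    std::memcpy(copy + offset, boilerplate + offset, kPointerSize);
  }
  std::cout << (std::memcmp(copy, boilerplate, kTotal) == 0 ? "equal\n"
                                                            : "bug\n");
}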
TF_BUILTIN(FastCloneRegExp, ConstructorBuiltinsAssembler) {
Node* closure = Parameter(FastCloneRegExpDescriptor::kClosure);
Node* literal_index = Parameter(FastCloneRegExpDescriptor::kLiteralIndex);
Node* pattern = Parameter(FastCloneRegExpDescriptor::kPattern);
Node* flags = Parameter(FastCloneRegExpDescriptor::kFlags);
Node* context = Parameter(FastCloneRegExpDescriptor::kContext);
Return(EmitFastCloneRegExp(closure, literal_index, pattern, flags, context));
}
Node* ConstructorBuiltinsAssembler::NonEmptyShallowClone(
Node* boilerplate, Node* boilerplate_map, Node* boilerplate_elements,
Node* allocation_site, Node* capacity, ElementsKind kind) {
typedef CodeStubAssembler::ParameterMode ParameterMode;
ParameterMode param_mode = OptimalParameterMode();
Node* length = LoadJSArrayLength(boilerplate);
capacity = TaggedToParameter(capacity, param_mode);
Node *array, *elements;
std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
kind, boilerplate_map, length, allocation_site, capacity, param_mode);
Comment("copy elements header");
// Header consists of map and length.
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
StoreMap(elements, LoadMap(boilerplate_elements));
{
int offset = FixedArrayBase::kLengthOffset;
StoreObjectFieldNoWriteBarrier(
elements, offset, LoadObjectField(boilerplate_elements, offset));
}
length = TaggedToParameter(length, param_mode);
Comment("copy boilerplate elements");
CopyFixedArrayElements(kind, boilerplate_elements, elements, length,
SKIP_WRITE_BARRIER, param_mode);
IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
return array;
}
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowArray(
Node* closure, Node* literal_index, Node* context,
CodeAssemblerLabel* call_runtime, AllocationSiteMode allocation_site_mode) {
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
typedef compiler::Node Node;
Label zero_capacity(this), cow_elements(this), fast_elements(this),
return_result(this);
Variable result(this, MachineRepresentation::kTagged);
Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site =
LoadFixedArrayElement(literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
GotoIf(IsUndefined(allocation_site), call_runtime);
allocation_site =
LoadFixedArrayElement(literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
Node* boilerplate =
LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
Node* boilerplate_map = LoadMap(boilerplate);
Node* boilerplate_elements = LoadElements(boilerplate);
Node* capacity = LoadFixedArrayBaseLength(boilerplate_elements);
allocation_site =
allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;
Node* zero = SmiConstant(Smi::kZero);
GotoIf(SmiEqual(capacity, zero), &zero_capacity);
Node* elements_map = LoadMap(boilerplate_elements);
GotoIf(IsFixedCOWArrayMap(elements_map), &cow_elements);
GotoIf(IsFixedArrayMap(elements_map), &fast_elements);
{
Comment("fast double elements path");
if (FLAG_debug_code) {
Label correct_elements_map(this), abort(this, Label::kDeferred);
Branch(IsFixedDoubleArrayMap(elements_map), &correct_elements_map,
&abort);
Bind(&abort);
{
Node* abort_id = SmiConstant(
Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
CallRuntime(Runtime::kAbort, context, abort_id);
result.Bind(UndefinedConstant());
Goto(&return_result);
}
Bind(&correct_elements_map);
}
Node* array =
NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
allocation_site, capacity, FAST_DOUBLE_ELEMENTS);
result.Bind(array);
Goto(&return_result);
}
Bind(&fast_elements);
{
Comment("fast elements path");
Node* array =
NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
allocation_site, capacity, FAST_ELEMENTS);
result.Bind(array);
Goto(&return_result);
}
Variable length(this, MachineRepresentation::kTagged),
elements(this, MachineRepresentation::kTagged);
Label allocate_without_elements(this);
Bind(&cow_elements);
{
Comment("fixed cow path");
length.Bind(LoadJSArrayLength(boilerplate));
elements.Bind(boilerplate_elements);
Goto(&allocate_without_elements);
}
Bind(&zero_capacity);
{
Comment("zero capacity path");
length.Bind(zero);
elements.Bind(LoadRoot(Heap::kEmptyFixedArrayRootIndex));
Goto(&allocate_without_elements);
}
Bind(&allocate_without_elements);
{
Node* array = AllocateUninitializedJSArrayWithoutElements(
FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
StoreObjectField(array, JSObject::kElementsOffset, elements.value());
result.Bind(array);
Goto(&return_result);
}
Bind(&return_result);
return result.value();
}
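// To summarize the control flow above: the emitter picks one of four paths
// based on the boilerplate's elements. A standalone decision table (the enum
// and helper are illustrative names, not V8's):
enum class ElementsPath { kZeroCapacity, kCopyOnWrite, kFast, kFastDouble };
constexpr ElementsPath Classify(int capacity, bool is_cow_map,
                                bool is_fixed_array_map) {
  return capacity == 0        ? ElementsPath::kZeroCapacity
         : is_cow_map         ? ElementsPath::kCopyOnWrite
         : is_fixed_array_map ? ElementsPath::kFast
                              : ElementsPath::kFastDouble;
}
static_assert(Classify(0, false, true) == ElementsPath::kZeroCapacity,
              "empty arrays skip element allocation entirely");
static_assert(Classify(3, true, false) == ElementsPath::kCopyOnWrite,
              "COW arrays share the boilerplate's elements fixed array");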
void ConstructorBuiltinsAssembler::CreateFastCloneShallowArrayBuiltin(
AllocationSiteMode allocation_site_mode) {
typedef compiler::Node Node;
typedef CodeStubAssembler::Label Label;
Node* closure = Parameter(FastCloneShallowArrayDescriptor::kClosure);
Node* literal_index =
Parameter(FastCloneShallowArrayDescriptor::kLiteralIndex);
Node* constant_elements =
Parameter(FastCloneShallowArrayDescriptor::kConstantElements);
Node* context = Parameter(FastCloneShallowArrayDescriptor::kContext);
Label call_runtime(this, Label::kDeferred);
Return(EmitFastCloneShallowArray(closure, literal_index, context,
&call_runtime, allocation_site_mode));
Bind(&call_runtime);
{
Comment("call runtime");
Node* flags =
SmiConstant(Smi::FromInt(ArrayLiteral::kShallowElements |
(allocation_site_mode == TRACK_ALLOCATION_SITE
? 0
: ArrayLiteral::kDisableMementos)));
Return(CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
literal_index, constant_elements, flags));
}
}
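// The runtime fallback's flags depend only on the allocation-site mode.
// Written out standalone (the bit positions are assumptions for illustration;
// the real constants live on ArrayLiteral):
constexpr int kShallowElements = 1 << 0;  // assumed bit position
constexpr int kDisableMementos = 1 << 1;  // assumed bit position
constexpr int RuntimeFlags(bool track_allocation_site) {
  return kShallowElements | (track_allocation_site ? 0 : kDisableMementos);
}
static_assert(RuntimeFlags(true) == kShallowElements,
              "tracking keeps mementos enabled");
static_assert(RuntimeFlags(false) == (kShallowElements | kDisableMementos),
              "the DontTrack variant disables mementos");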
TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) {
CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
}
TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) {
CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}
Handle<Code> Builtins::NewCloneShallowArray(
AllocationSiteMode allocation_mode) {
switch (allocation_mode) {
case TRACK_ALLOCATION_SITE:
return FastCloneShallowArrayTrack();
case DONT_TRACK_ALLOCATION_SITE:
return FastCloneShallowArrayDontTrack();
default:
UNREACHABLE();
}
return Handle<Code>::null();
}
// static
int ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
int literal_length) {
// This heuristic of setting empty literals to have
// kInitialGlobalObjectUnusedPropertiesCount must remain in-sync with the
// runtime.
// TODO(verwaest): Unify this with the heuristic in the runtime.
return literal_length == 0
? JSObject::kInitialGlobalObjectUnusedPropertiesCount
: literal_length;
}
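// Spelled out standalone (the value 4 is an assumed stand-in for
// JSObject::kInitialGlobalObjectUnusedPropertiesCount):
constexpr int kInitialGlobalObjectUnusedPropertiesCount = 4;  // assumed
constexpr int PropertiesCount(int literal_length) {
  // Empty literals still reserve slack so properties can be added later
  // without an immediate reallocation; non-empty literals are exact.
  return literal_length == 0 ? kInitialGlobalObjectUnusedPropertiesCount
                             : literal_length;
}
static_assert(PropertiesCount(0) == 4, "empty literal gets slack properties");
static_assert(PropertiesCount(3) == 3, "non-empty literal is exact");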
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
CodeAssemblerLabel* call_runtime, Node* closure, Node* literals_index,
Node* properties_count) {
Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site =
LoadFixedArrayElement(literals_array, literals_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
GotoIf(IsUndefined(allocation_site), call_runtime);
// Calculate the object and allocation size based on the properties count.
Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2),
IntPtrConstant(JSObject::kHeaderSize));
Node* allocation_size = object_size;
if (FLAG_allocation_site_pretenuring) {
allocation_size =
IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize));
}
Node* boilerplate =
LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
Node* boilerplate_map = LoadMap(boilerplate);
Node* instance_size = LoadMapInstanceSize(boilerplate_map);
Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
GotoUnless(WordEqual(instance_size, size_in_words), call_runtime);
Node* copy = Allocate(allocation_size);
// Copy boilerplate elements.
Variable offset(this, MachineType::PointerRepresentation());
offset.Bind(IntPtrConstant(-kHeapObjectTag));
Node* end_offset = IntPtrAdd(object_size, offset.value());
Label loop_body(this, &offset), loop_check(this, &offset);
// We should always have an object size greater than zero.
Goto(&loop_body);
Bind(&loop_body);
{
// The Allocate above guarantees that the copy lies in new space. This
// allows us to skip write barriers. This is necessary since we may also be
// copying unboxed doubles.
Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value());
StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
offset.value(), field);
Goto(&loop_check);
}
Bind(&loop_check);
{
offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
GotoUnless(IntPtrGreaterThanOrEqual(offset.value(), end_offset),
&loop_body);
}
if (FLAG_allocation_site_pretenuring) {
Node* memento = InnerAllocate(copy, object_size);
StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
StoreObjectFieldNoWriteBarrier(
memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
Node* memento_create_count = LoadObjectField(
allocation_site, AllocationSite::kPretenureCreateCountOffset);
memento_create_count =
SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1)));
StoreObjectFieldNoWriteBarrier(allocation_site,
AllocationSite::kPretenureCreateCountOffset,
memento_create_count);
}
// TODO(verwaest): Allocate and fill in double boxes.
return copy;
}
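// The size computation above is plain pointer arithmetic. A runnable sketch
// under assumed 64-bit layout constants (three tagged words of JSObject
// header: map, properties, elements; two words of AllocationMemento):
#include <cstdio>
int main() {
  constexpr int kPointerSize = 8;                           // assumed
  constexpr int kJSObjectHeaderSize = 3 * kPointerSize;     // assumed
  constexpr int kAllocationMementoSize = 2 * kPointerSize;  // assumed
  for (int properties_count : {0, 4, 6}) {
    int object_size = properties_count * kPointerSize + kJSObjectHeaderSize;
    // With --allocation-site-pretenuring, the memento is inner-allocated
    // directly behind the object, so one reservation covers both.
    int allocation_size = object_size + kAllocationMementoSize;
    std::printf("props=%d: object=%d bytes, allocation=%d bytes\n",
                properties_count, object_size, allocation_size);
  }
}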
void ConstructorBuiltinsAssembler::CreateFastCloneShallowObjectBuiltin(
int properties_count) {
DCHECK_GE(properties_count, 0);
DCHECK_LE(properties_count, kMaximumClonedShallowObjectProperties);
Label call_runtime(this);
Node* closure = Parameter(0);
Node* literals_index = Parameter(1);
Node* properties_count_node =
IntPtrConstant(FastCloneShallowObjectPropertiesCount(properties_count));
Node* copy = EmitFastCloneShallowObject(
&call_runtime, closure, literals_index, properties_count_node);
Return(copy);
Bind(&call_runtime);
Node* constant_properties = Parameter(2);
Node* flags = Parameter(3);
Node* context = Parameter(4);
TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
literals_index, constant_properties, flags);
}
#define SHALLOW_OBJECT_BUILTIN(props) \
TF_BUILTIN(FastCloneShallowObject##props, ConstructorBuiltinsAssembler) { \
CreateFastCloneShallowObjectBuiltin(props); \
}
SHALLOW_OBJECT_BUILTIN(0);
SHALLOW_OBJECT_BUILTIN(1);
SHALLOW_OBJECT_BUILTIN(2);
SHALLOW_OBJECT_BUILTIN(3);
SHALLOW_OBJECT_BUILTIN(4);
SHALLOW_OBJECT_BUILTIN(5);
SHALLOW_OBJECT_BUILTIN(6);
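// Each SHALLOW_OBJECT_BUILTIN(props) line expands to an ordinary TF_BUILTIN
// definition; SHALLOW_OBJECT_BUILTIN(0), for example, produces:
//
//   TF_BUILTIN(FastCloneShallowObject0, ConstructorBuiltinsAssembler) {
//     CreateFastCloneShallowObjectBuiltin(0);
//   }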
Handle<Code> Builtins::NewCloneShallowObject(int length) {
switch (length) {
case 0:
return FastCloneShallowObject0();
case 1:
return FastCloneShallowObject1();
case 2:
return FastCloneShallowObject2();
case 3:
return FastCloneShallowObject3();
case 4:
return FastCloneShallowObject4();
case 5:
return FastCloneShallowObject5();
case 6:
return FastCloneShallowObject6();
default:
UNREACHABLE();
}
return Handle<Code>::null();
}
} // namespace internal
} // namespace v8

View File

@@ -9,6 +9,7 @@ namespace internal {
typedef compiler::Node Node;
typedef compiler::CodeAssemblerState CodeAssemblerState;
typedef compiler::CodeAssemblerLabel CodeAssemblerLabel;
class ConstructorBuiltinsAssembler : public CodeStubAssembler {
public:
@@ -20,10 +21,37 @@ class ConstructorBuiltinsAssembler : public CodeStubAssembler {
ScopeType scope_type);
static int MaximumFunctionContextSlots();
Node* EmitFastCloneRegExp(Node* closure, Node* literal_index, Node* pattern,
Node* flags, Node* context);
Node* EmitFastCloneShallowArray(Node* closure, Node* literal_index,
Node* context,
CodeAssemblerLabel* call_runtime,
AllocationSiteMode allocation_site_mode);
// Maximum number of elements in copied array (chosen so that even an array
// backed by a double backing store will fit into new-space).
static const int kMaximumClonedShallowArrayElements =
JSArray::kInitialMaxFastElementArray * kPointerSize / kDoubleSize;
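  // Since the budget scales by kPointerSize / kDoubleSize, this cap equals
  // JSArray::kInitialMaxFastElementArray on 64-bit targets (kPointerSize ==
  // kDoubleSize == 8) and half of it on 32-bit targets (kPointerSize == 4).
  // As a standalone sketch with an assumed base value:
  //
  //   constexpr int kInitialMaxFastElementArray = 100000;  // assumed value
  //   constexpr int MaxClonedElements(int pointer_size) {
  //     return kInitialMaxFastElementArray * pointer_size / 8;  // kDoubleSize
  //   }
  //   static_assert(MaxClonedElements(8) == kInitialMaxFastElementArray, "");
  //   static_assert(MaxClonedElements(4) == kInitialMaxFastElementArray / 2, "");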
void CreateFastCloneShallowArrayBuiltin(
AllocationSiteMode allocation_site_mode);
// Maximum number of properties in copied objects.
static const int kMaximumClonedShallowObjectProperties = 6;
static int FastCloneShallowObjectPropertiesCount(int literal_length);
Node* EmitFastCloneShallowObject(CodeAssemblerLabel* call_runtime,
Node* closure, Node* literals_index,
Node* properties_count);
void CreateFastCloneShallowObjectBuiltin(int properties_count);
private:
static const int kMaximumSlots = 0x8000;
static const int kSmallMaximumSlots = 10;
Node* NonEmptyShallowClone(Node* boilerplate, Node* boilerplate_map,
Node* boilerplate_elements, Node* allocation_site,
Node* capacity, ElementsKind kind);
// FastNewFunctionContext can only allocate closures which fit in the
// new space.
STATIC_ASSERT(((kMaximumSlots + Context::MIN_CONTEXT_SLOTS) * kPointerSize +

View File

@@ -94,6 +94,25 @@ namespace internal {
FastNewFunctionContext) \
TFS(FastNewFunctionContextFunction, BUILTIN, kNoExtraICState, \
FastNewFunctionContext) \
TFS(FastCloneRegExp, BUILTIN, kNoExtraICState, FastCloneRegExp) \
TFS(FastCloneShallowArrayTrack, BUILTIN, kNoExtraICState, \
FastCloneShallowArray) \
TFS(FastCloneShallowArrayDontTrack, BUILTIN, kNoExtraICState, \
FastCloneShallowArray) \
TFS(FastCloneShallowObject0, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject1, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject2, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject3, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject4, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject5, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
TFS(FastCloneShallowObject6, BUILTIN, kNoExtraICState, \
FastCloneShallowObject) \
\
/* Apply and entries */ \
ASM(Apply) \
@@ -784,6 +803,8 @@ class Builtins {
CallableType function_type = CallableType::kAny);
Handle<Code> InterpreterPushArgsAndConstruct(CallableType function_type);
Handle<Code> NewFunctionContext(ScopeType scope_type);
Handle<Code> NewCloneShallowArray(AllocationSiteMode allocation_mode);
Handle<Code> NewCloneShallowObject(int length);
Code* builtin(Name name) {
// Code::cast cannot be used here since we access builtins

View File

@@ -338,24 +338,23 @@ Callable CodeFactory::ResumeGenerator(Isolate* isolate) {
// static
Callable CodeFactory::FastCloneRegExp(Isolate* isolate) {
FastCloneRegExpStub stub(isolate);
return make_callable(stub);
return Callable(isolate->builtins()->FastCloneRegExp(),
FastCloneRegExpDescriptor(isolate));
}
// static
Callable CodeFactory::FastCloneShallowArray(Isolate* isolate) {
// TODO(mstarzinger): Thread through AllocationSiteMode at some point.
FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
return make_callable(stub);
Callable CodeFactory::FastCloneShallowArray(
Isolate* isolate, AllocationSiteMode allocation_mode) {
return Callable(isolate->builtins()->NewCloneShallowArray(allocation_mode),
FastCloneShallowArrayDescriptor(isolate));
}
// static
Callable CodeFactory::FastCloneShallowObject(Isolate* isolate, int length) {
FastCloneShallowObjectStub stub(isolate, length);
return make_callable(stub);
return Callable(isolate->builtins()->NewCloneShallowObject(length),
FastCloneShallowObjectDescriptor(isolate));
}
// static
Callable CodeFactory::FastNewFunctionContext(Isolate* isolate,
ScopeType scope_type) {

View File

@@ -131,7 +131,8 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable GetSuperConstructor(Isolate* isolate);
static Callable FastCloneRegExp(Isolate* isolate);
static Callable FastCloneShallowArray(Isolate* isolate);
static Callable FastCloneShallowArray(Isolate* isolate,
AllocationSiteMode allocation_mode);
static Callable FastCloneShallowObject(Isolate* isolate, int length);
static Callable FastNewFunctionContext(Isolate* isolate,

View File

@@ -1777,123 +1777,6 @@ void LoadIndexedInterceptorStub::GenerateAssembly(
slot, vector);
}
// static
int FastCloneShallowObjectStub::PropertiesCount(int literal_length) {
// This heuristic of setting empty literals to have
// kInitialGlobalObjectUnusedPropertiesCount must remain in-sync with the
// runtime.
// TODO(verwaest): Unify this with the heuristic in the runtime.
return literal_length == 0
? JSObject::kInitialGlobalObjectUnusedPropertiesCount
: literal_length;
}
// static
compiler::Node* FastCloneShallowObjectStub::GenerateFastPath(
CodeStubAssembler* assembler, compiler::CodeAssembler::Label* call_runtime,
compiler::Node* closure, compiler::Node* literals_index,
compiler::Node* properties_count) {
typedef compiler::Node Node;
typedef compiler::CodeAssembler::Label Label;
typedef compiler::CodeAssembler::Variable Variable;
Node* literals_array =
assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site = assembler->LoadFixedArrayElement(
literals_array, literals_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
assembler->GotoIf(assembler->IsUndefined(allocation_site), call_runtime);
// Calculate the object and allocation size based on the properties count.
Node* object_size = assembler->IntPtrAdd(
assembler->WordShl(properties_count, kPointerSizeLog2),
assembler->IntPtrConstant(JSObject::kHeaderSize));
Node* allocation_size = object_size;
if (FLAG_allocation_site_pretenuring) {
allocation_size = assembler->IntPtrAdd(
object_size, assembler->IntPtrConstant(AllocationMemento::kSize));
}
Node* boilerplate = assembler->LoadObjectField(
allocation_site, AllocationSite::kTransitionInfoOffset);
Node* boilerplate_map = assembler->LoadMap(boilerplate);
Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
Node* size_in_words = assembler->WordShr(object_size, kPointerSizeLog2);
assembler->GotoUnless(assembler->WordEqual(instance_size, size_in_words),
call_runtime);
Node* copy = assembler->Allocate(allocation_size);
// Copy boilerplate elements.
Variable offset(assembler, MachineType::PointerRepresentation());
offset.Bind(assembler->IntPtrConstant(-kHeapObjectTag));
Node* end_offset = assembler->IntPtrAdd(object_size, offset.value());
Label loop_body(assembler, &offset), loop_check(assembler, &offset);
// We should always have an object size greater than zero.
assembler->Goto(&loop_body);
assembler->Bind(&loop_body);
{
// The Allocate above guarantees that the copy lies in new space. This
// allows us to skip write barriers. This is necessary since we may also be
// copying unboxed doubles.
Node* field =
assembler->Load(MachineType::IntPtr(), boilerplate, offset.value());
assembler->StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
offset.value(), field);
assembler->Goto(&loop_check);
}
assembler->Bind(&loop_check);
{
offset.Bind(assembler->IntPtrAdd(offset.value(),
assembler->IntPtrConstant(kPointerSize)));
assembler->GotoUnless(
assembler->IntPtrGreaterThanOrEqual(offset.value(), end_offset),
&loop_body);
}
if (FLAG_allocation_site_pretenuring) {
Node* memento = assembler->InnerAllocate(copy, object_size);
assembler->StoreMapNoWriteBarrier(memento,
Heap::kAllocationMementoMapRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(
memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
Node* memento_create_count = assembler->LoadObjectField(
allocation_site, AllocationSite::kPretenureCreateCountOffset);
memento_create_count = assembler->SmiAdd(
memento_create_count, assembler->SmiConstant(Smi::FromInt(1)));
assembler->StoreObjectFieldNoWriteBarrier(
allocation_site, AllocationSite::kPretenureCreateCountOffset,
memento_create_count);
}
// TODO(verwaest): Allocate and fill in double boxes.
return copy;
}
void FastCloneShallowObjectStub::GenerateAssembly(
compiler::CodeAssemblerState* state) const {
typedef CodeStubAssembler::Label Label;
typedef compiler::Node Node;
CodeStubAssembler assembler(state);
Label call_runtime(&assembler);
Node* closure = assembler.Parameter(0);
Node* literals_index = assembler.Parameter(1);
Node* properties_count =
assembler.IntPtrConstant(PropertiesCount(this->length()));
Node* copy = GenerateFastPath(&assembler, &call_runtime, closure,
literals_index, properties_count);
assembler.Return(copy);
assembler.Bind(&call_runtime);
Node* constant_properties = assembler.Parameter(2);
Node* flags = assembler.Parameter(3);
Node* context = assembler.Parameter(4);
assembler.TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
literals_index, constant_properties, flags);
}
template<class StateType>
void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
// Note: Although a no-op transition is semantically OK, it is hinting at a
@@ -2053,252 +1936,6 @@ void GetPropertyStub::GenerateAssembly(
assembler.Return(var_result.value());
}
// static
compiler::Node* FastCloneRegExpStub::Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* pattern,
compiler::Node* flags,
compiler::Node* context) {
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
typedef compiler::Node Node;
Label call_runtime(assembler, Label::kDeferred), end(assembler);
Variable result(assembler, MachineRepresentation::kTagged);
Node* literals_array =
assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* boilerplate = assembler->LoadFixedArrayElement(
literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
assembler->GotoIf(assembler->IsUndefined(boilerplate), &call_runtime);
{
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Node* copy = assembler->Allocate(size);
for (int offset = 0; offset < size; offset += kPointerSize) {
Node* value = assembler->LoadObjectField(boilerplate, offset);
assembler->StoreObjectFieldNoWriteBarrier(copy, offset, value);
}
result.Bind(copy);
assembler->Goto(&end);
}
assembler->Bind(&call_runtime);
{
result.Bind(assembler->CallRuntime(Runtime::kCreateRegExpLiteral, context,
closure, literal_index, pattern, flags));
assembler->Goto(&end);
}
assembler->Bind(&end);
return result.value();
}
void FastCloneRegExpStub::GenerateAssembly(
compiler::CodeAssemblerState* state) const {
typedef compiler::Node Node;
CodeStubAssembler assembler(state);
Node* closure = assembler.Parameter(Descriptor::kClosure);
Node* literal_index = assembler.Parameter(Descriptor::kLiteralIndex);
Node* pattern = assembler.Parameter(Descriptor::kPattern);
Node* flags = assembler.Parameter(Descriptor::kFlags);
Node* context = assembler.Parameter(Descriptor::kContext);
assembler.Return(
Generate(&assembler, closure, literal_index, pattern, flags, context));
}
namespace {
compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
compiler::Node* boilerplate,
compiler::Node* boilerplate_map,
compiler::Node* boilerplate_elements,
compiler::Node* allocation_site,
compiler::Node* capacity,
ElementsKind kind) {
typedef compiler::Node Node;
typedef CodeStubAssembler::ParameterMode ParameterMode;
ParameterMode param_mode = assembler->OptimalParameterMode();
Node* length = assembler->LoadJSArrayLength(boilerplate);
capacity = assembler->TaggedToParameter(capacity, param_mode);
Node *array, *elements;
std::tie(array, elements) =
assembler->AllocateUninitializedJSArrayWithElements(
kind, boilerplate_map, length, allocation_site, capacity, param_mode);
assembler->Comment("copy elements header");
// Header consists of map and length.
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
assembler->StoreMap(elements, assembler->LoadMap(boilerplate_elements));
{
int offset = FixedArrayBase::kLengthOffset;
assembler->StoreObjectFieldNoWriteBarrier(
elements, offset,
assembler->LoadObjectField(boilerplate_elements, offset));
}
length = assembler->TaggedToParameter(length, param_mode);
assembler->Comment("copy boilerplate elements");
assembler->CopyFixedArrayElements(kind, boilerplate_elements, elements,
length, SKIP_WRITE_BARRIER, param_mode);
assembler->IncrementCounter(
assembler->isolate()->counters()->inlined_copied_elements(), 1);
return array;
}
} // namespace
// static
compiler::Node* FastCloneShallowArrayStub::Generate(
CodeStubAssembler* assembler, compiler::Node* closure,
compiler::Node* literal_index, compiler::Node* context,
CodeStubAssembler::Label* call_runtime,
AllocationSiteMode allocation_site_mode) {
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
typedef compiler::Node Node;
Label zero_capacity(assembler), cow_elements(assembler),
fast_elements(assembler), return_result(assembler);
Variable result(assembler, MachineRepresentation::kTagged);
Node* literals_array =
assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site = assembler->LoadFixedArrayElement(
literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
assembler->GotoIf(assembler->IsUndefined(allocation_site), call_runtime);
allocation_site = assembler->LoadFixedArrayElement(
literals_array, literal_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize,
CodeStubAssembler::SMI_PARAMETERS);
Node* boilerplate = assembler->LoadObjectField(
allocation_site, AllocationSite::kTransitionInfoOffset);
Node* boilerplate_map = assembler->LoadMap(boilerplate);
Node* boilerplate_elements = assembler->LoadElements(boilerplate);
Node* capacity = assembler->LoadFixedArrayBaseLength(boilerplate_elements);
allocation_site =
allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;
Node* zero = assembler->SmiConstant(Smi::kZero);
assembler->GotoIf(assembler->SmiEqual(capacity, zero), &zero_capacity);
Node* elements_map = assembler->LoadMap(boilerplate_elements);
assembler->GotoIf(assembler->IsFixedCOWArrayMap(elements_map), &cow_elements);
assembler->GotoIf(assembler->IsFixedArrayMap(elements_map), &fast_elements);
{
assembler->Comment("fast double elements path");
if (FLAG_debug_code) {
Label correct_elements_map(assembler), abort(assembler, Label::kDeferred);
assembler->Branch(assembler->IsFixedDoubleArrayMap(elements_map),
&correct_elements_map, &abort);
assembler->Bind(&abort);
{
Node* abort_id = assembler->SmiConstant(
Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
assembler->CallRuntime(Runtime::kAbort, context, abort_id);
result.Bind(assembler->UndefinedConstant());
assembler->Goto(&return_result);
}
assembler->Bind(&correct_elements_map);
}
Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
boilerplate_elements, allocation_site,
capacity, FAST_DOUBLE_ELEMENTS);
result.Bind(array);
assembler->Goto(&return_result);
}
assembler->Bind(&fast_elements);
{
assembler->Comment("fast elements path");
Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
boilerplate_elements, allocation_site,
capacity, FAST_ELEMENTS);
result.Bind(array);
assembler->Goto(&return_result);
}
Variable length(assembler, MachineRepresentation::kTagged),
elements(assembler, MachineRepresentation::kTagged);
Label allocate_without_elements(assembler);
assembler->Bind(&cow_elements);
{
assembler->Comment("fixed cow path");
length.Bind(assembler->LoadJSArrayLength(boilerplate));
elements.Bind(boilerplate_elements);
assembler->Goto(&allocate_without_elements);
}
assembler->Bind(&zero_capacity);
{
assembler->Comment("zero capacity path");
length.Bind(zero);
elements.Bind(assembler->LoadRoot(Heap::kEmptyFixedArrayRootIndex));
assembler->Goto(&allocate_without_elements);
}
assembler->Bind(&allocate_without_elements);
{
Node* array = assembler->AllocateUninitializedJSArrayWithoutElements(
FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
assembler->StoreObjectField(array, JSObject::kElementsOffset,
elements.value());
result.Bind(array);
assembler->Goto(&return_result);
}
assembler->Bind(&return_result);
return result.value();
}
void FastCloneShallowArrayStub::GenerateAssembly(
compiler::CodeAssemblerState* state) const {
typedef compiler::Node Node;
typedef CodeStubAssembler::Label Label;
CodeStubAssembler assembler(state);
Node* closure = assembler.Parameter(Descriptor::kClosure);
Node* literal_index = assembler.Parameter(Descriptor::kLiteralIndex);
Node* constant_elements = assembler.Parameter(Descriptor::kConstantElements);
Node* context = assembler.Parameter(Descriptor::kContext);
Label call_runtime(&assembler, Label::kDeferred);
assembler.Return(Generate(&assembler, closure, literal_index, context,
&call_runtime, allocation_site_mode()));
assembler.Bind(&call_runtime);
{
assembler.Comment("call runtime");
Node* flags = assembler.SmiConstant(
Smi::FromInt(ArrayLiteral::kShallowElements |
(allocation_site_mode() == TRACK_ALLOCATION_SITE
? 0
: ArrayLiteral::kDisableMementos)));
assembler.Return(assembler.CallRuntime(Runtime::kCreateArrayLiteral,
context, closure, literal_index,
constant_elements, flags));
}
}
void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
CreateAllocationSiteStub stub(isolate);
stub.GetCode();

View File

@@ -96,9 +96,6 @@ class Node;
V(InternalArrayNoArgumentConstructor) \
V(InternalArraySingleArgumentConstructor) \
V(ElementsTransitionAndStore) \
V(FastCloneRegExp) \
V(FastCloneShallowArray) \
V(FastCloneShallowObject) \
V(KeyedLoadSloppyArguments) \
V(KeyedStoreSloppyArguments) \
V(LoadScriptContextField) \
@@ -830,81 +827,6 @@ class FastNewStrictArgumentsStub final : public PlatformCodeStub {
class SkipStubFrameBits : public BitField<bool, 0, 1> {};
};
class FastCloneRegExpStub final : public TurboFanCodeStub {
public:
explicit FastCloneRegExpStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
static compiler::Node* Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* pattern,
compiler::Node* flags,
compiler::Node* context);
private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneRegExp);
DEFINE_TURBOFAN_CODE_STUB(FastCloneRegExp, TurboFanCodeStub);
};
class FastCloneShallowArrayStub : public TurboFanCodeStub {
public:
// Maximum number of elements in copied array (chosen so that even an array
// backed by a double backing store will fit into new-space).
static const int kMaximumClonedElements =
JSArray::kInitialMaxFastElementArray * kPointerSize / kDoubleSize;
FastCloneShallowArrayStub(Isolate* isolate,
AllocationSiteMode allocation_site_mode)
: TurboFanCodeStub(isolate) {
minor_key_ = AllocationSiteModeBits::encode(allocation_site_mode);
}
static compiler::Node* Generate(CodeStubAssembler* assembler,
compiler::Node* closure,
compiler::Node* literal_index,
compiler::Node* context,
compiler::CodeAssemblerLabel* call_runtime,
AllocationSiteMode allocation_site_mode);
AllocationSiteMode allocation_site_mode() const {
return AllocationSiteModeBits::decode(minor_key_);
}
private:
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowArray);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowArray, TurboFanCodeStub);
};
class FastCloneShallowObjectStub : public TurboFanCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
FastCloneShallowObjectStub(Isolate* isolate, int length)
: TurboFanCodeStub(isolate) {
DCHECK_GE(length, 0);
DCHECK_LE(length, kMaximumClonedProperties);
minor_key_ = LengthBits::encode(LengthBits::encode(length));
}
static compiler::Node* GenerateFastPath(
CodeStubAssembler* assembler, compiler::CodeAssemblerLabel* call_runtime,
compiler::Node* closure, compiler::Node* literals_index,
compiler::Node* properties_count);
static int PropertiesCount(int literal_length);
int length() const { return LengthBits::decode(minor_key_); }
private:
class LengthBits : public BitField<int, 0, 4> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
};
class CreateAllocationSiteStub : public TurboFanCodeStub {
public:
explicit CreateAllocationSiteStub(Isolate* isolate)

View File

@@ -392,11 +392,13 @@ void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
node->InsertInput(zone(), 1, jsgraph()->SmiConstant(p.index()));
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
// Use the FastCloneShallowArrayStub only for shallow boilerplates without
// Use the FastCloneShallowArray builtin only for shallow boilerplates without
// properties up to the number of elements that the stubs can handle.
if ((p.flags() & ArrayLiteral::kShallowElements) != 0 &&
p.length() < FastCloneShallowArrayStub::kMaximumClonedElements) {
Callable callable = CodeFactory::FastCloneShallowArray(isolate());
p.length() <
ConstructorBuiltinsAssembler::kMaximumClonedShallowArrayElements) {
Callable callable = CodeFactory::FastCloneShallowArray(
isolate(), DONT_TRACK_ALLOCATION_SITE);
ReplaceWithStubCall(node, callable, flags);
} else {
node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
@@ -412,10 +414,11 @@ void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) {
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
// Use the FastCloneShallowObjectStub only for shallow boilerplates without
// elements up to the number of properties that the stubs can handle.
// Use the FastCloneShallowObject builtin only for shallow boilerplates
// without elements up to the number of properties that the stubs can handle.
if ((p.flags() & ObjectLiteral::kShallowProperties) != 0 &&
p.length() <= FastCloneShallowObjectStub::kMaximumClonedProperties) {
p.length() <=
ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties) {
Callable callable =
CodeFactory::FastCloneShallowObject(isolate(), p.length());
ReplaceWithStubCall(node, callable, flags);

View File

@@ -1218,8 +1218,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateObjectLiteral);
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kCreateArrayLiteral);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -1205,8 +1205,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kCreateObjectLiteral);
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1341,8 +1342,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kCreateArrayLiteral);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -1149,8 +1149,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_properties));
__ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1282,8 +1283,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -1217,8 +1217,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteral);
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1356,8 +1357,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateArrayLiteral);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -1219,8 +1219,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateObjectLiteral);
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1358,8 +1359,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kCreateArrayLiteral);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -1178,8 +1178,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_properties);
__ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
Callable callable = CodeFactory::FastCloneShallowObject(
isolate(), expr->properties_count());
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
@@ -1310,8 +1311,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
Callable callable =
CodeFactory::FastCloneShallowArray(isolate(), allocation_site_mode);
__ Call(callable.code(), RelocInfo::CODE_TARGET);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

View File

@@ -888,6 +888,14 @@ enum ScopeType : uint8_t {
WITH_SCOPE // The scope introduced by with.
};
// AllocationSiteMode controls whether allocations are tracked by an allocation
// site.
enum AllocationSiteMode {
DONT_TRACK_ALLOCATION_SITE,
TRACK_ALLOCATION_SITE,
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
// The mips architecture prior to revision 5 has inverted encoding for sNaN.
// The x87 FPU converts the sNaN to qNaN automatically when loading sNaN from
// memory.

View File

@@ -4,6 +4,7 @@
#include "src/interpreter/bytecode-flags.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h"
namespace v8 {
@@ -25,10 +26,11 @@ uint8_t CreateObjectLiteralFlags::Encode(bool fast_clone_supported,
uint8_t result = FlagsBits::encode(runtime_flags);
if (fast_clone_supported) {
STATIC_ASSERT(
FastCloneShallowObjectStub::kMaximumClonedProperties <=
ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties <=
1 << CreateObjectLiteralFlags::FastClonePropertiesCountBits::kShift);
DCHECK_LE(properties_count,
FastCloneShallowObjectStub::kMaximumClonedProperties);
DCHECK_LE(
properties_count,
ConstructorBuiltinsAssembler::kMaximumClonedShallowObjectProperties);
result |= CreateObjectLiteralFlags::FastClonePropertiesCountBits::encode(
properties_count);
}
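// The packing: runtime flags occupy the low bits (FlagsBits) and, when fast
// cloning applies, the properties count is OR-ed in above them; the
// STATIC_ASSERT guarantees the maximum count (6) fits above the shift. As a
// standalone sketch with an assumed 3-bit flags field:
//
//   constexpr unsigned kCountShift = 3;  // assumed FlagsBits width
//   constexpr unsigned Encode(unsigned flags, unsigned count) {
//     return flags | (count << kCountShift);
//   }
//   static_assert(Encode(5, 6) == (5u | (6u << 3)), "count sits above flags");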

View File

@@ -1603,7 +1603,8 @@ void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Deep-copy the literal boilerplate.
uint8_t flags = CreateObjectLiteralFlags::Encode(
expr->IsFastCloningSupported(),
FastCloneShallowObjectStub::PropertiesCount(expr->properties_count()),
ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
expr->properties_count()),
expr->ComputeFlags());
// If constant properties is an empty fixed array, use our cached
// empty_fixed_array to ensure it's only added to the constant pool once.

View File

@@ -2551,8 +2551,9 @@ void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) {
Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2));
Node* closure = __ LoadRegister(Register::function_closure());
Node* context = __ GetContext();
Node* result = FastCloneRegExpStub::Generate(
assembler, closure, literal_index, pattern, flags, context);
ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
Node* result = constructor_assembler.EmitFastCloneRegExp(
closure, literal_index, pattern, flags, context);
__ SetAccumulator(result);
__ Dispatch();
}
@@ -2576,9 +2577,9 @@ void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
__ Bind(&fast_shallow_clone);
{
DCHECK(FLAG_allocation_site_pretenuring);
Node* result = FastCloneShallowArrayStub::Generate(
assembler, closure, literal_index, context, &call_runtime,
TRACK_ALLOCATION_SITE);
ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
Node* result = constructor_assembler.EmitFastCloneShallowArray(
closure, literal_index, context, &call_runtime, TRACK_ALLOCATION_SITE);
__ SetAccumulator(result);
__ Dispatch();
}
@@ -2619,8 +2620,9 @@ void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
__ Bind(&if_fast_clone);
{
// If we can do a fast clone do the fast-path in FastCloneShallowObjectStub.
Node* result = FastCloneShallowObjectStub::GenerateFastPath(
assembler, &if_not_fast_clone, closure, literal_index,
ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
Node* result = constructor_assembler.EmitFastCloneShallowObject(
&if_not_fast_clone, closure, literal_index,
fast_clone_properties_count);
__ StoreRegister(result, __ BytecodeOperandReg(3));
__ Dispatch();

View File

@@ -8987,14 +8987,6 @@ class TypeFeedbackInfo: public Struct {
DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
};
enum AllocationSiteMode {
DONT_TRACK_ALLOCATION_SITE,
TRACK_ALLOCATION_SITE,
LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};
class AllocationSite: public Struct {
public:
static const uint32_t kMaximumArrayBytesToPretransition = 8 * 1024;