Reland r20974: Unify and simplify the FastCloneShallowArrayStub

- Don't bake length/capacity into full codegen calls of stubs,
allowing boilerplates to increase their capacity without regenerating
code.
- Unify all variants of the clone stub into a single,
length-independent version.
- Various tweaks to make sure that the clone stub doesn't spill and
therefore need an eager stack frame.
- Handle all lengths of array literals in the fast case.

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/272513004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21230 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
danno@chromium.org 2014-05-09 15:55:45 +00:00
parent e7c6f40a65
commit 36fc96a4b8
19 changed files with 385 additions and 338 deletions
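
The key simplification is in the stub's minor key: the old FastCloneShallowArrayStub packed the clone mode and literal length into the key, so each distinct length produced a distinct stub, while the new stub keys only on the allocation-site mode. Below is a simplified, self-contained sketch of that difference; it is not part of this commit's diff, the BitField template is a stand-in for V8's, and the field layouts follow the code-stubs.h hunk later in this diff.

#include <cstdio>

// Simplified stand-in for V8's BitField template.
template <class T, int shift, int size>
struct BitField {
  static unsigned encode(T value) { return static_cast<unsigned>(value) << shift; }
};

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS, CLONE_ANY_ELEMENTS };

typedef BitField<AllocationSiteMode, 0, 1> AllocationSiteModeBits;
typedef BitField<Mode, 1, 4> ModeBits;    // old layout only
typedef BitField<int, 5, 4> LengthBits;   // old layout only

// Old key: allocation-site mode + clone mode + literal length.
unsigned OldMinorKey(AllocationSiteMode s, Mode m, int length) {
  return AllocationSiteModeBits::encode(s) | ModeBits::encode(m) |
         LengthBits::encode(length);
}

// New key: allocation-site mode only, so one stub covers every length.
unsigned NewMinorKey(AllocationSiteMode s) {
  return AllocationSiteModeBits::encode(s);
}

int main() {
  // Old scheme: literals of length 3 and 7 compile against different stubs.
  printf("old: %u vs %u\n",
         OldMinorKey(TRACK_ALLOCATION_SITE, CLONE_ELEMENTS, 3),
         OldMinorKey(TRACK_ALLOCATION_SITE, CLONE_ELEMENTS, 7));
  // New scheme: both lengths map to the same key.
  printf("new: %u vs %u\n",
         NewMinorKey(TRACK_ALLOCATION_SITE),
         NewMinorKey(TRACK_ALLOCATION_SITE));
  return 0;
}

With the length gone from the key, a boilerplate can grow its capacity without forcing the stub to be regenerated.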

View File

@ -58,6 +58,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { r3, r2, r1 };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@ -201,6 +206,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@ -228,6 +238,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;

View File

@ -1792,33 +1792,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}

View File

@ -65,6 +65,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { x3, x2, x1 };
descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@ -230,6 +235,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@ -276,6 +286,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;

View File

@ -1795,35 +1795,12 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
__ Mov(x2, Smi::FromInt(expr->literal_index()));
__ Mov(x1, Operand(constant_elements));
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
} else if ((expr->depth() > 1) || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ Mov(x0, Smi::FromInt(flags));
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}

View File

@ -127,9 +127,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
HInstruction* stack_parameter_count = NULL;
for (int i = 0; i < param_count; ++i) {
Representation r = descriptor_->IsParameterCountRegister(i)
? Representation::Integer32()
: Representation::Tagged();
Representation r = descriptor_->register_param_representations_ == NULL
? Representation::Tagged()
: descriptor_->register_param_representations_[i];
HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
start_environment->Bind(i, param);
parameters_[i] = param;
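
The loop above now picks each parameter's Representation from the new register_param_representations_ array, falling back to Tagged() when a stub doesn't supply one. A minimal stand-alone model of that lookup follows; it is not part of this commit's diff, and Descriptor and the Representation enum are simplified stand-ins for the real CodeStubInterfaceDescriptor and Representation classes.

#include <vector>

enum class Representation { kTagged, kSmi, kInteger32 };

struct Descriptor {
  int register_param_count_ = 0;
  const Representation* register_param_representations_ = nullptr;  // NULL => all Tagged
};

// Mirrors the fallback in CodeStubGraphBuilderBase::BuildGraph: if a stub
// supplies no representations array, every register parameter is Tagged.
std::vector<Representation> ParameterRepresentations(const Descriptor& d) {
  std::vector<Representation> result;
  for (int i = 0; i < d.register_param_count_; ++i) {
    result.push_back(d.register_param_representations_ == nullptr
                         ? Representation::kTagged
                         : d.register_param_representations_[i]);
  }
  return result;
}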
@ -330,8 +330,10 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
Factory* factory = isolate()->factory();
HValue* undefined = graph()->GetConstantUndefined();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
int length = casted_stub()->length();
// This stub is very performance sensitive; the generated code must be tuned
// so that it doesn't build an eager frame.
info()->MarkMustNotHaveEagerFrame();
HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
GetParameter(1),
@ -346,46 +348,40 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
AllocationSite::kTransitionInfoOffset);
HInstruction* boilerplate = Add<HLoadNamedField>(
allocation_site, static_cast<HValue*>(NULL), access);
HValue* push_value;
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements = AddLoadElements(boilerplate);
HValue* capacity = AddLoadFixedArrayLength(elements);
IfBuilder zero_capacity(this);
zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
Token::EQ);
zero_capacity.Then();
Push(BuildCloneShallowArrayEmpty(boilerplate,
allocation_site,
alloc_site_mode));
zero_capacity.Else();
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
push_value = BuildCloneShallowArray(boilerplate,
Push(BuildCloneShallowArrayCow(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
0/*copy-on-write*/);
environment()->Push(push_value);
FAST_ELEMENTS));
if_fixed_cow.Else();
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
push_value = BuildCloneShallowArray(boilerplate,
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
length);
environment()->Push(push_value);
FAST_ELEMENTS));
if_fixed.Else();
push_value = BuildCloneShallowArray(boilerplate,
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS,
length);
environment()->Push(push_value);
} else {
ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
elements_kind,
length);
environment()->Push(push_value);
}
FAST_DOUBLE_ELEMENTS));
if_fixed.End();
if_fixed_cow.End();
zero_capacity.End();
checker.ElseDeopt("Uninitialized boilerplate literals");
checker.End();
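
Taken together, the builder above replaces the old per-mode stubs with one runtime dispatch: empty boilerplates get a fresh empty array, copy-on-write elements are shared by pointer, and everything else is cloned as either FAST_ELEMENTS or FAST_DOUBLE_ELEMENTS. A plain C++ model of that control flow follows; it is not part of this commit's diff, and the names and types are simplified since the real code builds Hydrogen IR over maps and HValues, not std::vector.

#include <memory>
#include <vector>

enum ElementsMap { FIXED_COW_ARRAY_MAP, FIXED_ARRAY_MAP, FIXED_DOUBLE_ARRAY_MAP };

struct Boilerplate {
  ElementsMap elements_map;
  std::shared_ptr<std::vector<double>> elements;  // capacity == elements->size()
  int length;
};

struct Clone {
  std::shared_ptr<std::vector<double>> elements;
  int length;
};

Clone CloneShallowArray(const Boilerplate& b) {
  if (b.elements->empty()) {
    // Zero capacity: build a fresh empty array (BuildCloneShallowArrayEmpty).
    return Clone{std::make_shared<std::vector<double>>(), 0};
  }
  if (b.elements_map == FIXED_COW_ARRAY_MAP) {
    // Copy-on-write: share the backing store (BuildCloneShallowArrayCow).
    return Clone{b.elements, b.length};
  }
  // Fixed or fixed-double elements: allocate and copy
  // (BuildCloneShallowArrayNonEmpty for FAST_ELEMENTS / FAST_DOUBLE_ELEMENTS).
  return Clone{std::make_shared<std::vector<double>>(*b.elements), b.length};
}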
@ -644,6 +640,9 @@ HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
HValue* result = NULL;
switch (argument_class) {
case NONE:
// This stub is very performance sensitive; the generated code must be
// tuned so that it doesn't build an eager frame.
info()->MarkMustNotHaveEagerFrame();
result = array_builder.AllocateEmptyArray();
break;
case SINGLE:
@ -667,6 +666,9 @@ HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
HValue* result = NULL;
switch (argument_class) {
case NONE:
// This stub is very performance sensitive; the generated code must be
// tuned so that it doesn't build an eager frame.
info()->MarkMustNotHaveEagerFrame();
result = array_builder.AllocateEmptyArray();
break;
case SINGLE:

View File

@ -22,6 +22,7 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
register_param_representations_(NULL),
deoptimization_handler_(NULL),
handler_arguments_mode_(DONT_PASS_ARGUMENTS),
miss_handler_(),
@ -733,9 +734,7 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
// static
void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
FastCloneShallowArrayStub stub(isolate,
FastCloneShallowArrayStub::CLONE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE, 0);
FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
InstallDescriptor(isolate, &stub);
}

View File

@ -277,6 +277,11 @@ struct CodeStubInterfaceDescriptor {
int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
// Specifies Representations for the stub's parameters. Points to an array of
// Representations of the same length as the number of parameters to the
// stub, or NULL (the default value), in which case the Representation of
// each parameter is assumed to be Tagged().
Representation* register_param_representations_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
@ -581,50 +586,18 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
static const int kMaximumClonedLength = 8;
enum Mode {
CLONE_ELEMENTS,
CLONE_DOUBLE_ELEMENTS,
COPY_ON_WRITE_ELEMENTS,
CLONE_ANY_ELEMENTS,
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
};
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
static const int kMaximumInlinedCloneLength = 8;
FastCloneShallowArrayStub(Isolate* isolate,
Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
AllocationSiteMode allocation_site_mode)
: HydrogenCodeStub(isolate),
mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedLength);
}
allocation_site_mode_(allocation_site_mode) {}
Mode mode() const { return mode_; }
int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
ElementsKind ComputeElementsKind() const {
switch (mode()) {
case CLONE_ELEMENTS:
case COPY_ON_WRITE_ELEMENTS:
return FAST_ELEMENTS;
case CLONE_DOUBLE_ELEMENTS:
return FAST_DOUBLE_ELEMENTS;
case CLONE_ANY_ELEMENTS:
/*fall-through*/;
}
UNREACHABLE();
return LAST_ELEMENTS_KIND;
}
virtual Handle<Code> GenerateCode() V8_OVERRIDE;
virtual Handle<Code> GenerateCode();
virtual void InitializeInterfaceDescriptor(
CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@ -632,22 +605,13 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
static void InstallDescriptors(Isolate* isolate);
private:
Mode mode_;
AllocationSiteMode allocation_site_mode_;
int length_;
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
class ModeBits: public BitField<Mode, 1, 4> {};
class LengthBits: public BitField<int, 5, 4> {};
// Ensure data fits within available bits.
STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
STATIC_ASSERT(kFastCloneModeCount < 16);
STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int NotMissMinorKey() {
return AllocationSiteModeBits::encode(allocation_site_mode_)
| ModeBits::encode(mode_)
| LengthBits::encode(length_);
return AllocationSiteModeBits::encode(allocation_site_mode_);
}
};

View File

@ -143,6 +143,14 @@ class CompilationInfo {
return RequiresFrame::decode(flags_);
}
void MarkMustNotHaveEagerFrame() {
flags_ |= MustNotHaveEagerFrame::encode(true);
}
bool GetMustNotHaveEagerFrame() const {
return MustNotHaveEagerFrame::decode(flags_);
}
void SetParseRestriction(ParseRestriction restriction) {
flags_ = ParseRestricitonField::update(flags_, restriction);
}
@ -368,6 +376,8 @@ class CompilationInfo {
class ParseRestricitonField: public BitField<ParseRestriction, 12, 1> {};
// If the function requires a frame (for unspecified reasons)
class RequiresFrame: public BitField<bool, 13, 1> {};
// If the function cannot build a frame (for unspecified reasons)
class MustNotHaveEagerFrame: public BitField<bool, 14, 1> {};
unsigned flags_;
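
The new flag is just another bit in CompilationInfo's flags_ word: the stub's graph builder sets it up front, and LChunk::Codegen (further down in this diff) asserts after code generation that no eager frame was actually built. A minimal sketch of that contract follows; it is not part of this commit's diff, and CompilationInfoLike and CheckAfterCodegen are illustrative names rather than V8 API.

#include <cassert>

// One-bit stand-in for the new flag on CompilationInfo.
class CompilationInfoLike {
 public:
  void MarkMustNotHaveEagerFrame() { must_not_have_eager_frame_ = true; }
  bool GetMustNotHaveEagerFrame() const { return must_not_have_eager_frame_; }

 private:
  bool must_not_have_eager_frame_ = false;
};

// Mirrors the new ASSERT in LChunk::Codegen: code for a stub that was marked
// must-not-have-eager-frame may not end up needing an eager frame.
void CheckAfterCodegen(const CompilationInfoLike& info, bool needs_eager_frame) {
  assert(!(info.GetMustNotHaveEagerFrame() && needs_eager_frame));
}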

View File

@ -381,6 +381,7 @@ class HistogramTimerScope BASE_EMBEDDED {
SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
SC(call_normal_stubs, V8.CallNormalStubs) \
SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
SC(inlined_copied_elements, V8.InlinedCopiedElements) \
SC(arguments_adaptors, V8.ArgumentsAdaptors) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \

View File

@ -863,7 +863,8 @@ void HGlobalValueNumberingPhase::AnalyzeGraph() {
stream.OutputToStdOut();
}
}
if (instr->CheckFlag(HValue::kUseGVN)) {
if (instr->CheckFlag(HValue::kUseGVN) &&
!instr->CheckFlag(HValue::kCantBeReplaced)) {
ASSERT(!instr->HasObservableSideEffects());
HInstruction* other = map->Lookup(instr);
if (other != NULL) {

View File

@ -619,6 +619,10 @@ class HValue : public ZoneObject {
// flag.
kUint32,
kHasNoObservableSideEffects,
// Indicates an instruction shouldn't be replaced by optimization; this flag
// is useful to set in cases where recomputing a value is cheaper than
// extending the value's live range and spilling it.
kCantBeReplaced,
// Indicates the instruction is live during dead code elimination.
kIsLive,
@ -6257,6 +6261,7 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> {
virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;
bool CanBeReplacedWith(HValue* other) const {
if (!CheckFlag(HValue::kCantBeReplaced)) return false;
if (!type().Equals(other->type())) return false;
if (!representation().Equals(other->representation())) return false;
if (!other->IsLoadNamedField()) return true;

View File

@ -2388,15 +2388,26 @@ HInstruction* HGraphBuilder::AddElementAccess(
}
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
HValue* dependency) {
return Add<HLoadNamedField>(
object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
object, dependency, HObjectAccess::ForElementsPointer());
}
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
HValue* array,
HValue* dependency) {
return Add<HLoadNamedField>(
object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
array, dependency, HObjectAccess::ForFixedArrayLength());
}
HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
ElementsKind kind,
HValue* dependency) {
return Add<HLoadNamedField>(
array, dependency, HObjectAccess::ForArrayLength(kind));
}
@ -2429,9 +2440,8 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
new_kind, new_capacity);
BuildCopyElements(elements, kind,
new_elements, new_kind,
length, new_capacity);
BuildCopyElements(object, elements, kind, new_elements,
new_kind, length, new_capacity);
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
new_elements);
@ -2444,8 +2454,8 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
ElementsKind elements_kind,
HValue* from,
HValue* to) {
// Fast elements kinds need to be initialized in case statements below cause
// a garbage collection.
// Fast elements kinds need to be initialized in case statements below cause a
// garbage collection.
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
@ -2453,6 +2463,10 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
? Add<HConstant>(factory->the_hole_value())
: Add<HConstant>(nan_double);
if (to == NULL) {
to = AddLoadFixedArrayLength(elements);
}
// Special loop unfolding case
static const int kLoopUnfoldLimit = 8;
STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
@ -2478,38 +2492,78 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
Add<HStoreKeyed>(elements, key, hole, elements_kind);
}
} else {
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
// Carefully loop backwards so that the "from" remains live through the loop
// rather than the "to". This often corresponds to keeping length live rather
// than capacity, which helps register allocation, since length is used more
// often than capacity after filling with holes.
LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
HValue* key = builder.BeginBody(from, to, Token::LT);
HValue* key = builder.BeginBody(to, from, Token::GT);
Add<HStoreKeyed>(elements, key, hole, elements_kind);
HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
adjusted_key->ClearFlag(HValue::kCanOverflow);
Add<HStoreKeyed>(elements, adjusted_key, hole, elements_kind);
builder.EndBody();
}
}
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
void HGraphBuilder::BuildCopyElements(HValue* array,
HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity) {
int constant_capacity = -1;
if (capacity != NULL &&
capacity->IsConstant() &&
HConstant::cast(capacity)->HasInteger32Value()) {
int constant_candidate = HConstant::cast(capacity)->Integer32Value();
if (constant_candidate <=
FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
constant_capacity = constant_candidate;
}
}
if (constant_capacity != -1) {
// Unroll the loop for small elements kinds.
for (int i = 0; i < constant_capacity; i++) {
HValue* key_constant = Add<HConstant>(i);
HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
static_cast<HValue*>(NULL),
from_elements_kind);
Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
}
} else {
bool pre_fill_with_holes =
IsFastDoubleElementsKind(from_elements_kind) &&
IsFastObjectElementsKind(to_elements_kind);
if (pre_fill_with_holes) {
// If the copy might trigger a GC, make sure that the FixedArray is
// pre-initialized with holes to make sure that it's always in a consistent
// state.
// pre-initialized with holes to make sure that it's always in a
// consistent state.
BuildFillElementsWithHole(to_elements, to_elements_kind,
graph()->GetConstant0(), capacity);
graph()->GetConstant0(), NULL);
} else if (capacity == NULL || !length->Equals(capacity)) {
BuildFillElementsWithHole(to_elements, to_elements_kind,
length, NULL);
}
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
if (capacity == NULL) {
capacity = AddLoadFixedArrayLength(to_elements);
}
HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
Token::GT);
key = AddUncasted<HSub>(key, graph()->GetConstant1());
key->ClearFlag(HValue::kCanOverflow);
HValue* element = Add<HLoadKeyed>(from_elements, key,
static_cast<HValue*>(NULL),
@ -2539,43 +2593,43 @@ void HGraphBuilder::BuildCopyElements(HValue* from_elements,
}
builder.EndBody();
if (!pre_fill_with_holes && length != capacity) {
// Fill unused capacity with the hole.
BuildFillElementsWithHole(to_elements, to_elements_kind,
key, capacity);
}
}
Counters* counters = isolate()->counters();
AddIncrementCounter(counters->inlined_copied_elements());
}
HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length) {
NoObservableSideEffectsScope no_effects(this);
HValue* extra_size,
HValue** return_elements,
AllocationSiteMode mode) {
// All sizes here are multiples of kPointerSize.
int size = JSArray::kSize;
int array_size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
size += AllocationMemento::kSize;
array_size += AllocationMemento::kSize;
}
HValue* size_in_bytes = Add<HConstant>(array_size);
if (extra_size != NULL) {
size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
size_in_bytes->ClearFlag(HValue::kCanOverflow);
}
HValue* size_in_bytes = Add<HConstant>(size);
HInstruction* object = Add<HAllocate>(size_in_bytes,
HType::JSObject(),
NOT_TENURED,
JS_OBJECT_TYPE);
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
Add<HStoreNamedField>(
object, access, Add<HLoadNamedField>(
boilerplate, static_cast<HValue*>(NULL), access));
}
}
HValue* map = Add<HLoadNamedField>(boilerplate,
static_cast<HValue*>(NULL), HObjectAccess::ForMap());
Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
Add<HConstant>(isolate()->factory()->empty_fixed_array()),
INITIALIZING_STORE);
Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
INITIALIZING_STORE);
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
@ -2583,51 +2637,99 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
object, Add<HConstant>(JSArray::kSize), allocation_site);
}
if (length > 0) {
// We have to initialize the elements pointer if allocation folding is
// turned off.
if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
HConstant* empty_fixed_array = Add<HConstant>(
isolate()->factory()->empty_fixed_array());
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
empty_fixed_array, INITIALIZING_STORE);
if (extra_size != NULL) {
HValue* elements = Add<HInnerAllocatedObject>(object,
Add<HConstant>(array_size));
if (return_elements != NULL) *return_elements = elements;
}
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* object_elements;
if (IsFastDoubleElementsKind(kind)) {
HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
} else {
HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_ARRAY_TYPE);
return object;
}
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
object_elements);
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind) {
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, NULL, NULL, mode);
HValue* elements = AddLoadElements(boilerplate);
HObjectAccess access = HObjectAccess::ForElementsPointer();
Add<HStoreNamedField>(result, access, elements, INITIALIZING_STORE);
HValue* length = AddLoadArrayLength(boilerplate, kind);
access = HObjectAccess::ForArrayLength(kind);
Add<HStoreNamedField>(result, access, length, INITIALIZING_STORE);
return result;
}
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode) {
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, NULL, NULL, mode);
HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
INITIALIZING_STORE);
access = HObjectAccess::ForElementsPointer();
Add<HStoreNamedField>(result, access,
Add<HConstant>(isolate()->factory()->empty_fixed_array()),
INITIALIZING_STORE);
return result;
}
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind) {
int elements_kind_size = IsFastDoubleElementsKind(kind)
? kDoubleSize : kPointerSize;
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
HValue* extra = AddUncasted<HMul>(capacity,
Add<HConstant>(elements_kind_size));
extra->ClearFlag(HValue::kCanOverflow);
extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
extra->ClearFlag(HValue::kCanOverflow);
HValue* elements = NULL;
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, extra, &elements, mode);
Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
elements, INITIALIZING_STORE);
// The allocation for the cloned array above causes register pressure on
// machines with low register counts. Force a reload of the boilerplate
// elements here to free up a register for the allocation to avoid unnecessary
// spillage.
boilerplate_elements = AddLoadElements(boilerplate);
boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
Add<HStoreNamedField>(
object_elements, access, Add<HLoadNamedField>(
boilerplate_elements, static_cast<HValue*>(NULL), access));
Add<HStoreNamedField>(elements, access,
Add<HLoadNamedField>(boilerplate_elements,
static_cast<HValue*>(NULL), access),
INITIALIZING_STORE);
}
// Copy the elements array contents.
// TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
// copying loops with constant length up to a given boundary and use this
// helper here instead.
for (int i = 0; i < length; i++) {
HValue* key_constant = Add<HConstant>(i);
HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
static_cast<HValue*>(NULL), kind);
Add<HStoreKeyed>(object_elements, key_constant, value, kind);
}
}
// And set the length of the result.
HValue* length = Add<HLoadNamedField>(boilerplate, static_cast<HValue*>(NULL),
HObjectAccess::ForArrayLength(kind));
Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
length, INITIALIZING_STORE);
return object;
BuildCopyElements(result, boilerplate_elements, kind, elements,
kind, length, NULL);
return result;
}
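
BuildCopyElements now chooses between two strategies: when the capacity is a known constant no larger than kMaximumInlinedCloneLength it unrolls the copy completely, and otherwise it copies with a backwards loop so that length, which later code still needs, stays live instead of capacity. A self-contained sketch of the two shapes in plain C++ follows; it is not part of this commit's diff, with ordinary loops standing in for the Hydrogen IR the builder emits.

static const int kMaximumInlinedCloneLength = 8;  // mirrors the constant above

// Constant, small capacity: the trip count is known, so the copy can be fully
// unrolled, mirroring the per-index HLoadKeyed/HStoreKeyed pairs the builder
// emits.
template <int kCapacity>
void CopyUnrolled(const double* from, double* to) {
  for (int i = 0; i < kCapacity; i++) to[i] = from[i];
}

// Everything else: copy with a backwards loop (kPostDecrement LoopBuilder),
// iterating from length down to 0 so that "length" stays live rather than
// "capacity".
void CopyBackwards(const double* from, double* to, int length) {
  for (int key = length; key > 0; key--) {
    int adjusted_key = key - 1;
    to[adjusted_key] = from[adjusted_key];
  }
}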

View File

@ -1397,7 +1397,8 @@ class HGraphBuilder {
store_map->SkipWriteBarrier();
return store_map;
}
HLoadNamedField* AddLoadElements(HValue* object);
HLoadNamedField* AddLoadElements(HValue* object,
HValue* dependency = NULL);
bool MatchRotateRight(HValue* left,
HValue* right,
@ -1413,7 +1414,12 @@ class HGraphBuilder {
Maybe<int> fixed_right_arg,
HAllocationMode allocation_mode);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
HValue *dependency = NULL);
HLoadNamedField* AddLoadArrayLength(HValue *object,
ElementsKind kind,
HValue *dependency = NULL);
HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
@ -1753,18 +1759,33 @@ class HGraphBuilder {
HValue* from,
HValue* to);
void BuildCopyElements(HValue* from_elements,
void BuildCopyElements(HValue* array,
HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity);
HValue* BuildCloneShallowArray(HValue* boilerplate,
HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
HValue* allocation_site,
HValue* extra_size,
HValue** return_elements,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length);
ElementsKind kind);
HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildElementIndexHash(HValue* index);

View File

@ -63,6 +63,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { eax, ebx, ecx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@ -194,6 +199,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@ -221,6 +231,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;

View File

@ -1729,24 +1729,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
@ -1754,25 +1737,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}

View File

@ -449,6 +449,8 @@ Handle<Code> LChunk::Codegen() {
CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
CodeGenerator::PrintCode(code, info());
ASSERT(!(info()->GetMustNotHaveEagerFrame() &&
generator.NeedsEagerFrame()));
return code;
}
assembler.AbortedCodeGeneration();

View File

@ -1805,18 +1805,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
__ li(a1, Operand(constant_elements));
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1, a1, a2);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);

View File

@ -59,6 +59,11 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { rax, rbx, rcx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@ -188,6 +193,11 @@ static void InitializeArrayConstructorDescriptor(
descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 3;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
descriptor->register_params_ = registers_variable_args;
}
@ -216,6 +226,10 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;

View File

@ -1766,24 +1766,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
if (expr->depth() > 1) {
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
@ -1791,24 +1774,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode, length);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}