Revert "Reland r20974: Unify and simplify the FastCloneShallowArrayStub"

This breaks nosnap.

BUG=
R=ishell@chromium.org

Review URL: https://codereview.chromium.org/272243002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21242 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit c33a72b27f (parent e1c86f8207)
Author: verwaest@chromium.org
Date: 2014-05-09 17:56:55 +00:00
19 changed files with 338 additions and 385 deletions


@@ -58,11 +58,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { r3, r2, r1 };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -206,11 +201,6 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -238,10 +228,6 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = r0;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;


@@ -1792,12 +1792,33 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
if (expr->depth() > 1) {
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
length);
__ CallStub(&stub);
}
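The same three-way dispatch is restored in each architecture's VisitArrayLiteral below (ARM64, ia32, MIPS, x64). As a rough standalone sketch of that control flow (illustrative names, not V8's code):

// Minimal sketch of the literal-cloning dispatch this revert restores:
// clone via the copy-on-write stub when the boilerplate's elements use the
// fixed COW array map, fall back to the runtime for deep, large, or
// serializer-enabled cases, and otherwise use the clone stub.
enum class CloneStrategy { kCowStub, kRuntimeCall, kCloneStub };

CloneStrategy ChooseStrategy(bool has_fast_elements, bool elements_are_cow,
                             int depth, bool serializer_enabled, int length) {
  const int kMaximumClonedLength = 8;  // mirrors FastCloneShallowArrayStub
  if (has_fast_elements && elements_are_cow) {
    return CloneStrategy::kCowStub;      // COPY_ON_WRITE_ELEMENTS
  }
  if (depth > 1 || serializer_enabled || length > kMaximumClonedLength) {
    return CloneStrategy::kRuntimeCall;  // Runtime::kHiddenCreateArrayLiteral
  }
  return CloneStrategy::kCloneStub;      // CLONE_ELEMENTS / CLONE_ANY_ELEMENTS
}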


@@ -65,11 +65,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { x3, x2, x1 };
descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -235,11 +230,6 @@ static void InitializeArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -286,10 +276,6 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->register_param_count_ =
sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;


@@ -1795,12 +1795,35 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
__ Mov(x2, Smi::FromInt(expr->literal_index()));
__ Mov(x1, Operand(constant_elements));
if (expr->depth() > 1) {
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
} else if ((expr->depth() > 1) || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ Mov(x0, Smi::FromInt(flags));
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
__ CallStub(&stub);
}


@@ -127,9 +127,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
HInstruction* stack_parameter_count = NULL;
for (int i = 0; i < param_count; ++i) {
Representation r = descriptor_->register_param_representations_ == NULL
? Representation::Tagged()
: descriptor_->register_param_representations_[i];
Representation r = descriptor_->IsParameterCountRegister(i)
? Representation::Integer32()
: Representation::Tagged();
HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
start_environment->Bind(i, param);
parameters_[i] = param;
@@ -330,10 +330,8 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
Factory* factory = isolate()->factory();
HValue* undefined = graph()->GetConstantUndefined();
AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
// This stub is very performance sensitive, the generated code must be tuned
// so that it doesn't build and eager frame.
info()->MarkMustNotHaveEagerFrame();
FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
int length = casted_stub()->length();
HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
GetParameter(1),
@@ -348,40 +346,46 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
AllocationSite::kTransitionInfoOffset);
HInstruction* boilerplate = Add<HLoadNamedField>(
allocation_site, static_cast<HValue*>(NULL), access);
HValue* elements = AddLoadElements(boilerplate);
HValue* capacity = AddLoadFixedArrayLength(elements);
IfBuilder zero_capacity(this);
zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
Token::EQ);
zero_capacity.Then();
Push(BuildCloneShallowArrayEmpty(boilerplate,
allocation_site,
alloc_site_mode));
zero_capacity.Else();
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
Push(BuildCloneShallowArrayCow(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS));
if_fixed_cow.Else();
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS));
HValue* push_value;
if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
HValue* elements = AddLoadElements(boilerplate);
if_fixed.Else();
Push(BuildCloneShallowArrayNonEmpty(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS));
if_fixed.End();
if_fixed_cow.End();
zero_capacity.End();
IfBuilder if_fixed_cow(this);
if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
if_fixed_cow.Then();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
0/*copy-on-write*/);
environment()->Push(push_value);
if_fixed_cow.Else();
IfBuilder if_fixed(this);
if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
if_fixed.Then();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_ELEMENTS,
length);
environment()->Push(push_value);
if_fixed.Else();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
FAST_DOUBLE_ELEMENTS,
length);
environment()->Push(push_value);
} else {
ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
push_value = BuildCloneShallowArray(boilerplate,
allocation_site,
alloc_site_mode,
elements_kind,
length);
environment()->Push(push_value);
}
checker.ElseDeopt("Uninitialized boilerplate literals");
checker.End();
@@ -640,9 +644,6 @@ HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
HValue* result = NULL;
switch (argument_class) {
case NONE:
// This stub is very performance sensitive, the generated code must be
// tuned so that it doesn't build and eager frame.
info()->MarkMustNotHaveEagerFrame();
result = array_builder.AllocateEmptyArray();
break;
case SINGLE:
@@ -666,9 +667,6 @@ HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
HValue* result = NULL;
switch (argument_class) {
case NONE:
// This stub is very performance sensitive, the generated code must be
// tuned so that it doesn't build and eager frame.
info()->MarkMustNotHaveEagerFrame();
result = array_builder.AllocateEmptyArray();
break;
case SINGLE:


@@ -22,7 +22,6 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
register_params_(NULL),
register_param_representations_(NULL),
deoptimization_handler_(NULL),
handler_arguments_mode_(DONT_PASS_ARGUMENTS),
miss_handler_(),
@@ -734,7 +733,9 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
// static
void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
FastCloneShallowArrayStub stub(isolate,
FastCloneShallowArrayStub::CLONE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE, 0);
InstallDescriptor(isolate, &stub);
}


@@ -277,11 +277,6 @@ struct CodeStubInterfaceDescriptor {
int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Register* register_params_;
// Specifies Representations for the stub's parameter. Points to an array of
// Representations of the same length of the numbers of parameters to the
// stub, or if NULL (the default value), Representation of each parameter
// assumed to be Tagged()
Representation* register_param_representations_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
@@ -586,18 +581,50 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
static const int kMaximumInlinedCloneLength = 8;
static const int kMaximumClonedLength = 8;
enum Mode {
CLONE_ELEMENTS,
CLONE_DOUBLE_ELEMENTS,
COPY_ON_WRITE_ELEMENTS,
CLONE_ANY_ELEMENTS,
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
};
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
FastCloneShallowArrayStub(Isolate* isolate,
AllocationSiteMode allocation_site_mode)
Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
: HydrogenCodeStub(isolate),
allocation_site_mode_(allocation_site_mode) {}
mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedLength);
}
Mode mode() const { return mode_; }
int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
virtual Handle<Code> GenerateCode();
ElementsKind ComputeElementsKind() const {
switch (mode()) {
case CLONE_ELEMENTS:
case COPY_ON_WRITE_ELEMENTS:
return FAST_ELEMENTS;
case CLONE_DOUBLE_ELEMENTS:
return FAST_DOUBLE_ELEMENTS;
case CLONE_ANY_ELEMENTS:
/*fall-through*/;
}
UNREACHABLE();
return LAST_ELEMENTS_KIND;
}
virtual Handle<Code> GenerateCode() V8_OVERRIDE;
virtual void InitializeInterfaceDescriptor(
CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -605,13 +632,22 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
static void InstallDescriptors(Isolate* isolate);
private:
Mode mode_;
AllocationSiteMode allocation_site_mode_;
int length_;
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
class ModeBits: public BitField<Mode, 1, 4> {};
class LengthBits: public BitField<int, 5, 4> {};
// Ensure data fits within available bits.
STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
STATIC_ASSERT(kFastCloneModeCount < 16);
STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int NotMissMinorKey() {
return AllocationSiteModeBits::encode(allocation_site_mode_);
return AllocationSiteModeBits::encode(allocation_site_mode_)
| ModeBits::encode(mode_)
| LengthBits::encode(length_);
}
};
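With the revert, the stub's minor key again packs allocation-site mode (bit 0), clone mode (bits 1-4), and length (bits 5-8), as NotMissMinorKey() above shows. A minimal standalone sketch of that encoding, using a hand-rolled BitField rather than V8's:

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for V8's BitField template: encode shifts a value
// into its field, decode extracts it again.
template <typename T, int shift, int size>
struct BitField {
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) & ((1u << size) - 1)) << shift;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key >> shift) & ((1u << size) - 1));
  }
};

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS,
            CLONE_ANY_ELEMENTS };

typedef BitField<AllocationSiteMode, 0, 1> AllocationSiteModeBits;
typedef BitField<Mode, 1, 4> ModeBits;
typedef BitField<int, 5, 4> LengthBits;

int main() {
  // Same composition as NotMissMinorKey() in the hunk above.
  uint32_t key = AllocationSiteModeBits::encode(TRACK_ALLOCATION_SITE) |
                 ModeBits::encode(CLONE_DOUBLE_ELEMENTS) |
                 LengthBits::encode(7);
  std::printf("mode=%d length=%d\n",
              static_cast<int>(ModeBits::decode(key)),
              LengthBits::decode(key));
  return 0;
}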


@@ -143,14 +143,6 @@ class CompilationInfo {
return RequiresFrame::decode(flags_);
}
void MarkMustNotHaveEagerFrame() {
flags_ |= MustNotHaveEagerFrame::encode(true);
}
bool GetMustNotHaveEagerFrame() const {
return MustNotHaveEagerFrame::decode(flags_);
}
void SetParseRestriction(ParseRestriction restriction) {
flags_ = ParseRestricitonField::update(flags_, restriction);
}
@@ -376,8 +368,6 @@ class CompilationInfo {
class ParseRestricitonField: public BitField<ParseRestriction, 12, 1> {};
// If the function requires a frame (for unspecified reasons)
class RequiresFrame: public BitField<bool, 13, 1> {};
// If the function cannot build a frame (for unspecified reasons)
class MustNotHaveEagerFrame: public BitField<bool, 14, 1> {};
unsigned flags_;


@@ -381,7 +381,6 @@ class HistogramTimerScope BASE_EMBEDDED {
SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
SC(call_normal_stubs, V8.CallNormalStubs) \
SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
SC(inlined_copied_elements, V8.InlinedCopiedElements) \
SC(arguments_adaptors, V8.ArgumentsAdaptors) \
SC(compilation_cache_hits, V8.CompilationCacheHits) \
SC(compilation_cache_misses, V8.CompilationCacheMisses) \


@@ -863,8 +863,7 @@ void HGlobalValueNumberingPhase::AnalyzeGraph() {
stream.OutputToStdOut();
}
}
if (instr->CheckFlag(HValue::kUseGVN) &&
!instr->CheckFlag(HValue::kCantBeReplaced)) {
if (instr->CheckFlag(HValue::kUseGVN)) {
ASSERT(!instr->HasObservableSideEffects());
HInstruction* other = map->Lookup(instr);
if (other != NULL) {


@@ -619,10 +619,6 @@ class HValue : public ZoneObject {
// flag.
kUint32,
kHasNoObservableSideEffects,
// Indicates an instruction shouldn't be replaced by optimization, this flag
// is useful to set in cases where recomputing a value is cheaper than
// extending the value's live range and spilling it.
kCantBeReplaced,
// Indicates the instruction is live during dead code elimination.
kIsLive,
@@ -6261,7 +6257,6 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> {
virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;
bool CanBeReplacedWith(HValue* other) const {
if (!CheckFlag(HValue::kCantBeReplaced)) return false;
if (!type().Equals(other->type())) return false;
if (!representation().Equals(other->representation())) return false;
if (!other->IsLoadNamedField()) return true;


@@ -2388,26 +2388,15 @@ HInstruction* HGraphBuilder::AddElementAccess(
}
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
HValue* dependency) {
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
return Add<HLoadNamedField>(
object, dependency, HObjectAccess::ForElementsPointer());
object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
}
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
HValue* array,
HValue* dependency) {
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
return Add<HLoadNamedField>(
array, dependency, HObjectAccess::ForFixedArrayLength());
}
HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
ElementsKind kind,
HValue* dependency) {
return Add<HLoadNamedField>(
array, dependency, HObjectAccess::ForArrayLength(kind));
object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
}
@@ -2440,8 +2429,9 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
new_kind, new_capacity);
BuildCopyElements(object, elements, kind, new_elements,
new_kind, length, new_capacity);
BuildCopyElements(elements, kind,
new_elements, new_kind,
length, new_capacity);
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
new_elements);
@@ -2454,8 +2444,8 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
ElementsKind elements_kind,
HValue* from,
HValue* to) {
// Fast elements kinds need to be initialized in case statements below cause a
// garbage collection.
// Fast elements kinds need to be initialized in case statements below cause
// a garbage collection.
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
@@ -2463,10 +2453,6 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
? Add<HConstant>(factory->the_hole_value())
: Add<HConstant>(nan_double);
if (to == NULL) {
to = AddLoadFixedArrayLength(elements);
}
// Special loop unfolding case
static const int kLoopUnfoldLimit = 8;
STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
@@ -2492,144 +2478,104 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
Add<HStoreKeyed>(elements, key, hole, elements_kind);
}
} else {
// Carefully loop backwards so that the "from" remains live through the loop
// rather than the to. This often corresponds to keeping length live rather
// then capacity, which helps register allocation, since length is used more
// other than capacity after filling with holes.
LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
HValue* key = builder.BeginBody(to, from, Token::GT);
HValue* key = builder.BeginBody(from, to, Token::LT);
HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
adjusted_key->ClearFlag(HValue::kCanOverflow);
Add<HStoreKeyed>(elements, adjusted_key, hole, elements_kind);
Add<HStoreKeyed>(elements, key, hole, elements_kind);
builder.EndBody();
}
}
void HGraphBuilder::BuildCopyElements(HValue* array,
HValue* from_elements,
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity) {
int constant_capacity = -1;
if (capacity != NULL &&
capacity->IsConstant() &&
HConstant::cast(capacity)->HasInteger32Value()) {
int constant_candidate = HConstant::cast(capacity)->Integer32Value();
if (constant_candidate <=
FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
constant_capacity = constant_candidate;
}
}
if (constant_capacity != -1) {
// Unroll the loop for small elements kinds.
for (int i = 0; i < constant_capacity; i++) {
HValue* key_constant = Add<HConstant>(i);
HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
static_cast<HValue*>(NULL),
from_elements_kind);
Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
}
} else {
bool pre_fill_with_holes =
bool pre_fill_with_holes =
IsFastDoubleElementsKind(from_elements_kind) &&
IsFastObjectElementsKind(to_elements_kind);
if (pre_fill_with_holes) {
// If the copy might trigger a GC, make sure that the FixedArray is
// pre-initialized with holes to make sure that it's always in a
// consistent state.
BuildFillElementsWithHole(to_elements, to_elements_kind,
graph()->GetConstant0(), NULL);
} else if (capacity == NULL || !length->Equals(capacity)) {
BuildFillElementsWithHole(to_elements, to_elements_kind,
length, NULL);
}
if (pre_fill_with_holes) {
// If the copy might trigger a GC, make sure that the FixedArray is
// pre-initialized with holes to make sure that it's always in a consistent
// state.
BuildFillElementsWithHole(to_elements, to_elements_kind,
graph()->GetConstant0(), capacity);
}
if (capacity == NULL) {
capacity = AddLoadFixedArrayLength(to_elements);
}
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);
HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
Token::GT);
HValue* element = Add<HLoadKeyed>(from_elements, key,
static_cast<HValue*>(NULL),
from_elements_kind,
ALLOW_RETURN_HOLE);
key = AddUncasted<HSub>(key, graph()->GetConstant1());
key->ClearFlag(HValue::kCanOverflow);
HValue* element = Add<HLoadKeyed>(from_elements, key,
static_cast<HValue*>(NULL),
from_elements_kind,
ALLOW_RETURN_HOLE);
ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
IsFastSmiElementsKind(to_elements_kind))
ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
IsFastSmiElementsKind(to_elements_kind))
? FAST_HOLEY_ELEMENTS : to_elements_kind;
if (IsHoleyElementsKind(from_elements_kind) &&
from_elements_kind != to_elements_kind) {
IfBuilder if_hole(this);
if_hole.If<HCompareHoleAndBranch>(element);
if_hole.Then();
HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
if (IsHoleyElementsKind(from_elements_kind) &&
from_elements_kind != to_elements_kind) {
IfBuilder if_hole(this);
if_hole.If<HCompareHoleAndBranch>(element);
if_hole.Then();
HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
: graph()->GetConstantHole();
Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
if_hole.Else();
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
if_hole.End();
} else {
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
}
builder.EndBody();
Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
if_hole.Else();
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
if_hole.End();
} else {
HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
store->SetFlag(HValue::kAllowUndefinedAsNaN);
}
Counters* counters = isolate()->counters();
AddIncrementCounter(counters->inlined_copied_elements());
builder.EndBody();
if (!pre_fill_with_holes && length != capacity) {
// Fill unused capacity with the hole.
BuildFillElementsWithHole(to_elements, to_elements_kind,
key, capacity);
}
}
HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
HValue* boilerplate,
HValue* allocation_site,
HValue* extra_size,
HValue** return_elements,
AllocationSiteMode mode) {
HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length) {
NoObservableSideEffectsScope no_effects(this);
// All sizes here are multiples of kPointerSize.
int array_size = JSArray::kSize;
int size = JSArray::kSize;
if (mode == TRACK_ALLOCATION_SITE) {
array_size += AllocationMemento::kSize;
}
HValue* size_in_bytes = Add<HConstant>(array_size);
if (extra_size != NULL) {
size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
size_in_bytes->ClearFlag(HValue::kCanOverflow);
size += AllocationMemento::kSize;
}
HValue* size_in_bytes = Add<HConstant>(size);
HInstruction* object = Add<HAllocate>(size_in_bytes,
HType::JSObject(),
NOT_TENURED,
JS_OBJECT_TYPE);
// Copy the JS array part.
HValue* map = Add<HLoadNamedField>(boilerplate,
static_cast<HValue*>(NULL), HObjectAccess::ForMap());
Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
Add<HConstant>(isolate()->factory()->empty_fixed_array()),
INITIALIZING_STORE);
Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
INITIALIZING_STORE);
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
if ((i != JSArray::kElementsOffset) || (length == 0)) {
HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
Add<HStoreNamedField>(
object, access, Add<HLoadNamedField>(
boilerplate, static_cast<HValue*>(NULL), access));
}
}
// Create an allocation site info if requested.
if (mode == TRACK_ALLOCATION_SITE) {
@@ -2637,102 +2583,54 @@ HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
object, Add<HConstant>(JSArray::kSize), allocation_site);
}
if (extra_size != NULL) {
HValue* elements = Add<HInnerAllocatedObject>(object,
Add<HConstant>(array_size));
if (return_elements != NULL) *return_elements = elements;
if (length > 0) {
// We have to initialize the elements pointer if allocation folding is
// turned off.
if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
HConstant* empty_fixed_array = Add<HConstant>(
isolate()->factory()->empty_fixed_array());
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
empty_fixed_array, INITIALIZING_STORE);
}
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* object_elements;
if (IsFastDoubleElementsKind(kind)) {
HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
} else {
HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
NOT_TENURED, FIXED_ARRAY_TYPE);
}
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
object_elements);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
Add<HStoreNamedField>(
object_elements, access, Add<HLoadNamedField>(
boilerplate_elements, static_cast<HValue*>(NULL), access));
}
// Copy the elements array contents.
// TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
// copying loops with constant length up to a given boundary and use this
// helper here instead.
for (int i = 0; i < length; i++) {
HValue* key_constant = Add<HConstant>(i);
HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
static_cast<HValue*>(NULL), kind);
Add<HStoreKeyed>(object_elements, key_constant, value, kind);
}
}
return object;
}
HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind) {
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, NULL, NULL, mode);
HValue* elements = AddLoadElements(boilerplate);
HObjectAccess access = HObjectAccess::ForElementsPointer();
Add<HStoreNamedField>(result, access, elements, INITIALIZING_STORE);
HValue* length = AddLoadArrayLength(boilerplate, kind);
access = HObjectAccess::ForArrayLength(kind);
Add<HStoreNamedField>(result, access, length, INITIALIZING_STORE);
return result;
}
HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode) {
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, NULL, NULL, mode);
HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
INITIALIZING_STORE);
access = HObjectAccess::ForElementsPointer();
Add<HStoreNamedField>(result, access,
Add<HConstant>(isolate()->factory()->empty_fixed_array()),
INITIALIZING_STORE);
return result;
}
HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind) {
int elements_kind_size = IsFastDoubleElementsKind(kind)
? kDoubleSize : kPointerSize;
HValue* boilerplate_elements = AddLoadElements(boilerplate);
HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
HValue* extra = AddUncasted<HMul>(capacity,
Add<HConstant>(elements_kind_size));
extra->ClearFlag(HValue::kCanOverflow);
extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
extra->ClearFlag(HValue::kCanOverflow);
HValue* elements = NULL;
HValue* result = BuildCloneShallowArrayCommon(boilerplate,
allocation_site, extra, &elements, mode);
Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
elements, INITIALIZING_STORE);
// The allocation for the cloned array above causes register pressure on
// machines with low register counts. Force a reload of the boilerplate
// elements here to free up a register for the allocation to avoid unnecessary
// spillage.
boilerplate_elements = AddLoadElements(boilerplate);
boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
// Copy the elements array header.
for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
Add<HStoreNamedField>(elements, access,
Add<HLoadNamedField>(boilerplate_elements,
static_cast<HValue*>(NULL), access),
INITIALIZING_STORE);
}
// And the result of the length
HValue* length = Add<HLoadNamedField>(boilerplate, static_cast<HValue*>(NULL),
HObjectAccess::ForArrayLength(kind));
Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
length, INITIALIZING_STORE);
BuildCopyElements(result, boilerplate_elements, kind, elements,
kind, length, NULL);
return result;
}
void HGraphBuilder::BuildCompareNil(
HValue* value,
Type* type,


@@ -1397,8 +1397,7 @@ class HGraphBuilder {
store_map->SkipWriteBarrier();
return store_map;
}
HLoadNamedField* AddLoadElements(HValue* object,
HValue* dependency = NULL);
HLoadNamedField* AddLoadElements(HValue* object);
bool MatchRotateRight(HValue* left,
HValue* right,
@@ -1414,12 +1413,7 @@ class HGraphBuilder {
Maybe<int> fixed_right_arg,
HAllocationMode allocation_mode);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
HValue *dependency = NULL);
HLoadNamedField* AddLoadArrayLength(HValue *object,
ElementsKind kind,
HValue *dependency = NULL);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);
@@ -1759,33 +1753,18 @@ class HGraphBuilder {
HValue* from,
HValue* to);
void BuildCopyElements(HValue* array,
HValue* from_elements,
void BuildCopyElements(HValue* from_elements,
ElementsKind from_elements_kind,
HValue* to_elements,
ElementsKind to_elements_kind,
HValue* length,
HValue* capacity);
HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
HValue* allocation_site,
HValue* extra_size,
HValue** return_elements,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode);
HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind);
HValue* BuildCloneShallowArray(HValue* boilerplate,
HValue* allocation_site,
AllocationSiteMode mode,
ElementsKind kind,
int length);
HValue* BuildElementIndexHash(HValue* index);


@@ -63,11 +63,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { eax, ebx, ecx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -199,11 +194,6 @@ static void InitializeArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -231,10 +221,6 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = eax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;


@@ -1729,7 +1729,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
if (expr->depth() > 1) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
@@ -1737,11 +1754,25 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(Immediate(Smi::FromInt(flags)));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode,
length);
__ CallStub(&stub);
}


@@ -449,8 +449,6 @@ Handle<Code> LChunk::Codegen() {
CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
CodeGenerator::PrintCode(code, info());
ASSERT(!(info()->GetMustNotHaveEagerFrame() &&
generator.NeedsEagerFrame()));
return code;
}
assembler.AbortedCodeGeneration();


@@ -1805,7 +1805,18 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
__ li(a1, Operand(constant_elements));
if (expr->depth() > 1) {
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1, a1, a2);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);


@@ -59,11 +59,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
static Register registers[] = { rax, rbx, rcx };
descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
static Representation representations[] = {
Representation::Tagged(),
Representation::Smi(),
Representation::Tagged() };
descriptor->register_param_representations_ = representations;
descriptor->deoptimization_handler_ =
Runtime::FunctionForId(
Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -193,11 +188,6 @@ static void InitializeArrayConstructorDescriptor(
descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 3;
static Representation representations[] = {
Representation::Tagged(),
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
descriptor->register_params_ = registers_variable_args;
}
@@ -226,10 +216,6 @@ static void InitializeInternalArrayConstructorDescriptor(
descriptor->stack_parameter_count_ = rax;
descriptor->register_param_count_ = 2;
descriptor->register_params_ = registers_variable_args;
static Representation representations[] = {
Representation::Tagged(),
Representation::Integer32() };
descriptor->register_param_representations_ = representations;
}
descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;


@@ -1766,7 +1766,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
if (expr->depth() > 1) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
@@ -1774,11 +1791,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ Push(Smi::FromInt(flags));
__ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
if (has_constant_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
__ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
FastCloneShallowArrayStub stub(isolate(),
mode,
allocation_site_mode, length);
__ CallStub(&stub);
}