diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index afea875e9e..910714c65a 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -58,11 +58,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { r3, r2, r1 };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
-  static Representation representations[] = {
-      Representation::Tagged(),
-      Representation::Smi(),
-      Representation::Tagged() };
-  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -206,11 +201,6 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -238,10 +228,6 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = r0;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 221526b670..baea557eec 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1792,12 +1792,33 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
   __ mov(r1, Operand(constant_elements));
-  if (expr->depth() > 1) {
+  if (has_fast_elements && constant_elements_values->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
+    FastCloneShallowArrayStub stub(
+        isolate(),
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        allocation_site_mode,
+        length);
+    __ CallStub(&stub);
+    __ IncrementCounter(
+        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
+  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ mov(r0, Operand(Smi::FromInt(flags)));
     __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
+    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+
+    if (has_fast_elements) {
+      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    }
+
+    FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
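Editorial sketch, not part of the patch: every architecture-specific VisitArrayLiteral hunk in this revert restores the same three-way dispatch. Written as an ordinary function (CloneStrategy and ChooseStrategy are invented names; the real code emits macro-assembler instructions instead of returning a value):

// Sketch of the decision the visitor now makes for each array literal.
enum class CloneStrategy { kCopyOnWriteStub, kRuntimeCall, kCloneStub };

const int kMaximumClonedLength = 8;  // mirrors the stub's constant

CloneStrategy ChooseStrategy(bool has_fast_elements, bool elements_are_cow,
                             int depth, bool serializer_enabled, int length) {
  if (has_fast_elements && elements_are_cow) {
    // The boilerplate's elements are copy-on-write, so the clone can share
    // them outright (COPY_ON_WRITE_ELEMENTS, length 0).
    return CloneStrategy::kCopyOnWriteStub;
  }
  if (depth > 1 || serializer_enabled || length > kMaximumClonedLength) {
    // Nested literals, snapshot builds and long arrays fall back to
    // Runtime::kHiddenCreateArrayLiteral.
    return CloneStrategy::kRuntimeCall;
  }
  // Short, shallow literal: the CLONE_ELEMENTS / CLONE_ANY_ELEMENTS stub.
  return CloneStrategy::kCloneStub;
}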
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 0bf89d8901..dc9d63ca49 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -65,11 +65,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { x3, x2, x1 };
   descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
   descriptor->register_params_ = registers;
-  static Representation representations[] = {
-      Representation::Tagged(),
-      Representation::Smi(),
-      Representation::Tagged() };
-  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -235,11 +230,6 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -286,10 +276,6 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->register_param_count_ =
         sizeof(registers_variable_args) / sizeof(registers_variable_args[0]);
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/arm64/full-codegen-arm64.cc b/src/arm64/full-codegen-arm64.cc
index af007589d3..0196e69e45 100644
--- a/src/arm64/full-codegen-arm64.cc
+++ b/src/arm64/full-codegen-arm64.cc
@@ -1795,12 +1795,35 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
   __ Mov(x2, Smi::FromInt(expr->literal_index()));
   __ Mov(x1, Operand(constant_elements));
-  if (expr->depth() > 1) {
+  if (has_fast_elements && constant_elements_values->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
+    FastCloneShallowArrayStub stub(
+        isolate(),
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        allocation_site_mode,
+        length);
+    __ CallStub(&stub);
+    __ IncrementCounter(
+        isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
+  } else if ((expr->depth() > 1) || Serializer::enabled(isolate()) ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ Mov(x0, Smi::FromInt(flags));
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
-    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
+    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+
+    if (has_fast_elements) {
+      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    }
+
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 83b461de20..a35da69f59 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -127,9 +127,9 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
   bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
   HInstruction* stack_parameter_count = NULL;
   for (int i = 0; i < param_count; ++i) {
-    Representation r = descriptor_->register_param_representations_ == NULL
-        ? Representation::Tagged()
-        : descriptor_->register_param_representations_[i];
+    Representation r = descriptor_->IsParameterCountRegister(i)
+        ? Representation::Integer32()
+        : Representation::Tagged();
     HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
     start_environment->Bind(i, param);
     parameters_[i] = param;
@@ -330,10 +328,8 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
   Factory* factory = isolate()->factory();
   HValue* undefined = graph()->GetConstantUndefined();
   AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
-
-  // This stub is very performance sensitive, the generated code must be tuned
-  // so that it doesn't build and eager frame.
-  info()->MarkMustNotHaveEagerFrame();
+  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
+  int length = casted_stub()->length();

   HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                   GetParameter(1),
@@ -348,40 +346,46 @@ HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
       AllocationSite::kTransitionInfoOffset);
   HInstruction* boilerplate = Add<HLoadNamedField>(
       allocation_site, static_cast<HValue*>(NULL), access);
-  HValue* elements = AddLoadElements(boilerplate);
-  HValue* capacity = AddLoadFixedArrayLength(elements);
-  IfBuilder zero_capacity(this);
-  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
-                                             Token::EQ);
-  zero_capacity.Then();
-  Push(BuildCloneShallowArrayEmpty(boilerplate,
-                                   allocation_site,
-                                   alloc_site_mode));
-  zero_capacity.Else();
-  IfBuilder if_fixed_cow(this);
-  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
-  if_fixed_cow.Then();
-  Push(BuildCloneShallowArrayCow(boilerplate,
-                                 allocation_site,
-                                 alloc_site_mode,
-                                 FAST_ELEMENTS));
-  if_fixed_cow.Else();
-  IfBuilder if_fixed(this);
-  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
-  if_fixed.Then();
-  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
-                                      allocation_site,
-                                      alloc_site_mode,
-                                      FAST_ELEMENTS));
+  HValue* push_value;
+  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
+    HValue* elements = AddLoadElements(boilerplate);

-  if_fixed.Else();
-  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
-                                      allocation_site,
-                                      alloc_site_mode,
-                                      FAST_DOUBLE_ELEMENTS));
-  if_fixed.End();
-  if_fixed_cow.End();
-  zero_capacity.End();
+    IfBuilder if_fixed_cow(this);
+    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
+    if_fixed_cow.Then();
+    push_value = BuildCloneShallowArray(boilerplate,
+                                        allocation_site,
+                                        alloc_site_mode,
+                                        FAST_ELEMENTS,
+                                        0/*copy-on-write*/);
+    environment()->Push(push_value);
+    if_fixed_cow.Else();
+
+    IfBuilder if_fixed(this);
+    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
+    if_fixed.Then();
+    push_value = BuildCloneShallowArray(boilerplate,
+                                        allocation_site,
+                                        alloc_site_mode,
+                                        FAST_ELEMENTS,
+                                        length);
+    environment()->Push(push_value);
+    if_fixed.Else();
+    push_value = BuildCloneShallowArray(boilerplate,
+                                        allocation_site,
+                                        alloc_site_mode,
+                                        FAST_DOUBLE_ELEMENTS,
+                                        length);
+    environment()->Push(push_value);
+  } else {
+    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
+    push_value = BuildCloneShallowArray(boilerplate,
+                                        allocation_site,
+                                        alloc_site_mode,
+                                        elements_kind,
+                                        length);
+    environment()->Push(push_value);
+  }

   checker.ElseDeopt("Uninitialized boilerplate literals");
   checker.End();
@@ -640,9 +644,6 @@ HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
   HValue* result = NULL;
   switch (argument_class) {
     case NONE:
-      // This stub is very performance sensitive, the generated code must be
-      // tuned so that it doesn't build and eager frame.
-      info()->MarkMustNotHaveEagerFrame();
       result = array_builder.AllocateEmptyArray();
       break;
     case SINGLE:
@@ -666,9 +667,6 @@ HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
   HValue* result = NULL;
   switch (argument_class) {
     case NONE:
-      // This stub is very performance sensitive, the generated code must be
-      // tuned so that it doesn't build and eager frame.
-      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
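Editorial sketch of the stub-runtime classification that only the CLONE_ANY_ELEMENTS mode still performs in the hunk above; Boilerplate is a made-up stand-in for the HCompareMap checks against the boilerplate's elements map. All other modes resolve the elements kind at stub-key time via ComputeElementsKind() and skip these checks entirely:

// Classify a boilerplate the way the CLONE_ANY_ELEMENTS graph does.
struct Boilerplate {
  bool elements_map_is_cow;    // elements map == fixed_cow_array_map
  bool elements_map_is_fixed;  // elements map == fixed_array_map
};

enum class ClonePath { kShareCowElements, kCopyFastElements, kCopyDoubles };

ClonePath ClassifyAtRuntime(const Boilerplate& b) {
  if (b.elements_map_is_cow) return ClonePath::kShareCowElements;  // length 0
  if (b.elements_map_is_fixed) return ClonePath::kCopyFastElements;
  return ClonePath::kCopyDoubles;  // remaining case: fixed double array
}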
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 64fb3eb705..24f60ed412 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -22,7 +22,6 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
       hint_stack_parameter_count_(-1),
       function_mode_(NOT_JS_FUNCTION_STUB_MODE),
       register_params_(NULL),
-      register_param_representations_(NULL),
       deoptimization_handler_(NULL),
       handler_arguments_mode_(DONT_PASS_ARGUMENTS),
       miss_handler_(),
@@ -734,7 +733,9 @@ void FastNewContextStub::InstallDescriptors(Isolate* isolate) {

 // static
 void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
-  FastCloneShallowArrayStub stub(isolate, DONT_TRACK_ALLOCATION_SITE);
+  FastCloneShallowArrayStub stub(isolate,
+                                 FastCloneShallowArrayStub::CLONE_ELEMENTS,
+                                 DONT_TRACK_ALLOCATION_SITE, 0);
   InstallDescriptor(isolate, &stub);
 }
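Toy model (editorial, with invented names) of why one representative instance suffices in InstallDescriptors: the interface descriptor is keyed by the stub family alone, so every (mode, length) specialization shares the registers declared once per architecture, and only the generated code differs.

// A std::map stands in for the isolate's per-major-key descriptor table.
#include <cassert>
#include <map>
#include <string>

struct Descriptor { int register_param_count; };

int main() {
  std::map<std::string, Descriptor> table;
  // A representative instance (CLONE_ELEMENTS, length 0) installs the entry.
  table.emplace("FastCloneShallowArray", Descriptor{3});
  // Any specialization of the stub later looks up the same shared entry.
  assert(table.at("FastCloneShallowArray").register_param_count == 3);
  return 0;
}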
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 4180e27da0..a6c2294ba2 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -277,11 +277,6 @@ struct CodeStubInterfaceDescriptor {
   int hint_stack_parameter_count_;
   StubFunctionMode function_mode_;
   Register* register_params_;
-  // Specifies Representations for the stub's parameter. Points to an array of
-  // Representations of the same length of the numbers of parameters to the
-  // stub, or if NULL (the default value), Representation of each parameter
-  // assumed to be Tagged()
-  Representation* register_param_representations_;
   Address deoptimization_handler_;
   HandlerArgumentsMode handler_arguments_mode_;
@@ -586,18 +581,50 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
 class FastCloneShallowArrayStub : public HydrogenCodeStub {
  public:
   // Maximum length of copied elements array.
-  static const int kMaximumInlinedCloneLength = 8;
+  static const int kMaximumClonedLength = 8;
+  enum Mode {
+    CLONE_ELEMENTS,
+    CLONE_DOUBLE_ELEMENTS,
+    COPY_ON_WRITE_ELEMENTS,
+    CLONE_ANY_ELEMENTS,
+    LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
+  };
+
+  static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;

   FastCloneShallowArrayStub(Isolate* isolate,
-                            AllocationSiteMode allocation_site_mode)
+                            Mode mode,
+                            AllocationSiteMode allocation_site_mode,
+                            int length)
       : HydrogenCodeStub(isolate),
-        allocation_site_mode_(allocation_site_mode) {}
+        mode_(mode),
+        allocation_site_mode_(allocation_site_mode),
+        length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
+    ASSERT_GE(length_, 0);
+    ASSERT_LE(length_, kMaximumClonedLength);
+  }

+  Mode mode() const { return mode_; }
+  int length() const { return length_; }
   AllocationSiteMode allocation_site_mode() const {
     return allocation_site_mode_;
   }

-  virtual Handle<Code> GenerateCode();
+  ElementsKind ComputeElementsKind() const {
+    switch (mode()) {
+      case CLONE_ELEMENTS:
+      case COPY_ON_WRITE_ELEMENTS:
+        return FAST_ELEMENTS;
+      case CLONE_DOUBLE_ELEMENTS:
+        return FAST_DOUBLE_ELEMENTS;
+      case CLONE_ANY_ELEMENTS:
+        /*fall-through*/;
+    }
+    UNREACHABLE();
+    return LAST_ELEMENTS_KIND;
+  }
+
+  virtual Handle<Code> GenerateCode() V8_OVERRIDE;

   virtual void InitializeInterfaceDescriptor(
       CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -605,13 +632,22 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
   static void InstallDescriptors(Isolate* isolate);

  private:
+  Mode mode_;
   AllocationSiteMode allocation_site_mode_;
+  int length_;

   class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
+  class ModeBits: public BitField<Mode, 1, 4> {};
+  class LengthBits: public BitField<int, 5, 4> {};
   // Ensure data fits within available bits.
+  STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
+  STATIC_ASSERT(kFastCloneModeCount < 16);
+  STATIC_ASSERT(kMaximumClonedLength < 16);

   Major MajorKey() { return FastCloneShallowArray; }
   int NotMissMinorKey() {
-    return AllocationSiteModeBits::encode(allocation_site_mode_);
+    return AllocationSiteModeBits::encode(allocation_site_mode_)
+        | ModeBits::encode(mode_)
+        | LengthBits::encode(length_);
   }
 };
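Compilable restatement (editorial) of the restored minor-key packing: each distinct (site mode, mode, length) triple yields its own key, hence its own cached, length-specialized stub. The patch's STATIC_ASSERTs guarantee mode and length each fit in four bits; the exact field offsets below are this sketch's assumption, not quoted text.

#include <cassert>
#include <cstdint>

enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS,
            CLONE_ANY_ELEMENTS };

uint32_t MinorKey(AllocationSiteMode site_mode, Mode mode, int length) {
  assert(length >= 0 && length <= 8);
  return static_cast<uint32_t>(site_mode)        // bit 0
       | (static_cast<uint32_t>(mode) << 1)      // bits 1..4
       | (static_cast<uint32_t>(length) << 5);   // bits 5..8
}

int main() {
  // Stubs differing only in length get distinct keys and distinct code.
  assert(MinorKey(DONT_TRACK_ALLOCATION_SITE, CLONE_ELEMENTS, 4) !=
         MinorKey(DONT_TRACK_ALLOCATION_SITE, CLONE_ELEMENTS, 5));
  return 0;
}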
diff --git a/src/compiler.h b/src/compiler.h
index fd26d24d71..24a8a9f5de 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -143,14 +143,6 @@ class CompilationInfo {
     return RequiresFrame::decode(flags_);
   }

-  void MarkMustNotHaveEagerFrame() {
-    flags_ |= MustNotHaveEagerFrame::encode(true);
-  }
-
-  bool GetMustNotHaveEagerFrame() const {
-    return MustNotHaveEagerFrame::decode(flags_);
-  }
-
   void SetParseRestriction(ParseRestriction restriction) {
     flags_ = ParseRestricitonField::update(flags_, restriction);
   }
@@ -376,8 +368,6 @@ class CompilationInfo {
   class ParseRestricitonField: public BitField {};
   // If the function requires a frame (for unspecified reasons)
   class RequiresFrame: public BitField {};
-  // If the function cannot build a frame (for unspecified reasons)
-  class MustNotHaveEagerFrame: public BitField {};

   unsigned flags_;
diff --git a/src/counters.h b/src/counters.h
index 19e19bd9dd..9f1cd370e0 100644
--- a/src/counters.h
+++ b/src/counters.h
@@ -381,7 +381,6 @@ class HistogramTimerScope BASE_EMBEDDED {
   SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs)  \
   SC(call_normal_stubs, V8.CallNormalStubs)                  \
   SC(call_megamorphic_stubs, V8.CallMegamorphicStubs)        \
-  SC(inlined_copied_elements, V8.InlinedCopiedElements)      \
   SC(arguments_adaptors, V8.ArgumentsAdaptors)               \
   SC(compilation_cache_hits, V8.CompilationCacheHits)        \
   SC(compilation_cache_misses, V8.CompilationCacheMisses)    \
diff --git a/src/hydrogen-gvn.cc b/src/hydrogen-gvn.cc
index b32b90951a..f9d1b408a7 100644
--- a/src/hydrogen-gvn.cc
+++ b/src/hydrogen-gvn.cc
@@ -863,8 +863,7 @@ void HGlobalValueNumberingPhase::AnalyzeGraph() {
         stream.OutputToStdOut();
       }
     }
-    if (instr->CheckFlag(HValue::kUseGVN) &&
-        !instr->CheckFlag(HValue::kCantBeReplaced)) {
+    if (instr->CheckFlag(HValue::kUseGVN)) {
       ASSERT(!instr->HasObservableSideEffects());
       HInstruction* other = map->Lookup(instr);
       if (other != NULL) {
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 34eafa99ff..fcf46554ea 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -619,10 +619,6 @@ class HValue : public ZoneObject {
   // flag.
   kUint32,
   kHasNoObservableSideEffects,
-  // Indicates an instruction shouldn't be replaced by optimization, this flag
-  // is useful to set in cases where recomputing a value is cheaper than
-  // extending the value's live range and spilling it.
-  kCantBeReplaced,
   // Indicates the instruction is live during dead code elimination.
   kIsLive,
@@ -6261,7 +6257,6 @@ class HLoadNamedField V8_FINAL : public HTemplateInstruction<2> {
   virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;

   bool CanBeReplacedWith(HValue* other) const {
-    if (!CheckFlag(HValue::kCantBeReplaced)) return false;
     if (!type().Equals(other->type())) return false;
     if (!representation().Equals(other->representation())) return false;
     if (!other->IsLoadNamedField()) return true;
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index ba0fcab311..60730ab6c1 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -2388,26 +2388,15 @@ HInstruction* HGraphBuilder::AddElementAccess(
 }


-HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
-                                                HValue* dependency) {
+HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
   return Add<HLoadNamedField>(
-      object, dependency, HObjectAccess::ForElementsPointer());
+      object, static_cast<HValue*>(NULL), HObjectAccess::ForElementsPointer());
 }


-HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
-    HValue* array,
-    HValue* dependency) {
+HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
   return Add<HLoadNamedField>(
-      array, dependency, HObjectAccess::ForFixedArrayLength());
-}
-
-
-HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
-                                                   ElementsKind kind,
-                                                   HValue* dependency) {
-  return Add<HLoadNamedField>(
-      array, dependency, HObjectAccess::ForArrayLength(kind));
+      object, static_cast<HValue*>(NULL), HObjectAccess::ForFixedArrayLength());
 }


@@ -2440,8 +2429,9 @@ HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
   HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
       new_kind, new_capacity);

-  BuildCopyElements(object, elements, kind, new_elements,
-                    new_kind, length, new_capacity);
+  BuildCopyElements(elements, kind,
+                    new_elements, new_kind,
+                    length, new_capacity);

   Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                         new_elements);
@@ -2454,8 +2444,8 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                               ElementsKind elements_kind,
                                               HValue* from,
                                               HValue* to) {
-  // Fast elements kinds need to be initialized in case statements below cause a
-  // garbage collection.
+  // Fast elements kinds need to be initialized in case statements below cause
+  // a garbage collection.
   Factory* factory = isolate()->factory();

   double nan_double = FixedDoubleArray::hole_nan_as_double();
@@ -2463,10 +2453,6 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
       ? Add<HConstant>(factory->the_hole_value())
       : Add<HConstant>(nan_double);

-  if (to == NULL) {
-    to = AddLoadFixedArrayLength(elements);
-  }
-
   // Special loop unfolding case
   static const int kLoopUnfoldLimit = 8;
   STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
@@ -2492,144 +2478,104 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
       Add<HStoreKeyed>(elements, key, hole, elements_kind);
     }
   } else {
-    // Carefully loop backwards so that the "from" remains live through the loop
-    // rather than the to. This often corresponds to keeping length live rather
-    // then capacity, which helps register allocation, since length is used more
-    // other than capacity after filling with holes.
-    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
+    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

-    HValue* key = builder.BeginBody(to, from, Token::GT);
+    HValue* key = builder.BeginBody(from, to, Token::LT);

-    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
-    adjusted_key->ClearFlag(HValue::kCanOverflow);
-
-    Add<HStoreKeyed>(elements, adjusted_key, hole, elements_kind);
+    Add<HStoreKeyed>(elements, key, hole, elements_kind);

     builder.EndBody();
   }
 }


-void HGraphBuilder::BuildCopyElements(HValue* array,
-                                      HValue* from_elements,
+void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                       ElementsKind from_elements_kind,
                                       HValue* to_elements,
                                       ElementsKind to_elements_kind,
                                       HValue* length,
                                       HValue* capacity) {
-  int constant_capacity = -1;
-  if (capacity != NULL &&
-      capacity->IsConstant() &&
-      HConstant::cast(capacity)->HasInteger32Value()) {
-    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
-    if (constant_candidate <=
-        FastCloneShallowArrayStub::kMaximumInlinedCloneLength) {
-      constant_capacity = constant_candidate;
-    }
-  }
-
-  if (constant_capacity != -1) {
-    // Unroll the loop for small elements kinds.
-    for (int i = 0; i < constant_capacity; i++) {
-      HValue* key_constant = Add<HConstant>(i);
-      HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
-                                            static_cast<HValue*>(NULL),
-                                            from_elements_kind);
-      Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
-    }
-  } else {
-    bool pre_fill_with_holes =
+  bool pre_fill_with_holes =
       IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);

-    if (pre_fill_with_holes) {
-      // If the copy might trigger a GC, make sure that the FixedArray is
-      // pre-initialized with holes to make sure that it's always in a
-      // consistent state.
-      BuildFillElementsWithHole(to_elements, to_elements_kind,
-                                graph()->GetConstant0(), NULL);
-    } else if (capacity == NULL || !length->Equals(capacity)) {
-      BuildFillElementsWithHole(to_elements, to_elements_kind,
-                                length, NULL);
-    }
+  if (pre_fill_with_holes) {
+    // If the copy might trigger a GC, make sure that the FixedArray is
+    // pre-initialized with holes to make sure that it's always in a consistent
+    // state.
+    BuildFillElementsWithHole(to_elements, to_elements_kind,
+                              graph()->GetConstant0(), capacity);
+  }

-    if (capacity == NULL) {
-      capacity = AddLoadFixedArrayLength(to_elements);
-    }
+  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

-    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
+  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);

-    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
-                                    Token::GT);
+  HValue* element = Add<HLoadKeyed>(from_elements, key,
+                                    static_cast<HValue*>(NULL),
+                                    from_elements_kind,
+                                    ALLOW_RETURN_HOLE);

-    key = AddUncasted<HSub>(key, graph()->GetConstant1());
-    key->ClearFlag(HValue::kCanOverflow);
-
-    HValue* element = Add<HLoadKeyed>(from_elements, key,
-                                      static_cast<HValue*>(NULL),
-                                      from_elements_kind,
-                                      ALLOW_RETURN_HOLE);
-
-    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
-                         IsFastSmiElementsKind(to_elements_kind))
+  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
+                       IsFastSmiElementsKind(to_elements_kind))
      ? FAST_HOLEY_ELEMENTS : to_elements_kind;

-    if (IsHoleyElementsKind(from_elements_kind) &&
-        from_elements_kind != to_elements_kind) {
-      IfBuilder if_hole(this);
-      if_hole.If<HCompareHoleAndBranch>(element);
-      if_hole.Then();
-      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
+  if (IsHoleyElementsKind(from_elements_kind) &&
+      from_elements_kind != to_elements_kind) {
+    IfBuilder if_hole(this);
+    if_hole.If<HCompareHoleAndBranch>(element);
+    if_hole.Then();
+    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
         ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
         : graph()->GetConstantHole();
-      Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
-      if_hole.Else();
-      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-      store->SetFlag(HValue::kAllowUndefinedAsNaN);
-      if_hole.End();
-    } else {
-      HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
-      store->SetFlag(HValue::kAllowUndefinedAsNaN);
-    }
-
-    builder.EndBody();
+    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
+    if_hole.Else();
+    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+    store->SetFlag(HValue::kAllowUndefinedAsNaN);
+    if_hole.End();
+  } else {
+    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+    store->SetFlag(HValue::kAllowUndefinedAsNaN);
   }

-  Counters* counters = isolate()->counters();
-  AddIncrementCounter(counters->inlined_copied_elements());
+  builder.EndBody();
+
+  if (!pre_fill_with_holes && length != capacity) {
+    // Fill unused capacity with the hole.
+    BuildFillElementsWithHole(to_elements, to_elements_kind,
+                              key, capacity);
+  }
 }


-HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
-    HValue* boilerplate,
-    HValue* allocation_site,
-    HValue* extra_size,
-    HValue** return_elements,
-    AllocationSiteMode mode) {
+
+HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
+                                              HValue* allocation_site,
+                                              AllocationSiteMode mode,
+                                              ElementsKind kind,
+                                              int length) {
+  NoObservableSideEffectsScope no_effects(this);
+
   // All sizes here are multiples of kPointerSize.
-  int array_size = JSArray::kSize;
+  int size = JSArray::kSize;
   if (mode == TRACK_ALLOCATION_SITE) {
-    array_size += AllocationMemento::kSize;
-  }
-
-  HValue* size_in_bytes = Add<HConstant>(array_size);
-  if (extra_size != NULL) {
-    size_in_bytes = AddUncasted<HAdd>(extra_size, size_in_bytes);
-    size_in_bytes->ClearFlag(HValue::kCanOverflow);
+    size += AllocationMemento::kSize;
   }

+  HValue* size_in_bytes = Add<HConstant>(size);
   HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                         NOT_TENURED, JS_OBJECT_TYPE);

   // Copy the JS array part.
-  HValue* map = Add<HLoadNamedField>(boilerplate,
-      static_cast<HValue*>(NULL), HObjectAccess::ForMap());
-  Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
-      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
-      INITIALIZING_STORE);
-  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map,
-      INITIALIZING_STORE);
+  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
+    if ((i != JSArray::kElementsOffset) || (length == 0)) {
+      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
+      Add<HStoreNamedField>(
+          object, access, Add<HLoadNamedField>(
+              boilerplate, static_cast<HValue*>(NULL), access));
+    }
+  }

   // Create an allocation site info if requested.
   if (mode == TRACK_ALLOCATION_SITE) {
@@ -2637,102 +2583,54 @@ HValue* HGraphBuilder::BuildCloneShallowArrayCommon(
     BuildCreateAllocationMemento(
         object, Add<HConstant>(JSArray::kSize), allocation_site);
   }

-  if (extra_size != NULL) {
-    HValue* elements = Add<HInnerAllocatedObject>(object,
-        Add<HConstant>(array_size));
-    if (return_elements != NULL) *return_elements = elements;
+  if (length > 0) {
+    // We have to initialize the elements pointer if allocation folding is
+    // turned off.
+    if (!FLAG_use_gvn || !FLAG_use_allocation_folding) {
+      HConstant* empty_fixed_array = Add<HConstant>(
+          isolate()->factory()->empty_fixed_array());
+      Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
+                            empty_fixed_array, INITIALIZING_STORE);
+    }
+
+    HValue* boilerplate_elements = AddLoadElements(boilerplate);
+    HValue* object_elements;
+    if (IsFastDoubleElementsKind(kind)) {
+      HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
+      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
+                                       NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
+    } else {
+      HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
+      object_elements = Add<HAllocate>(elems_size, HType::Tagged(),
+                                       NOT_TENURED, FIXED_ARRAY_TYPE);
+    }
+    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
+                          object_elements);
+
+    // Copy the elements array header.
+    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
+      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
+      Add<HStoreNamedField>(
+          object_elements, access, Add<HLoadNamedField>(
+              boilerplate_elements, static_cast<HValue*>(NULL), access));
+    }
+
+    // Copy the elements array contents.
+    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
+    // copying loops with constant length up to a given boundary and use this
+    // helper here instead.
+    for (int i = 0; i < length; i++) {
+      HValue* key_constant = Add<HConstant>(i);
+      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
+                                            static_cast<HValue*>(NULL), kind);
+      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
+    }
   }

   return object;
 }


-HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
-                                                 HValue* allocation_site,
-                                                 AllocationSiteMode mode,
-                                                 ElementsKind kind) {
-  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
-      allocation_site, NULL, NULL, mode);
-
-  HValue* elements = AddLoadElements(boilerplate);
-  HObjectAccess access = HObjectAccess::ForElementsPointer();
-  Add<HStoreNamedField>(result, access, elements, INITIALIZING_STORE);
-
-  HValue* length = AddLoadArrayLength(boilerplate, kind);
-  access = HObjectAccess::ForArrayLength(kind);
-  Add<HStoreNamedField>(result, access, length, INITIALIZING_STORE);
-
-  return result;
-}
-
-
-HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
-                                                   HValue* allocation_site,
-                                                   AllocationSiteMode mode) {
-  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
-      allocation_site, NULL, NULL, mode);
-
-  HObjectAccess access = HObjectAccess::ForArrayLength(FAST_ELEMENTS);
-  Add<HStoreNamedField>(result, access, graph()->GetConstant0(),
-                        INITIALIZING_STORE);
-  access = HObjectAccess::ForElementsPointer();
-  Add<HStoreNamedField>(result, access,
-      Add<HConstant>(isolate()->factory()->empty_fixed_array()),
-      INITIALIZING_STORE);
-
-  return result;
-}
-
-
-HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
-                                                      HValue* allocation_site,
-                                                      AllocationSiteMode mode,
-                                                      ElementsKind kind) {
-  int elements_kind_size = IsFastDoubleElementsKind(kind)
-      ? kDoubleSize : kPointerSize;
-
-  HValue* boilerplate_elements = AddLoadElements(boilerplate);
-  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
-  HValue* extra = AddUncasted<HMul>(capacity,
-                                    Add<HConstant>(elements_kind_size));
-  extra->ClearFlag(HValue::kCanOverflow);
-  extra = AddUncasted<HAdd>(extra, Add<HConstant>(FixedArray::kHeaderSize));
-  extra->ClearFlag(HValue::kCanOverflow);
-  HValue* elements = NULL;
-  HValue* result = BuildCloneShallowArrayCommon(boilerplate,
-      allocation_site, extra, &elements, mode);
-  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
-                        elements, INITIALIZING_STORE);
-
-  // The allocation for the cloned array above causes register pressure on
-  // machines with low register counts. Force a reload of the boilerplate
-  // elements here to free up a register for the allocation to avoid unnecessary
-  // spillage.
-  boilerplate_elements = AddLoadElements(boilerplate);
-  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
-
-  // Copy the elements array header.
-  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
-    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
-    Add<HStoreNamedField>(elements, access,
-        Add<HLoadNamedField>(boilerplate_elements,
-                             static_cast<HValue*>(NULL), access),
-        INITIALIZING_STORE);
-  }
-
-  // And the result of the length
-  HValue* length = Add<HLoadNamedField>(boilerplate,
-                                        static_cast<HValue*>(NULL),
-                                        HObjectAccess::ForArrayLength(kind));
-  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind),
-                        length, INITIALIZING_STORE);
-
-  BuildCopyElements(result, boilerplate_elements, kind, elements,
-                    kind, length, NULL);
-
-  return result;
-}
-
-
 void HGraphBuilder::BuildCompareNil(
     HValue* value,
     Type* type,
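Plain-C++ model (editorial) of the copy protocol the hydrogen.cc hunks above return to, using std::vector and an int sentinel for the hole: pre-fill the whole target when the copy itself could trigger a GC, otherwise hole-fill only the unused tail after the forward copy.

#include <algorithm>
#include <vector>

const int kHole = -1;  // stand-in for the_hole_value / the hole NaN

std::vector<int> CopyElements(const std::vector<int>& from, int length,
                              int capacity, bool copy_can_trigger_gc) {
  std::vector<int> to(capacity);
  if (copy_can_trigger_gc) {
    // A GC observing the half-built array must only ever see holes.
    std::fill(to.begin(), to.end(), kHole);
  }
  for (int i = 0; i < length; ++i) {  // the kPostIncrement loop
    to[i] = from[i];
  }
  if (!copy_can_trigger_gc && length != capacity) {
    std::fill(to.begin() + length, to.end(), kHole);  // hole-fill the tail
  }
  return to;
}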
diff --git a/src/hydrogen.h b/src/hydrogen.h
index 42a63fd57e..060ccb9b36 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -1397,8 +1397,7 @@ class HGraphBuilder {
     store_map->SkipWriteBarrier();
     return store_map;
   }
-  HLoadNamedField* AddLoadElements(HValue* object,
-                                   HValue* dependency = NULL);
+  HLoadNamedField* AddLoadElements(HValue* object);

   bool MatchRotateRight(HValue* left,
                         HValue* right,
@@ -1414,12 +1413,7 @@ class HGraphBuilder {
                               Maybe<int> fixed_right_arg,
                               HAllocationMode allocation_mode);

-  HLoadNamedField* AddLoadFixedArrayLength(HValue *object,
-                                           HValue *dependency = NULL);
-
-  HLoadNamedField* AddLoadArrayLength(HValue *object,
-                                      ElementsKind kind,
-                                      HValue *dependency = NULL);
+  HLoadNamedField* AddLoadFixedArrayLength(HValue *object);

   HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin);

@@ -1759,33 +1753,18 @@ class HGraphBuilder {
                                 HValue* from,
                                 HValue* to);

-  void BuildCopyElements(HValue* array,
-                         HValue* from_elements,
+  void BuildCopyElements(HValue* from_elements,
                          ElementsKind from_elements_kind,
                          HValue* to_elements,
                          ElementsKind to_elements_kind,
                          HValue* length,
                          HValue* capacity);

-  HValue* BuildCloneShallowArrayCommon(HValue* boilerplate,
-                                       HValue* allocation_site,
-                                       HValue* extra_size,
-                                       HValue** return_elements,
-                                       AllocationSiteMode mode);
-
-  HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
-                                    HValue* allocation_site,
-                                    AllocationSiteMode mode,
-                                    ElementsKind kind);
-
-  HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
-                                      HValue* allocation_site,
-                                      AllocationSiteMode mode);
-
-  HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
-                                         HValue* allocation_site,
-                                         AllocationSiteMode mode,
-                                         ElementsKind kind);
+  HValue* BuildCloneShallowArray(HValue* boilerplate,
+                                 HValue* allocation_site,
+                                 AllocationSiteMode mode,
+                                 ElementsKind kind,
+                                 int length);

   HValue* BuildElementIndexHash(HValue* index);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 03c43611fe..2fde5acd73 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -63,11 +63,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { eax, ebx, ecx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
-  static Representation representations[] = {
-      Representation::Tagged(),
-      Representation::Smi(),
-      Representation::Tagged() };
-  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -199,11 +194,6 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 3;
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
@@ -231,10 +221,6 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = eax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 640f0a57c9..65abb46374 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1729,7 +1729,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }

-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
+    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
+    __ mov(ecx, Immediate(constant_elements));
+    FastCloneShallowArrayStub stub(
+        isolate(),
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        allocation_site_mode,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ push(Immediate(Smi::FromInt(expr->literal_index())));
@@ -1737,11 +1754,25 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ push(Immediate(Smi::FromInt(flags)));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
+    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+
+    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    if (has_constant_fast_elements) {
+      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    }
+
     __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
     __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
diff --git a/src/lithium.cc b/src/lithium.cc
index 5cbed35228..2265353f47 100644
--- a/src/lithium.cc
+++ b/src/lithium.cc
@@ -449,8 +449,6 @@ Handle<Code> LChunk::Codegen() {
         CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

     CodeGenerator::PrintCode(code, info());
-    ASSERT(!(info()->GetMustNotHaveEagerFrame() &&
-             generator.NeedsEagerFrame()));
     return code;
   }
   assembler.AbortedCodeGeneration();
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 13032ca75f..baa813ae0e 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1805,7 +1805,18 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
   __ li(a1, Operand(constant_elements));
-  if (expr->depth() > 1) {
+  if (has_fast_elements && constant_elements_values->map() ==
+      isolate()->heap()->fixed_cow_array_map()) {
+    FastCloneShallowArrayStub stub(
+        isolate(),
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        allocation_site_mode,
+        length);
+    __ CallStub(&stub);
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
+                        1, a1, a2);
+  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ li(a0, Operand(Smi::FromInt(flags)));
     __ Push(a3, a2, a1, a0);
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 0e53eb390e..f65a94a179 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -59,11 +59,6 @@ void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
   static Register registers[] = { rax, rbx, rcx };
   descriptor->register_param_count_ = 3;
   descriptor->register_params_ = registers;
-  static Representation representations[] = {
-      Representation::Tagged(),
-      Representation::Smi(),
-      Representation::Tagged() };
-  descriptor->register_param_representations_ = representations;
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(
           Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
@@ -193,11 +188,6 @@ static void InitializeArrayConstructorDescriptor(
     descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 3;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
     descriptor->register_params_ = registers_variable_args;
   }

@@ -226,10 +216,6 @@ static void InitializeInternalArrayConstructorDescriptor(
     descriptor->stack_parameter_count_ = rax;
     descriptor->register_param_count_ = 2;
     descriptor->register_params_ = registers_variable_args;
-    static Representation representations[] = {
-        Representation::Tagged(),
-        Representation::Integer32() };
-    descriptor->register_param_representations_ = representations;
   }

   descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 1f03fbf9df..54007737e2 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1766,7 +1766,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
   }

-  if (expr->depth() > 1) {
+  Heap* heap = isolate()->heap();
+  if (has_constant_fast_elements &&
+      constant_elements_values->map() == heap->fixed_cow_array_map()) {
+    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+    __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
+    __ Move(rbx, Smi::FromInt(expr->literal_index()));
+    __ Move(rcx, constant_elements);
+    FastCloneShallowArrayStub stub(
+        isolate(),
+        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+        allocation_site_mode,
+        length);
+    __ CallStub(&stub);
+  } else if (expr->depth() > 1 || Serializer::enabled(isolate()) ||
+             length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Push(Smi::FromInt(expr->literal_index()));
@@ -1774,11 +1791,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ Push(Smi::FromInt(flags));
     __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
   } else {
+    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
+
+    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
+    // change, so it's possible to specialize the stub in advance.
+    if (has_constant_fast_elements) {
+      mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    }
+
     __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
     __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode, length);
     __ CallStub(&stub);
   }
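For quick reference, the mode-to-kind mapping that the restored ComputeElementsKind() in the code-stubs.h hunk encodes, restated as a compilable snippet (the two enum values stand in for V8's full ElementsKind set):

#include <cassert>

enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS,
            CLONE_ANY_ELEMENTS };
enum ElementsKind { FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS };

ElementsKind ComputeElementsKind(Mode mode) {
  switch (mode) {
    case CLONE_ELEMENTS:
    case COPY_ON_WRITE_ELEMENTS:
      return FAST_ELEMENTS;           // tagged-pointer backing store
    case CLONE_DOUBLE_ELEMENTS:
      return FAST_DOUBLE_ELEMENTS;    // unboxed double backing store
    case CLONE_ANY_ELEMENTS:
      break;                          // resolved by map checks at stub runtime
  }
  assert(false && "CLONE_ANY_ELEMENTS has no static elements kind");
  return FAST_ELEMENTS;
}

int main() {
  assert(ComputeElementsKind(COPY_ON_WRITE_ELEMENTS) == FAST_ELEMENTS);
  assert(ComputeElementsKind(CLONE_DOUBLE_ELEMENTS) == FAST_DOUBLE_ELEMENTS);
  return 0;
}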