[stubs] Port builtin for Array.push fast-case from Crankshaft to TF

Improves performance in simple, single element case by 5% and in multiple
elements cases by 2%.

BUG=chromium:608675
LOG=N

Review-Url: https://codereview.chromium.org/2497243002
Cr-Commit-Position: refs/heads/master@{#41368}
This commit is contained in:
danno 2016-11-29 08:57:58 -08:00 committed by Commit bot
parent f8b8983962
commit df2578d2ec
23 changed files with 556 additions and 250 deletions

View File

@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(x0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -150,8 +150,9 @@ MUST_USE_RESULT static Object* CallJsIntrinsic(Isolate* isolate,
isolate, isolate,
Execution::Call(isolate, function, args.receiver(), argc, argv.start())); Execution::Call(isolate, function, args.receiver(), argc, argv.start()));
} }
} // namespace
Object* DoArrayPush(Isolate* isolate, BuiltinArguments args) { BUILTIN(ArrayPush) {
HandleScope scope(isolate); HandleScope scope(isolate);
Handle<Object> receiver = args.receiver(); Handle<Object> receiver = args.receiver();
if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1)) { if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1)) {
@ -174,19 +175,163 @@ Object* DoArrayPush(Isolate* isolate, BuiltinArguments args) {
int new_length = accessor->Push(array, &args, to_add); int new_length = accessor->Push(array, &args, to_add);
return Smi::FromInt(new_length); return Smi::FromInt(new_length);
} }
} // namespace
BUILTIN(ArrayPush) { return DoArrayPush(isolate, args); } void Builtins::Generate_FastArrayPush(compiler::CodeAssemblerState* state) {
typedef compiler::Node Node;
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;
CodeStubAssembler assembler(state);
Variable arg_index(&assembler, MachineType::PointerRepresentation());
Label default_label(&assembler, &arg_index);
Label smi_transition(&assembler);
Label object_push_pre(&assembler);
Label object_push(&assembler, &arg_index);
Label double_push(&assembler, &arg_index);
Label double_transition(&assembler);
Label runtime(&assembler, Label::kDeferred);
// TODO(verwaest): This is a temporary helper until the FastArrayPush stub can Node* argc = assembler.Parameter(1);
// tailcall to the builtin directly. Node* context = assembler.Parameter(2);
RUNTIME_FUNCTION(Runtime_ArrayPush) { Node* new_target = assembler.Parameter(0);
DCHECK_EQ(2, args.length());
Arguments* incoming = reinterpret_cast<Arguments*>(args[0]); CodeStubArguments args(&assembler, argc);
// Rewrap the arguments as builtins arguments. Node* receiver = args.GetReceiver();
int argc = incoming->length() + BuiltinArguments::kNumExtraArgsWithReceiver; Node* kind = nullptr;
BuiltinArguments caller_args(argc, incoming->arguments() + 1);
return DoArrayPush(isolate, caller_args); Label fast(&assembler);
{
assembler.BranchIfFastJSArray(
receiver, context, CodeStubAssembler::FastJSArrayAccessMode::ANY_ACCESS,
&fast, &runtime);
}
assembler.Bind(&fast);
{
// Disallow pushing onto prototypes. It might be the JSArray prototype.
// Disallow pushing onto non-extensible objects.
assembler.Comment("Disallow pushing onto prototypes");
Node* map = assembler.LoadMap(receiver);
Node* bit_field2 = assembler.LoadMapBitField2(map);
int mask = static_cast<int>(Map::IsPrototypeMapBits::kMask) |
(1 << Map::kIsExtensible);
Node* test = assembler.Word32And(bit_field2, assembler.Int32Constant(mask));
assembler.GotoIf(
assembler.Word32NotEqual(
test, assembler.Int32Constant(1 << Map::kIsExtensible)),
&runtime);
// Disallow pushing onto arrays in dictionary named property mode. We need
// to figure out whether the length property is still writable.
assembler.Comment(
"Disallow pushing onto arrays in dictionary named property mode");
Node* bit_field3 = assembler.LoadMapBitField3(map);
assembler.GotoIf(assembler.IsSetWord32<Map::DictionaryMap>(bit_field3),
&runtime);
// Check whether the length property is writable. The length property is the
// only default named property on arrays. It's nonconfigurable, hence is
// guaranteed to stay the first property.
Node* descriptors = assembler.LoadMapDescriptors(map);
Node* details = assembler.LoadFixedArrayElement(
descriptors,
assembler.Int32Constant(DescriptorArray::ToDetailsIndex(0)));
mask = READ_ONLY << PropertyDetails::AttributesField::kShift;
Node* mask_node = assembler.SmiConstant(mask);
test = assembler.WordAnd(details, mask_node);
assembler.GotoIf(assembler.WordEqual(test, mask_node), &runtime);
arg_index.Bind(assembler.IntPtrConstant(0));
kind = assembler.DecodeWord32<Map::ElementsKindBits>(bit_field2);
assembler.GotoIf(
assembler.IntPtrGreaterThan(
kind, assembler.IntPtrConstant(FAST_HOLEY_SMI_ELEMENTS)),
&object_push_pre);
Node* new_length = assembler.BuildAppendJSArray(
FAST_SMI_ELEMENTS, context, receiver, args, arg_index, &smi_transition);
args.PopAndReturn(new_length);
}
// If the argument is not a smi, then use a heavyweight SetProperty to
// transition the array for only the single next element. If the argument is
// a smi, the failure is due to some other reason and we should fall back on
// the most generic implementation for the rest of the array.
assembler.Bind(&smi_transition);
{
Node* arg = args.AtIndex(arg_index.value());
assembler.GotoIf(assembler.TaggedIsSmi(arg), &default_label);
Node* length = assembler.LoadJSArrayLength(receiver);
// TODO(danno): Use the KeyedStoreGeneric stub here when possible,
// calling into the runtime to do the elements transition is overkill.
assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
assembler.SmiConstant(STRICT));
assembler.Increment(arg_index);
assembler.GotoIfNotNumber(arg, &object_push);
assembler.Goto(&double_push);
}
assembler.Bind(&object_push_pre);
{
assembler.Branch(assembler.IntPtrGreaterThan(
kind, assembler.IntPtrConstant(FAST_HOLEY_ELEMENTS)),
&double_push, &object_push);
}
assembler.Bind(&object_push);
{
Node* new_length = assembler.BuildAppendJSArray(
FAST_ELEMENTS, context, receiver, args, arg_index, &default_label);
args.PopAndReturn(new_length);
}
assembler.Bind(&double_push);
{
Node* new_length =
assembler.BuildAppendJSArray(FAST_DOUBLE_ELEMENTS, context, receiver,
args, arg_index, &double_transition);
args.PopAndReturn(new_length);
}
// If the argument is not a double, then use a heavyweight SetProperty to
// transition the array for only the single next element. If the argument is
// a double, the failure is due to some other reason and we should fall back
// on the most generic implementation for the rest of the array.
assembler.Bind(&double_transition);
{
Node* arg = args.AtIndex(arg_index.value());
assembler.GotoIfNumber(arg, &default_label);
Node* length = assembler.LoadJSArrayLength(receiver);
// TODO(danno): Use the KeyedStoreGeneric stub here when possible,
// calling into the runtime to do the elements transition is overkill.
assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
assembler.SmiConstant(STRICT));
assembler.Increment(arg_index);
assembler.Goto(&object_push);
}
// Fallback that stores un-processed arguments using the full, heavyweight
// SetProperty machinery.
assembler.Bind(&default_label);
{
args.ForEach(
[receiver, context, &arg_index](CodeStubAssembler* assembler,
Node* arg) {
Node* length = assembler->LoadJSArrayLength(receiver);
assembler->CallRuntime(Runtime::kSetProperty, context, receiver,
length, arg, assembler->SmiConstant(STRICT));
},
arg_index.value());
args.PopAndReturn(assembler.LoadJSArrayLength(receiver));
}
assembler.Bind(&runtime);
{
Node* target = assembler.LoadFromFrame(
StandardFrameConstants::kFunctionOffset, MachineType::TaggedPointer());
assembler.TailCallStub(CodeFactory::ArrayPush(assembler.isolate()), context,
target, new_target, argc);
}
} }
BUILTIN(ArrayPop) { BUILTIN(ArrayPop) {
@ -1294,7 +1439,9 @@ void Builtins::Generate_ArrayIncludes(compiler::CodeAssemblerState* state) {
// Take slow path if not a JSArray, if retrieving elements requires // Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required. // traversing prototype, or if access checks are required.
assembler.BranchIfFastJSArray(array, context, &init_len, &call_runtime); assembler.BranchIfFastJSArray(
array, context, CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
&init_len, &call_runtime);
assembler.Bind(&init_len); assembler.Bind(&init_len);
{ {
@ -1735,7 +1882,9 @@ void Builtins::Generate_ArrayIndexOf(compiler::CodeAssemblerState* state) {
// Take slow path if not a JSArray, if retrieving elements requires // Take slow path if not a JSArray, if retrieving elements requires
// traversing prototype, or if access checks are required. // traversing prototype, or if access checks are required.
assembler.BranchIfFastJSArray(array, context, &init_len, &call_runtime); assembler.BranchIfFastJSArray(
array, context, CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
&init_len, &call_runtime);
assembler.Bind(&init_len); assembler.Bind(&init_len);
{ {

View File

@ -218,6 +218,7 @@ namespace internal {
TFJ(ArrayIndexOf, 2) \ TFJ(ArrayIndexOf, 2) \
CPP(ArrayPop) \ CPP(ArrayPop) \
CPP(ArrayPush) \ CPP(ArrayPush) \
TFJ(FastArrayPush, -1) \
CPP(ArrayShift) \ CPP(ArrayShift) \
CPP(ArraySlice) \ CPP(ArraySlice) \
CPP(ArraySplice) \ CPP(ArraySplice) \

View File

@ -495,5 +495,10 @@ Callable CodeFactory::InterpreterOnStackReplacement(Isolate* isolate) {
ContextOnlyDescriptor(isolate)); ContextOnlyDescriptor(isolate));
} }
// static
// Returns a Callable for the generic C++ ArrayPush builtin. This is the
// slow path that the TurboFan FastArrayPush builtin tail-calls into when
// its fast-case checks fail; it uses the generic BuiltinDescriptor calling
// convention (target, new_target, argc).
Callable CodeFactory::ArrayPush(Isolate* isolate) {
return Callable(isolate->builtins()->ArrayPush(), BuiltinDescriptor(isolate));
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -166,6 +166,8 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable InterpreterPushArgsAndConstructArray(Isolate* isolate); static Callable InterpreterPushArgsAndConstructArray(Isolate* isolate);
static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1); static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1);
static Callable InterpreterOnStackReplacement(Isolate* isolate); static Callable InterpreterOnStackReplacement(Isolate* isolate);
static Callable ArrayPush(Isolate* isolate);
}; };
} // namespace internal } // namespace internal

View File

@ -525,6 +525,12 @@ Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
IntPtrConstant(0)); IntPtrConstant(0));
} }
// Returns a word-sized boolean node that is true iff |a| is NOT a Smi,
// i.e. its low tag bits under kSmiTagMask are non-zero. Complement of
// TaggedIsSmi above.
Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
return WordNotEqual(
WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
IntPtrConstant(0));
}
Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
IntPtrConstant(0)); IntPtrConstant(0));
@ -653,8 +659,9 @@ void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
if_true, if_false); if_true, if_false);
} }
void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context, void CodeStubAssembler::BranchIfFastJSArray(
Label* if_true, Label* if_false) { Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
Label* if_true, Label* if_false) {
// Bailout if receiver is a Smi. // Bailout if receiver is a Smi.
GotoIf(TaggedIsSmi(object), if_false); GotoIf(TaggedIsSmi(object), if_false);
@ -670,8 +677,9 @@ void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
GotoUnless(IsFastElementsKind(elements_kind), if_false); GotoUnless(IsFastElementsKind(elements_kind), if_false);
// Check prototype chain if receiver does not have packed elements. // Check prototype chain if receiver does not have packed elements.
GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true); if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
}
BranchIfPrototypesHaveNoElements(map, if_true, if_false); BranchIfPrototypesHaveNoElements(map, if_true, if_false);
} }
@ -1361,6 +1369,79 @@ Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
return StoreNoWriteBarrier(rep, object, offset, value); return StoreNoWriteBarrier(rep, object, offset, value);
} }
// Appends the stub arguments from |arg_index| onward to |array|, assuming
// the array's backing store is of ElementsKind |kind|. Grows the elements
// backing store first if the projected new length does not fit. If an
// individual argument is incompatible with |kind| (a non-Smi for
// FAST_SMI_ELEMENTS, a non-Number for FAST_DOUBLE_ELEMENTS) or growing
// fails, the array's length field is first updated to cover the elements
// already stored and |arg_index| is advanced past them, then control jumps
// to |bailout| so the caller can handle the remaining arguments more
// generically. On success, returns the new array length as a tagged value.
Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* context,
Node* array,
CodeStubArguments& args,
Variable& arg_index,
Label* bailout) {
Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
// pre_bailout fixes up length/arg_index before jumping to |bailout|.
Label pre_bailout(this);
Label success(this);
Variable elements(this, MachineRepresentation::kTagged);
ParameterMode mode = OptimalParameterMode();
// |length| is kept untagged (in |mode|) while elements are stored.
Variable length(this, OptimalParameterRepresentation());
length.Bind(UntagParameter(LoadJSArrayLength(array), mode));
elements.Bind(LoadElements(array));
Node* capacity =
UntagParameter(LoadFixedArrayBaseLength(elements.value()), mode);
// Resize the capacity of the fixed array if it doesn't fit.
Label fits(this, &elements);
Node* first = arg_index.value();
// Number of arguments still to be appended.
Node* growth = IntPtrSubFoldConstants(args.GetLength(), first);
Node* new_length = IntPtrAdd(
mode == INTPTR_PARAMETERS ? growth : SmiTag(growth), length.value());
GotoUnless(IntPtrGreaterThanOrEqual(new_length, capacity), &fits);
Node* new_capacity = CalculateNewElementsCapacity(
IntPtrAdd(new_length, IntPtrOrSmiConstant(1, mode)), mode);
// Growing may fail (e.g. capacity overflow); bail out with fix-up.
elements.Bind(GrowElementsCapacity(array, elements.value(), kind, kind,
capacity, new_capacity, mode,
&pre_bailout));
Goto(&fits);
Bind(&fits);
// Push each argument onto the end of the array now that there is enough
// capacity.
CodeStubAssembler::VariableList push_vars({&length, &elements}, zone());
args.ForEach(
push_vars,
[kind, mode, &length, &elements, &pre_bailout](
CodeStubAssembler* assembler, Node* arg) {
// Type-check the argument against the elements kind before storing.
if (IsFastSmiElementsKind(kind)) {
assembler->GotoIf(assembler->TaggedIsNotSmi(arg), &pre_bailout);
} else if (IsFastDoubleElementsKind(kind)) {
assembler->GotoIfNotNumber(arg, &pre_bailout);
}
if (IsFastDoubleElementsKind(kind)) {
Node* double_value = assembler->ChangeNumberToFloat64(arg);
// Silence NaNs so no signalling bit patterns end up in the
// double backing store.
assembler->StoreFixedDoubleArrayElement(
elements.value(), length.value(),
assembler->Float64SilenceNaN(double_value), mode);
} else {
// Smi stores never need a write barrier; object stores do.
WriteBarrierMode barrier_mode = IsFastSmiElementsKind(kind)
? SKIP_WRITE_BARRIER
: UPDATE_WRITE_BARRIER;
assembler->StoreFixedArrayElement(elements.value(), length.value(),
arg, barrier_mode, 0, mode);
}
assembler->Increment(length, 1, mode);
},
first, nullptr);
// All arguments stored: publish the new length on the array.
length.Bind(TagParameter(length.value(), mode));
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
Goto(&success);
Bind(&pre_bailout);
// Partial success: record how many elements were actually appended, store
// the corrected length, and advance arg_index by that count so the caller
// resumes with the first unprocessed argument.
length.Bind(TagParameter(length.value(), mode));
Node* diff = SmiSub(length.value(), LoadJSArrayLength(array));
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
Goto(bailout);
Bind(&success);
return length.value();
}
Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) { Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
Node* result = Allocate(HeapNumber::kSize, kNone); Node* result = Allocate(HeapNumber::kSize, kNone);
Heap::RootListIndex heap_map_index = Heap::RootListIndex heap_map_index =
@ -2130,9 +2211,7 @@ void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
rep, to_string, rep, to_string,
index_same ? offset : current_to_offset.value(), value); index_same ? offset : current_to_offset.value(), value);
if (!index_same) { if (!index_same) {
current_to_offset.Bind(assembler->IntPtrAdd( assembler->Increment(current_to_offset, to_increment);
current_to_offset.value(),
assembler->IntPtrConstant(to_increment)));
} }
}, },
from_increment, IndexAdvanceMode::kPost); from_increment, IndexAdvanceMode::kPost);
@ -2554,6 +2633,25 @@ Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
return var_value.value(); return var_value.value();
} }
// Converts a Number |value| (Smi or HeapNumber) to an untagged float64.
// Smis are untagged and int32->float64 converted; HeapNumbers have their
// value field loaded directly. |value| must already be known to be a
// Number (callers guard with GotoIfNotNumber or similar).
Node* CodeStubAssembler::ChangeNumberToFloat64(compiler::Node* value) {
Variable result(this, MachineRepresentation::kFloat64);
Label smi(this);
Label done(this, &result);
GotoIf(TaggedIsSmi(value), &smi);
// HeapNumber case: read the raw double payload.
result.Bind(
LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
Goto(&done);
Bind(&smi);
{
result.Bind(ChangeInt32ToFloat64(SmiUntag(value)));
Goto(&done);
}
Bind(&done);
return result.value();
}
Node* CodeStubAssembler::ToThisValue(Node* context, Node* value, Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
PrimitiveType primitive_type, PrimitiveType primitive_type,
char const* method_name) { char const* method_name) {
@ -3966,6 +4064,16 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
} }
} }
// Adds |value| (encoded per |mode|: raw intptr or Smi) to |variable| in
// place. The DCHECKs enforce that the variable's machine representation is
// consistent with the chosen ParameterMode, since IntPtrAdd is used for
// both encodings (Smi addition is safe as plain word addition when both
// operands are Smis).
void CodeStubAssembler::Increment(Variable& variable, int value,
ParameterMode mode) {
DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
variable.rep() == MachineType::PointerRepresentation());
DCHECK_IMPLIES(mode == SMI_PARAMETERS,
variable.rep() == MachineRepresentation::kTagged ||
variable.rep() == MachineRepresentation::kTaggedSigned);
variable.Bind(IntPtrAdd(variable.value(), IntPtrOrSmiConstant(value, mode)));
}
void CodeStubAssembler::Use(Label* label) { void CodeStubAssembler::Use(Label* label) {
GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label); GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
} }
@ -4126,7 +4234,6 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
Goto(&loop); Goto(&loop);
Bind(&loop); Bind(&loop);
{ {
Node* count = var_count.value();
Node* entry = var_entry.value(); Node* entry = var_entry.value();
Node* index = EntryToIndex<Dictionary>(entry); Node* index = EntryToIndex<Dictionary>(entry);
@ -4143,10 +4250,9 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
} }
// See Dictionary::NextProbe(). // See Dictionary::NextProbe().
count = IntPtrAdd(count, IntPtrConstant(1)); Increment(var_count);
entry = WordAnd(IntPtrAdd(entry, count), mask); entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
var_count.Bind(count);
var_entry.Bind(entry); var_entry.Bind(entry);
Goto(&loop); Goto(&loop);
} }
@ -4209,7 +4315,6 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
Goto(&loop); Goto(&loop);
Bind(&loop); Bind(&loop);
{ {
Node* count = var_count.value();
Node* entry = var_entry->value(); Node* entry = var_entry->value();
Node* index = EntryToIndex<Dictionary>(entry); Node* index = EntryToIndex<Dictionary>(entry);
@ -4237,10 +4342,9 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
Bind(&next_probe); Bind(&next_probe);
// See Dictionary::NextProbe(). // See Dictionary::NextProbe().
count = IntPtrAdd(count, IntPtrConstant(1)); Increment(var_count);
entry = WordAnd(IntPtrAdd(entry, count), mask); entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
var_count.Bind(count);
var_entry->Bind(entry); var_entry->Bind(entry);
Goto(&loop); Goto(&loop);
} }
@ -5902,11 +6006,11 @@ void CodeStubAssembler::BuildFastLoop(
Bind(&loop); Bind(&loop);
{ {
if (mode == IndexAdvanceMode::kPre) { if (mode == IndexAdvanceMode::kPre) {
var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); Increment(var, increment);
} }
body(this, var.value()); body(this, var.value());
if (mode == IndexAdvanceMode::kPost) { if (mode == IndexAdvanceMode::kPost) {
var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); Increment(var, increment);
} }
Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop); Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
} }
@ -7545,6 +7649,20 @@ Node* CodeStubAssembler::NumberInc(Node* value) {
return var_result.value(); return var_result.value();
} }
// Jumps to |is_not_number| unless |input| is a Number (Smi or HeapNumber).
// Falls through when |input| is a Number.
void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
Label is_number(this);
GotoIf(TaggedIsSmi(input), &is_number);
Node* input_map = LoadMap(input);
Branch(IsHeapNumberMap(input_map), &is_number, is_not_number);
Bind(&is_number);
}
// Jumps to |is_number| if |input| is a Number (Smi or HeapNumber);
// otherwise falls through.
void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
GotoIf(TaggedIsSmi(input), is_number);
Node* input_map = LoadMap(input);
GotoIf(IsHeapNumberMap(input_map), is_number);
}
Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map, Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
Node* array_type, Node* context, Node* array_type, Node* context,
IterationKind mode) { IterationKind mode) {
@ -7601,7 +7719,8 @@ Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
Bind(&if_isgeneric); Bind(&if_isgeneric);
{ {
Label if_isfast(this), if_isslow(this); Label if_isfast(this), if_isslow(this);
BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
&if_isfast, &if_isslow);
Bind(&if_isfast); Bind(&if_isfast);
{ {
@ -7635,7 +7754,8 @@ Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
Bind(&if_isgeneric); Bind(&if_isgeneric);
{ {
Label if_isfast(this), if_isslow(this); Label if_isfast(this), if_isslow(this);
BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
&if_isfast, &if_isslow);
Bind(&if_isfast); Bind(&if_isfast);
{ {
@ -7778,13 +7898,14 @@ CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler, Node* argc,
} }
} }
Node* CodeStubArguments::GetReceiver() { Node* CodeStubArguments::GetReceiver() const {
return assembler_->Load(MachineType::AnyTagged(), arguments_, return assembler_->Load(MachineType::AnyTagged(), arguments_,
assembler_->IntPtrConstant(kPointerSize)); assembler_->IntPtrConstant(kPointerSize));
} }
Node* CodeStubArguments::AtIndex(Node* index, Node* CodeStubArguments::AtIndex(Node* index,
CodeStubAssembler::ParameterMode mode) { CodeStubAssembler::ParameterMode mode) const {
typedef compiler::Node Node;
Node* negated_index = assembler_->IntPtrSubFoldConstants( Node* negated_index = assembler_->IntPtrSubFoldConstants(
assembler_->IntPtrOrSmiConstant(0, mode), index); assembler_->IntPtrOrSmiConstant(0, mode), index);
Node* offset = Node* offset =
@ -7792,7 +7913,7 @@ Node* CodeStubArguments::AtIndex(Node* index,
return assembler_->Load(MachineType::AnyTagged(), arguments_, offset); return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
} }
Node* CodeStubArguments::AtIndex(int index) { Node* CodeStubArguments::AtIndex(int index) const {
return AtIndex(assembler_->IntPtrConstant(index)); return AtIndex(assembler_->IntPtrConstant(index));
} }

View File

@ -15,6 +15,7 @@ namespace v8 {
namespace internal { namespace internal {
class CallInterfaceDescriptor; class CallInterfaceDescriptor;
class CodeStubArguments;
class StatsCounter; class StatsCounter;
class StubCache; class StubCache;
@ -74,6 +75,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return Is64() ? INTPTR_PARAMETERS : SMI_PARAMETERS; return Is64() ? INTPTR_PARAMETERS : SMI_PARAMETERS;
} }
// Machine representation matching OptimalParameterMode(): a raw pointer
// word on 64-bit targets (INTPTR_PARAMETERS), a tagged Smi otherwise.
// Use this for Variables that hold values in the optimal parameter mode.
MachineRepresentation OptimalParameterRepresentation() const {
return OptimalParameterMode() == INTPTR_PARAMETERS
? MachineType::PointerRepresentation()
: MachineRepresentation::kTaggedSigned;
}
Node* UntagParameter(Node* value, ParameterMode mode) { Node* UntagParameter(Node* value, ParameterMode mode) {
if (mode != SMI_PARAMETERS) value = SmiUntag(value); if (mode != SMI_PARAMETERS) value = SmiUntag(value);
return value; return value;
@ -144,6 +151,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Smi | HeapNumber operations. // Smi | HeapNumber operations.
Node* NumberInc(Node* value); Node* NumberInc(Node* value);
void GotoIfNotNumber(Node* value, Label* is_not_number);
void GotoIfNumber(Node* value, Label* is_number);
// Allocate an object of the given size. // Allocate an object of the given size.
Node* Allocate(Node* size, AllocationFlags flags = kNone); Node* Allocate(Node* size, AllocationFlags flags = kNone);
@ -158,6 +167,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Check a value for smi-ness // Check a value for smi-ness
Node* TaggedIsSmi(Node* a); Node* TaggedIsSmi(Node* a);
Node* TaggedIsNotSmi(Node* a);
// Check that the value is a non-negative smi. // Check that the value is a non-negative smi.
Node* WordIsPositiveSmi(Node* a); Node* WordIsPositiveSmi(Node* a);
// Check that a word has a word-aligned address. // Check that a word has a word-aligned address.
@ -195,7 +205,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false); void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false);
void BranchIfJSObject(Node* object, Label* if_true, Label* if_false); void BranchIfJSObject(Node* object, Label* if_true, Label* if_false);
void BranchIfFastJSArray(Node* object, Node* context, Label* if_true,
enum class FastJSArrayAccessMode { INBOUNDS_READ, ANY_ACCESS };
void BranchIfFastJSArray(Node* object, Node* context,
FastJSArrayAccessMode mode, Label* if_true,
Label* if_false); Label* if_false);
// Load value from current frame by given offset in bytes. // Load value from current frame by given offset in bytes.
@ -349,6 +362,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* object, Node* index, Node* value, Node* object, Node* index, Node* value,
ParameterMode parameter_mode = INTEGER_PARAMETERS); ParameterMode parameter_mode = INTEGER_PARAMETERS);
Node* BuildAppendJSArray(ElementsKind kind, Node* context, Node* array,
CodeStubArguments& args, Variable& arg_index,
Label* bailout);
void StoreFieldsNoWriteBarrier(Node* start_address, Node* end_address, void StoreFieldsNoWriteBarrier(Node* start_address, Node* end_address,
Node* value); Node* value);
@ -518,6 +535,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* ChangeFloat64ToTagged(Node* value); Node* ChangeFloat64ToTagged(Node* value);
Node* ChangeInt32ToTagged(Node* value); Node* ChangeInt32ToTagged(Node* value);
Node* ChangeUint32ToTagged(Node* value); Node* ChangeUint32ToTagged(Node* value);
Node* ChangeNumberToFloat64(Node* value);
// Type conversions. // Type conversions.
// Throws a TypeError for {method_name} if {value} is not coercible to Object, // Throws a TypeError for {method_name} if {value} is not coercible to Object,
@ -663,6 +681,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void IncrementCounter(StatsCounter* counter, int delta); void IncrementCounter(StatsCounter* counter, int delta);
void DecrementCounter(StatsCounter* counter, int delta); void DecrementCounter(StatsCounter* counter, int delta);
void Increment(Variable& variable, int value = 1,
ParameterMode mode = INTPTR_PARAMETERS);
// Generates "if (false) goto label" code. Useful for marking a label as // Generates "if (false) goto label" code. Useful for marking a label as
// "live" to avoid assertion failures during graph building. In the resulting // "live" to avoid assertion failures during graph building. In the resulting
// code this check will be eliminated. // code this check will be eliminated.
@ -1018,13 +1039,15 @@ class CodeStubArguments {
CodeStubAssembler::ParameterMode mode = CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS); CodeStubAssembler::INTPTR_PARAMETERS);
Node* GetReceiver(); Node* GetReceiver() const;
// |index| is zero-based and does not include the receiver // |index| is zero-based and does not include the receiver
Node* AtIndex(Node* index, CodeStubAssembler::ParameterMode mode = Node* AtIndex(Node* index, CodeStubAssembler::ParameterMode mode =
CodeStubAssembler::INTPTR_PARAMETERS); CodeStubAssembler::INTPTR_PARAMETERS) const;
Node* AtIndex(int index); Node* AtIndex(int index) const;
Node* GetLength() const { return argc_; }
typedef std::function<void(CodeStubAssembler* assembler, Node* arg)> typedef std::function<void(CodeStubAssembler* assembler, Node* arg)>
ForEachBodyFunction; ForEachBodyFunction;

View File

@ -375,158 +375,6 @@ HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
return new_length; return new_length;
} }
template <>
HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
// TODO(verwaest): Fix deoptimizer messages.
HValue* argc = GetArgumentsLength();
HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
graph()->GetConstantMinus1());
BuildCheckHeapObject(object);
HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
Add<HCheckInstanceType>(object, HCheckInstanceType::IS_JS_ARRAY);
// Disallow pushing onto prototypes. It might be the JSArray prototype.
// Disallow pushing onto non-extensible objects.
{
HValue* bit_field2 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
HValue* mask =
Add<HConstant>(static_cast<int>(Map::IsPrototypeMapBits::kMask) |
(1 << Map::kIsExtensible));
HValue* bits = AddUncasted<HBitwise>(Token::BIT_AND, bit_field2, mask);
IfBuilder check(this);
check.If<HCompareNumericAndBranch>(
bits, Add<HConstant>(1 << Map::kIsExtensible), Token::NE);
check.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check.End();
}
// Disallow pushing onto arrays in dictionary named property mode. We need to
// figure out whether the length property is still writable.
{
HValue* bit_field3 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
HValue* mask = Add<HConstant>(static_cast<int>(Map::DictionaryMap::kMask));
HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, bit_field3, mask);
IfBuilder check(this);
check.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
check.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check.End();
}
// Check whether the length property is writable. The length property is the
// only default named property on arrays. It's nonconfigurable, hence is
// guaranteed to stay the first property.
{
HValue* descriptors =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapDescriptors());
HValue* details = Add<HLoadKeyed>(
descriptors, Add<HConstant>(DescriptorArray::ToDetailsIndex(0)),
nullptr, nullptr, FAST_SMI_ELEMENTS);
HValue* mask =
Add<HConstant>(READ_ONLY << PropertyDetails::AttributesField::kShift);
HValue* bit = AddUncasted<HBitwise>(Token::BIT_AND, details, mask);
IfBuilder readonly(this);
readonly.If<HCompareNumericAndBranch>(bit, mask, Token::EQ);
readonly.ThenDeopt(DeoptimizeReason::kFastPathFailed);
readonly.End();
}
HValue* null = Add<HLoadRoot>(Heap::kNullValueRootIndex);
HValue* empty = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
environment()->Push(map);
LoopBuilder check_prototypes(this);
check_prototypes.BeginBody(1);
{
HValue* parent_map = environment()->Pop();
HValue* prototype = Add<HLoadNamedField>(parent_map, nullptr,
HObjectAccess::ForPrototype());
IfBuilder is_null(this);
is_null.If<HCompareObjectEqAndBranch>(prototype, null);
is_null.Then();
check_prototypes.Break();
is_null.End();
HValue* prototype_map =
Add<HLoadNamedField>(prototype, nullptr, HObjectAccess::ForMap());
HValue* instance_type = Add<HLoadNamedField>(
prototype_map, nullptr, HObjectAccess::ForMapInstanceType());
IfBuilder check_instance_type(this);
check_instance_type.If<HCompareNumericAndBranch>(
instance_type, Add<HConstant>(LAST_CUSTOM_ELEMENTS_RECEIVER),
Token::LTE);
check_instance_type.ThenDeopt(DeoptimizeReason::kFastPathFailed);
check_instance_type.End();
HValue* elements = Add<HLoadNamedField>(
prototype, nullptr, HObjectAccess::ForElementsPointer());
IfBuilder no_elements(this);
no_elements.IfNot<HCompareObjectEqAndBranch>(elements, empty);
no_elements.ThenDeopt(DeoptimizeReason::kFastPathFailed);
no_elements.End();
environment()->Push(prototype_map);
}
check_prototypes.EndBody();
HValue* bit_field2 =
Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
HValue* kind = BuildDecodeField<Map::ElementsKindBits>(bit_field2);
// Below we only check the upper bound of the relevant ranges to include both
// holey and non-holey versions. We check them in order smi, object, double
// since smi < object < double.
STATIC_ASSERT(FAST_SMI_ELEMENTS < FAST_HOLEY_SMI_ELEMENTS);
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS < FAST_HOLEY_ELEMENTS);
STATIC_ASSERT(FAST_ELEMENTS < FAST_HOLEY_ELEMENTS);
STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FAST_HOLEY_DOUBLE_ELEMENTS);
IfBuilder has_smi_elements(this);
has_smi_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_SMI_ELEMENTS), Token::LTE);
has_smi_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_SMI_ELEMENTS);
environment()->Push(new_length);
}
has_smi_elements.Else();
{
IfBuilder has_object_elements(this);
has_object_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_ELEMENTS), Token::LTE);
has_object_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_ELEMENTS);
environment()->Push(new_length);
}
has_object_elements.Else();
{
IfBuilder has_double_elements(this);
has_double_elements.If<HCompareNumericAndBranch>(
kind, Add<HConstant>(FAST_HOLEY_DOUBLE_ELEMENTS), Token::LTE);
has_double_elements.Then();
{
HValue* new_length = BuildPushElement(object, argc, argument_elements,
FAST_HOLEY_DOUBLE_ELEMENTS);
environment()->Push(new_length);
}
has_double_elements.ElseDeopt(DeoptimizeReason::kFastPathFailed);
has_double_elements.End();
}
has_object_elements.End();
}
has_smi_elements.End();
return environment()->Pop();
}
Handle<Code> FastArrayPushStub::GenerateCode() { return DoGenerateCode(this); }
template <> template <>
HValue* CodeStubGraphBuilder<FastFunctionBindStub>::BuildCodeStub() { HValue* CodeStubGraphBuilder<FastFunctionBindStub>::BuildCodeStub() {
// TODO(verwaest): Fix deoptimizer messages. // TODO(verwaest): Fix deoptimizer messages.

View File

@ -62,7 +62,6 @@ class ObjectLiteral;
/* These builtins w/ JS linkage are */ \ /* These builtins w/ JS linkage are */ \
/* just fast-cases of C++ builtins. They */ \ /* just fast-cases of C++ builtins. They */ \
/* require varg support from TF */ \ /* require varg support from TF */ \
V(FastArrayPush) \
V(FastFunctionBind) \ V(FastFunctionBind) \
/* These will be ported/eliminated */ \ /* These will be ported/eliminated */ \
/* as part of the new IC system, ask */ \ /* as part of the new IC system, ask */ \
@ -1011,15 +1010,6 @@ class GrowArrayElementsStub : public TurboFanCodeStub {
DEFINE_TURBOFAN_CODE_STUB(GrowArrayElements, TurboFanCodeStub); DEFINE_TURBOFAN_CODE_STUB(GrowArrayElements, TurboFanCodeStub);
}; };
class FastArrayPushStub : public HydrogenCodeStub {
public:
explicit FastArrayPushStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(VarArgFunction);
DEFINE_HYDROGEN_CODE_STUB(FastArrayPush, HydrogenCodeStub);
};
class FastFunctionBindStub : public HydrogenCodeStub { class FastFunctionBindStub : public HydrogenCodeStub {
public: public:
explicit FastFunctionBindStub(Isolate* isolate) : HydrogenCodeStub(isolate) {} explicit FastFunctionBindStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}

View File

@ -34,11 +34,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -390,15 +390,16 @@ void CallFunctionWithFeedbackAndVectorDescriptor::InitializePlatformIndependent(
void BuiltinDescriptor::InitializePlatformIndependent( void BuiltinDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
MachineType machine_types[] = {MachineType::AnyTagged(), MachineType machine_types[] = {
MachineType::Int32()}; MachineType::AnyTagged(), MachineType::AnyTagged(), MachineType::Int32()};
data->InitializePlatformIndependent(arraysize(machine_types), 0, data->InitializePlatformIndependent(arraysize(machine_types), 0,
machine_types); machine_types);
} }
void BuiltinDescriptor::InitializePlatformSpecific( void BuiltinDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
Register registers[] = {NewTargetRegister(), ArgumentsCountRegister()}; Register registers[] = {TargetRegister(), NewTargetRegister(),
ArgumentsCountRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers); data->InitializePlatformSpecific(arraysize(registers), registers);
} }
@ -409,6 +410,10 @@ const Register BuiltinDescriptor::NewTargetRegister() {
return kJavaScriptCallNewTargetRegister; return kJavaScriptCallNewTargetRegister;
} }
const Register BuiltinDescriptor::TargetRegister() {
return kJSFunctionRegister;
}
void ArrayNoArgumentConstructorDescriptor::InitializePlatformIndependent( void ArrayNoArgumentConstructorDescriptor::InitializePlatformIndependent(
CallInterfaceDescriptorData* data) { CallInterfaceDescriptorData* data) {
// kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter // kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter

View File

@ -618,6 +618,7 @@ class BuiltinDescriptor : public CallInterfaceDescriptor {
CallInterfaceDescriptor) CallInterfaceDescriptor)
static const Register ArgumentsCountRegister(); static const Register ArgumentsCountRegister();
static const Register NewTargetRegister(); static const Register NewTargetRegister();
static const Register TargetRegister();
}; };
class ArrayNoArgumentConstructorDescriptor : public CallInterfaceDescriptor { class ArrayNoArgumentConstructorDescriptor : public CallInterfaceDescriptor {

View File

@ -33,11 +33,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -32,11 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -65,12 +65,7 @@ RUNTIME_FUNCTION(Runtime_SpecialArrayFunctions) {
isolate->factory()->NewJSObject(isolate->object_function()); isolate->factory()->NewJSObject(isolate->object_function());
InstallBuiltin(isolate, holder, "pop", Builtins::kArrayPop); InstallBuiltin(isolate, holder, "pop", Builtins::kArrayPop);
if (FLAG_minimal) { InstallBuiltin(isolate, holder, "push", Builtins::kFastArrayPush);
InstallBuiltin(isolate, holder, "push", Builtins::kArrayPush);
} else {
FastArrayPushStub stub(isolate);
InstallCode(isolate, holder, "push", stub.GetCode());
}
InstallBuiltin(isolate, holder, "shift", Builtins::kArrayShift); InstallBuiltin(isolate, holder, "shift", Builtins::kArrayShift);
InstallBuiltin(isolate, holder, "unshift", Builtins::kArrayUnshift); InstallBuiltin(isolate, holder, "unshift", Builtins::kArrayUnshift);
InstallBuiltin(isolate, holder, "slice", Builtins::kArraySlice); InstallBuiltin(isolate, holder, "slice", Builtins::kArraySlice);

View File

@ -45,7 +45,6 @@ namespace internal {
F(EstimateNumberOfElements, 1, 1) \ F(EstimateNumberOfElements, 1, 1) \
F(GetArrayKeys, 2, 1) \ F(GetArrayKeys, 2, 1) \
F(NewArray, -1 /* >= 3 */, 1) \ F(NewArray, -1 /* >= 3 */, 1) \
F(ArrayPush, -1, 1) \
F(FunctionBind, -1, 1) \ F(FunctionBind, -1, 1) \
F(NormalizeElements, 1, 1) \ F(NormalizeElements, 1, 1) \
F(GrowArrayElements, 2, 1) \ F(GrowArrayElements, 2, 1) \

View File

@ -32,11 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray); __ TailCallRuntime(Runtime::kNewArray);
} }
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
void FastFunctionBindStub::InitializeDescriptor( void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) { CodeStubDescriptor* descriptor) {
Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;

View File

@ -1797,5 +1797,136 @@ TEST(IsDebugActive) {
*debug_is_active = false; *debug_is_active = false;
} }
class AppendJSArrayCodeStubAssembler : public CodeStubAssembler {
public:
AppendJSArrayCodeStubAssembler(compiler::CodeAssemblerState* state,
ElementsKind kind)
: CodeStubAssembler(state), kind_(kind) {}
void TestAppendJSArrayImpl(Isolate* isolate, CodeAssemblerTester* tester,
Object* o1, Object* o2, Object* o3, Object* o4,
int initial_size, int result_size) {
typedef CodeStubAssembler::Variable Variable;
typedef CodeStubAssembler::Label Label;
Handle<JSArray> array = isolate->factory()->NewJSArray(
kind_, 2, initial_size, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
JSObject::SetElement(isolate, array, 0,
Handle<Smi>(Smi::FromInt(1), isolate), SLOPPY)
.Check();
JSObject::SetElement(isolate, array, 1,
Handle<Smi>(Smi::FromInt(2), isolate), SLOPPY)
.Check();
CodeStubArguments args(this, IntPtrConstant(kNumParams));
Variable arg_index(this, MachineType::PointerRepresentation());
Label bailout(this);
arg_index.Bind(IntPtrConstant(0));
Node* length = BuildAppendJSArray(
kind_, HeapConstant(Handle<HeapObject>(isolate->context(), isolate)),
HeapConstant(array), args, arg_index, &bailout);
Return(length);
Bind(&bailout);
Return(SmiTag(IntPtrAdd(arg_index.value(), IntPtrConstant(2))));
Handle<Code> code = tester->GenerateCode();
CHECK(!code.is_null());
FunctionTester ft(code, kNumParams);
Handle<Object> result =
ft.Call(Handle<Object>(o1, isolate), Handle<Object>(o2, isolate),
Handle<Object>(o3, isolate), Handle<Object>(o4, isolate))
.ToHandleChecked();
CHECK_EQ(kind_, array->GetElementsKind());
CHECK_EQ(result_size, Handle<Smi>::cast(result)->value());
CHECK_EQ(result_size, Smi::cast(array->length())->value());
Object* obj = *JSObject::GetElement(isolate, array, 2).ToHandleChecked();
CHECK_EQ(result_size < 3 ? isolate->heap()->undefined_value() : o1, obj);
obj = *JSObject::GetElement(isolate, array, 3).ToHandleChecked();
CHECK_EQ(result_size < 4 ? isolate->heap()->undefined_value() : o2, obj);
obj = *JSObject::GetElement(isolate, array, 4).ToHandleChecked();
CHECK_EQ(result_size < 5 ? isolate->heap()->undefined_value() : o3, obj);
obj = *JSObject::GetElement(isolate, array, 5).ToHandleChecked();
CHECK_EQ(result_size < 6 ? isolate->heap()->undefined_value() : o4, obj);
}
static void TestAppendJSArray(Isolate* isolate, ElementsKind kind, Object* o1,
Object* o2, Object* o3, Object* o4,
int initial_size, int result_size) {
CodeAssemblerTester data(isolate, kNumParams);
AppendJSArrayCodeStubAssembler m(data.state(), kind);
m.TestAppendJSArrayImpl(isolate, &data, o1, o2, o3, o4, initial_size,
result_size);
}
private:
static const int kNumParams = 4;
ElementsKind kind_;
};
TEST(BuildAppendJSArrayFastElement) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4), Smi::FromInt(5),
Smi::FromInt(6), 6, 6);
}
TEST(BuildAppendJSArrayFastElementGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4), Smi::FromInt(5),
Smi::FromInt(6), 2, 6);
}
TEST(BuildAppendJSArrayFastSmiElement) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 6, 6);
}
TEST(BuildAppendJSArrayFastSmiElementGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 2, 6);
}
TEST(BuildAppendJSArrayFastSmiElementObject) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 6, 4);
}
TEST(BuildAppendJSArrayFastSmiElementObjectGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_SMI_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 2, 4);
}
TEST(BuildAppendJSArrayFastDoubleElements) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 6, 6);
}
TEST(BuildAppendJSArrayFastDoubleElementsGrow) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
Smi::FromInt(5), Smi::FromInt(6), 2, 6);
}
TEST(BuildAppendJSArrayFastDoubleElementsObject) {
Isolate* isolate(CcTest::InitIsolateOnce());
AppendJSArrayCodeStubAssembler::TestAppendJSArray(
isolate, FAST_DOUBLE_ELEMENTS, Smi::FromInt(3), Smi::FromInt(4),
isolate->heap()->undefined_value(), Smi::FromInt(6), 6, 4);
}
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

View File

@ -0,0 +1,23 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
[1].push(1);
(function PushHoleBitPattern() {
function g(src, dst, i) {
dst[i] = src[i];
}
var b = new ArrayBuffer(8);
var i32 = new Int32Array(b);
i32[0] = 0xFFF7FFFF;
i32[1] = 0xFFF7FFFF;
var f64 = new Float64Array(b);
var a = [,2.5];
a.push(f64[0]);
assertTrue(Number.isNaN(a[2]));
})();

View File

@ -3,13 +3,16 @@
// found in the LICENSE file. // found in the LICENSE file.
function __f_17(__v_9) { function __f_17(__v_9) {
var __v_10 = 0; var __v_10 = 0;
var count = 100000; var count = 100000;
while (count-- != 0) { while (count-- != 0) {
var l = __v_9.push(0); var l = __v_9.push(0);
if (++__v_10 >= 2) return __v_9; if (++__v_10 >= 2) return __v_9;
__v_10 = {}; __v_10 = {};
} }
return __v_9;
} }
__f_17([]); let a = __f_17([]);
assertEquals(a[0], 0);
assertEquals(a[10000], 0);

View File

@ -0,0 +1,22 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function __f_17(__v_9) {
for (var count = 0; count < 20000; ++count) {
if (count < 100) {
__v_9.push(3);
} else if (count < 2500) {
__v_9.push(2.5);
} else {
__v_9.push(true);
}
}
return __v_9;
}
let a = __f_17([]);
assertEquals(a[0], 3);
assertEquals(a[10], 3);
assertEquals(a[2499], 2.5);
assertEquals(a[10000], true);

View File

@ -0,0 +1,18 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function __f_17(__v_9) {
var __v_10 = 0;
var count = 100000;
while (count-- != 0) {
var l = __v_9.push(0.5);
if (++__v_10 >= 2) return __v_9;
__v_10 = {};
}
return __v_9;
}
let a = __f_17([2.2]);
assertEquals(a[0], 2.2);
assertEquals(a[10000], 0.5);