Fastpath some spread-call desugaring.

Avoid using the iterator for arrays with fast elements where the iterator has
not been modified.

Only deals with the case where there is a single spread argument.

Improves the six-speed "spread" benchmark to 1.5x slower than baseline es5 implementation, compared to 19x slower previously.

BUG=v8:5511

Review-Url: https://codereview.chromium.org/2465253011
Cr-Commit-Position: refs/heads/master@{#40998}
This commit is contained in:
petermarshall 2016-11-15 06:41:27 -08:00 committed by Commit bot
parent a18be72c8e
commit a63eeb485a
25 changed files with 244 additions and 48 deletions

View File

@ -1350,6 +1350,11 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
array_iterator_function->shared()->set_instance_class_name(
isolate->heap()->ArrayIterator_string());
native_context()->set_initial_array_iterator_prototype(
*array_iterator_prototype);
native_context()->set_initial_array_iterator_prototype_map(
array_iterator_prototype->map());
Handle<Map> initial_map(array_iterator_function->initial_map(), isolate);
#define ARRAY_ITERATOR_LIST(V) \

View File

@ -2247,7 +2247,7 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
{
// Check the array_protector cell, and take the slow path if it's invalid.
Node* invalid =
assembler->SmiConstant(Smi::FromInt(Isolate::kArrayProtectorInvalid));
assembler->SmiConstant(Smi::FromInt(Isolate::kProtectorInvalid));
Node* cell = assembler->LoadRoot(Heap::kArrayProtectorRootIndex);
Node* cell_value =
assembler->LoadObjectField(cell, PropertyCell::kValueOffset);
@ -2268,7 +2268,7 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
{
// Check the array_protector cell, and take the slow path if it's invalid.
Node* invalid =
assembler->SmiConstant(Smi::FromInt(Isolate::kArrayProtectorInvalid));
assembler->SmiConstant(Smi::FromInt(Isolate::kProtectorInvalid));
Node* cell = assembler->LoadRoot(Heap::kArrayProtectorRootIndex);
Node* cell_value =
assembler->LoadObjectField(cell, PropertyCell::kValueOffset);
@ -2338,7 +2338,7 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
&done);
Node* invalid = assembler->SmiConstant(
Smi::FromInt(Isolate::kArrayProtectorInvalid));
Smi::FromInt(Isolate::kProtectorInvalid));
Node* cell = assembler->LoadRoot(
Heap::kFastArrayIterationProtectorRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset,

View File

@ -5570,7 +5570,7 @@ void CodeStubAssembler::HandleLoadICSmiHandlerCase(
DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
GotoUnless(
WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Smi::FromInt(Isolate::kArrayProtectorValid))),
SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
miss);
Return(UndefinedConstant());
}
@ -8764,7 +8764,7 @@ compiler::Node* CodeStubAssembler::InstanceOf(compiler::Node* object,
GotoUnless(
WordEqual(LoadObjectField(LoadRoot(Heap::kHasInstanceProtectorRootIndex),
PropertyCell::kValueOffset),
SmiConstant(Smi::FromInt(Isolate::kArrayProtectorValid))),
SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
&return_runtime);
// Check if {callable} is a valid receiver.
@ -8949,7 +8949,7 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
GotoUnless(
WordEqual(
LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Smi::FromInt(Isolate::kArrayProtectorValid))),
SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
&if_isslow);
Node* native_context = LoadNativeContext(context);

View File

@ -226,6 +226,10 @@ enum ContextLookupFlags {
V(GENERATOR_FUNCTION_FUNCTION_INDEX, JSFunction, \
generator_function_function) \
V(GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX, Map, generator_object_prototype_map) \
V(INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX, JSObject, \
initial_array_iterator_prototype) \
V(INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX, Map, \
initial_array_iterator_prototype_map) \
V(INITIAL_ARRAY_PROTOTYPE_INDEX, JSObject, initial_array_prototype) \
V(INITIAL_GENERATOR_PROTOTYPE_INDEX, JSObject, initial_generator_prototype) \
V(INITIAL_ITERATOR_PROTOTYPE_INDEX, JSObject, initial_iterator_prototype) \

View File

@ -2870,11 +2870,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ b(ne, &done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ ldr(result, FieldMemOperand(result, Cell::kValueOffset));
__ cmp(result, Operand(Smi::FromInt(Isolate::kArrayProtectorValid)));
__ cmp(result, Operand(Smi::FromInt(Isolate::kProtectorValid)));
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole);
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);

View File

@ -3240,11 +3240,11 @@ void LCodeGen::DoLoadKeyedFixed(LLoadKeyedFixed* instr) {
__ B(ne, &done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ Ldr(result, FieldMemOperand(result, Cell::kValueOffset));
__ Cmp(result, Operand(Smi::FromInt(Isolate::kArrayProtectorValid)));
__ Cmp(result, Operand(Smi::FromInt(Isolate::kProtectorValid)));
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole);
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);

View File

@ -2625,11 +2625,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ j(not_equal, &done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid.
// protector cell contains (Smi) Isolate::kProtectorValid.
// Otherwise it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ cmp(FieldOperand(result, PropertyCell::kValueOffset),
Immediate(Smi::FromInt(Isolate::kArrayProtectorValid)));
Immediate(Smi::FromInt(Isolate::kProtectorValid)));
DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole);
}
__ mov(result, isolate()->factory()->undefined_value());

View File

@ -2796,12 +2796,12 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ Branch(&done, ne, result, Operand(scratch));
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ lw(result, FieldMemOperand(result, Cell::kValueOffset));
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole, result,
Operand(Smi::FromInt(Isolate::kArrayProtectorValid)));
Operand(Smi::FromInt(Isolate::kProtectorValid)));
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);
__ bind(&done);

View File

@ -2972,13 +2972,13 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ Branch(&done, ne, result, Operand(scratch));
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
// The comparison only needs LS bits of value, which is a smi.
__ ld(result, FieldMemOperand(result, Cell::kValueOffset));
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole, result,
Operand(Smi::FromInt(Isolate::kArrayProtectorValid)));
Operand(Smi::FromInt(Isolate::kProtectorValid)));
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);
__ bind(&done);

View File

@ -3046,11 +3046,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ bne(&done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ LoadP(result, FieldMemOperand(result, Cell::kValueOffset));
__ CmpSmiLiteral(result, Smi::FromInt(Isolate::kArrayProtectorValid), r0);
__ CmpSmiLiteral(result, Smi::FromInt(Isolate::kProtectorValid), r0);
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole);
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);

View File

@ -3009,11 +3009,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ bne(&done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ LoadP(result, FieldMemOperand(result, Cell::kValueOffset));
__ CmpSmiLiteral(result, Smi::FromInt(Isolate::kArrayProtectorValid), r0);
__ CmpSmiLiteral(result, Smi::FromInt(Isolate::kProtectorValid), r0);
DeoptimizeIf(ne, instr, DeoptimizeReason::kHole);
}
__ LoadRoot(result, Heap::kUndefinedValueRootIndex);

View File

@ -2822,11 +2822,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ j(not_equal, &done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid. Otherwise
// protector cell contains (Smi) Isolate::kProtectorValid. Otherwise
// it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ Cmp(FieldOperand(result, Cell::kValueOffset),
Smi::FromInt(Isolate::kArrayProtectorValid));
Smi::FromInt(Isolate::kProtectorValid));
DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole);
}
__ Move(result, isolate()->factory()->undefined_value());

View File

@ -2900,11 +2900,11 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
__ j(not_equal, &done);
if (info()->IsStub()) {
// A stub can safely convert the hole to undefined only if the array
// protector cell contains (Smi) Isolate::kArrayProtectorValid.
// protector cell contains (Smi) Isolate::kProtectorValid.
// Otherwise it needs to bail out.
__ LoadRoot(result, Heap::kArrayProtectorRootIndex);
__ cmp(FieldOperand(result, PropertyCell::kValueOffset),
Immediate(Smi::FromInt(Isolate::kArrayProtectorValid)));
Immediate(Smi::FromInt(Isolate::kProtectorValid)));
DeoptimizeIf(not_equal, instr, DeoptimizeReason::kHole);
}
__ mov(result, isolate()->factory()->undefined_value());

View File

@ -2823,7 +2823,7 @@ void Heap::CreateInitialObjects() {
set_empty_script(*script);
Handle<PropertyCell> cell = factory->NewPropertyCell();
cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid));
cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
set_array_protector(*cell);
cell = factory->NewPropertyCell();
@ -2831,25 +2831,29 @@ void Heap::CreateInitialObjects() {
set_empty_property_cell(*cell);
cell = factory->NewPropertyCell();
cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid));
cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
set_has_instance_protector(*cell);
Handle<Cell> is_concat_spreadable_cell = factory->NewCell(
handle(Smi::FromInt(Isolate::kArrayProtectorValid), isolate()));
handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
set_is_concat_spreadable_protector(*is_concat_spreadable_cell);
Handle<Cell> species_cell = factory->NewCell(
handle(Smi::FromInt(Isolate::kArrayProtectorValid), isolate()));
handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
set_species_protector(*species_cell);
cell = factory->NewPropertyCell();
cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid));
cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
set_string_length_protector(*cell);
Handle<Cell> fast_array_iteration_cell = factory->NewCell(
handle(Smi::FromInt(Isolate::kArrayProtectorValid), isolate()));
handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
set_fast_array_iteration_protector(*fast_array_iteration_cell);
Handle<Cell> array_iterator_cell = factory->NewCell(
handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
set_array_iterator_protector(*array_iterator_cell);
set_serialized_templates(empty_fixed_array());
set_weak_stack_trace_list(Smi::kZero);

View File

@ -167,6 +167,7 @@ using v8::MemoryPressureLevel;
V(Cell, species_protector, SpeciesProtector) \
V(PropertyCell, string_length_protector, StringLengthProtector) \
V(Cell, fast_array_iteration_protector, FastArrayIterationProtector) \
V(Cell, array_iterator_protector, ArrayIteratorProtector) \
/* Special numbers */ \
V(HeapNumber, nan_value, NanValue) \
V(HeapNumber, hole_nan_value, HoleNanValue) \

View File

@ -130,22 +130,27 @@ bool Isolate::IsArraySpeciesLookupChainIntact() {
Cell* species_cell = heap()->species_protector();
return species_cell->value()->IsSmi() &&
Smi::cast(species_cell->value())->value() == kArrayProtectorValid;
Smi::cast(species_cell->value())->value() == kProtectorValid;
}
bool Isolate::IsHasInstanceLookupChainIntact() {
PropertyCell* has_instance_cell = heap()->has_instance_protector();
return has_instance_cell->value() == Smi::FromInt(kArrayProtectorValid);
return has_instance_cell->value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsStringLengthOverflowIntact() {
PropertyCell* has_instance_cell = heap()->string_length_protector();
return has_instance_cell->value() == Smi::FromInt(kArrayProtectorValid);
return has_instance_cell->value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsFastArrayIterationIntact() {
Cell* fast_iteration = heap()->fast_array_iteration_protector();
return fast_iteration->value() == Smi::FromInt(kArrayProtectorValid);
return fast_iteration->value() == Smi::FromInt(kProtectorValid);
}
bool Isolate::IsArrayIteratorLookupChainIntact() {
Cell* array_iterator_cell = heap()->array_iterator_protector();
return array_iterator_cell->value() == Smi::FromInt(kProtectorValid);
}
} // namespace internal

View File

@ -2822,7 +2822,7 @@ bool Isolate::IsFastArrayConstructorPrototypeChainIntact() {
PropertyCell* no_elements_cell = heap()->array_protector();
bool cell_reports_intact =
no_elements_cell->value()->IsSmi() &&
Smi::cast(no_elements_cell->value())->value() == kArrayProtectorValid;
Smi::cast(no_elements_cell->value())->value() == kProtectorValid;
#ifdef DEBUG
Map* root_array_map =
@ -2881,7 +2881,7 @@ bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
Cell* is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
bool is_is_concat_spreadable_set =
Smi::cast(is_concat_spreadable_cell->value())->value() ==
kArrayProtectorInvalid;
kProtectorInvalid;
#ifdef DEBUG
Map* root_array_map = get_initial_js_array_map(GetInitialFastElementsKind());
if (root_array_map == NULL) {
@ -2916,7 +2916,7 @@ void Isolate::UpdateArrayProtectorOnSetElement(Handle<JSObject> object) {
if (!IsArrayOrObjectPrototype(*object)) return;
PropertyCell::SetValueWithInvalidation(
factory()->array_protector(),
handle(Smi::FromInt(kArrayProtectorInvalid), this));
handle(Smi::FromInt(kProtectorInvalid), this));
}
void Isolate::InvalidateHasInstanceProtector() {
@ -2924,7 +2924,7 @@ void Isolate::InvalidateHasInstanceProtector() {
DCHECK(IsHasInstanceLookupChainIntact());
PropertyCell::SetValueWithInvalidation(
factory()->has_instance_protector(),
handle(Smi::FromInt(kArrayProtectorInvalid), this));
handle(Smi::FromInt(kProtectorInvalid), this));
DCHECK(!IsHasInstanceLookupChainIntact());
}
@ -2932,15 +2932,14 @@ void Isolate::InvalidateIsConcatSpreadableProtector() {
DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
DCHECK(IsIsConcatSpreadableLookupChainIntact());
factory()->is_concat_spreadable_protector()->set_value(
Smi::FromInt(kArrayProtectorInvalid));
Smi::FromInt(kProtectorInvalid));
DCHECK(!IsIsConcatSpreadableLookupChainIntact());
}
void Isolate::InvalidateArraySpeciesProtector() {
DCHECK(factory()->species_protector()->value()->IsSmi());
DCHECK(IsArraySpeciesLookupChainIntact());
factory()->species_protector()->set_value(
Smi::FromInt(kArrayProtectorInvalid));
factory()->species_protector()->set_value(Smi::FromInt(kProtectorInvalid));
DCHECK(!IsArraySpeciesLookupChainIntact());
}
@ -2949,10 +2948,18 @@ void Isolate::InvalidateStringLengthOverflowProtector() {
DCHECK(IsStringLengthOverflowIntact());
PropertyCell::SetValueWithInvalidation(
factory()->string_length_protector(),
handle(Smi::FromInt(kArrayProtectorInvalid), this));
handle(Smi::FromInt(kProtectorInvalid), this));
DCHECK(!IsStringLengthOverflowIntact());
}
void Isolate::InvalidateArrayIteratorProtector() {
DCHECK(factory()->array_iterator_protector()->value()->IsSmi());
DCHECK(IsArrayIteratorLookupChainIntact());
factory()->array_iterator_protector()->set_value(
Smi::FromInt(kProtectorInvalid));
DCHECK(!IsArrayIteratorLookupChainIntact());
}
bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
DisallowHeapAllocation no_gc;
return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);

View File

@ -987,8 +987,8 @@ class Isolate {
Map* get_initial_js_array_map(ElementsKind kind);
static const int kArrayProtectorValid = 1;
static const int kArrayProtectorInvalid = 0;
static const int kProtectorValid = 1;
static const int kProtectorInvalid = 0;
bool IsFastArrayConstructorPrototypeChainIntact();
inline bool IsArraySpeciesLookupChainIntact();
@ -996,6 +996,7 @@ class Isolate {
bool IsIsConcatSpreadableLookupChainIntact();
bool IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver);
inline bool IsStringLengthOverflowIntact();
inline bool IsArrayIteratorLookupChainIntact();
// Avoid deopt loops if fast Array Iterators migrate to slow Array Iterators.
inline bool IsFastArrayIterationIntact();
@ -1018,6 +1019,7 @@ class Isolate {
void InvalidateHasInstanceProtector();
void InvalidateIsConcatSpreadableProtector();
void InvalidateStringLengthOverflowProtector();
void InvalidateArrayIteratorProtector();
// Returns true if array is the initial array prototype in any native context.
bool IsAnyInitialArrayPrototype(Handle<JSArray> array);

View File

@ -194,6 +194,11 @@ void LookupIterator::InternalUpdateProtector() {
} else if (*name_ == heap()->has_instance_symbol()) {
if (!isolate_->IsHasInstanceLookupChainIntact()) return;
isolate_->InvalidateHasInstanceProtector();
} else if (*name_ == heap()->iterator_symbol()) {
if (!isolate_->IsArrayIteratorLookupChainIntact()) return;
if (holder_->IsJSArray()) {
isolate_->InvalidateArrayIteratorProtector();
}
}
}

View File

@ -258,7 +258,8 @@ class V8_EXPORT_PRIVATE LookupIterator final BASE_EMBEDDED {
if (*name_ == heap()->is_concat_spreadable_symbol() ||
*name_ == heap()->constructor_string() ||
*name_ == heap()->species_symbol() ||
*name_ == heap()->has_instance_symbol()) {
*name_ == heap()->has_instance_symbol() ||
*name_ == heap()->iterator_symbol()) {
InternalUpdateProtector();
}
}

View File

@ -3973,7 +3973,7 @@ ZoneList<Expression*>* Parser::PrepareSpreadArguments(
ZoneList<Expression*>* spread_list =
new (zone()) ZoneList<Expression*>(0, zone());
spread_list->Add(list->at(0)->AsSpread()->expression(), zone());
args->Add(factory()->NewCallRuntime(Context::SPREAD_ITERABLE_INDEX,
args->Add(factory()->NewCallRuntime(Runtime::kSpreadIterablePrepare,
spread_list, kNoSourcePosition),
zone());
return args;

View File

@ -636,5 +636,48 @@ RUNTIME_FUNCTION(Runtime_ArrayIndexOf) {
return Smi::FromInt(-1);
}
RUNTIME_FUNCTION(Runtime_SpreadIterablePrepare) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(Object, spread, 0);
if (spread->IsJSArray()) {
// Check that the spread arg has fast elements
Handle<JSArray> spread_array = Handle<JSArray>::cast(spread);
ElementsKind array_kind = spread_array->GetElementsKind();
// And that it has the original ArrayPrototype
JSObject* array_proto = JSObject::cast(spread_array->map()->prototype());
Map* iterator_map = isolate->initial_array_iterator_prototype()->map();
// Check that the iterator acts as expected.
// If IsArrayIteratorLookupChainIntact(), then we know that the initial
// ArrayIterator is being used. If the map of the prototype has changed,
// then take the slow path.
if (isolate->is_initial_array_prototype(array_proto) &&
isolate->IsArrayIteratorLookupChainIntact() &&
isolate->is_initial_array_iterator_prototype_map(iterator_map)) {
if (IsFastPackedElementsKind(array_kind)) {
return *spread;
}
if (IsFastHoleyElementsKind(array_kind) &&
isolate->IsFastArrayConstructorPrototypeChainIntact()) {
return *spread;
}
}
}
Handle<JSFunction> spread_iterable_function = isolate->spread_iterable();
Handle<Object> spreaded;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, spreaded,
Execution::Call(isolate, spread_iterable_function,
isolate->factory()->undefined_value(), 1, &spread));
return *spreaded;
}
} // namespace internal
} // namespace v8

View File

@ -56,7 +56,8 @@ namespace internal {
F(FixedArraySet, 3, 1) \
F(ArraySpeciesConstructor, 1, 1) \
F(ArrayIncludes_Slow, 3, 1) \
F(ArrayIndexOf, 3, 1)
F(ArrayIndexOf, 3, 1) \
F(SpreadIterablePrepare, 1, 1)
#define FOR_EACH_INTRINSIC_ATOMICS(F) \
F(ThrowNotIntegerSharedTypedArrayError, 1, 1) \

View File

@ -79,7 +79,7 @@ bytecodes: [
B(Star), R(0),
B(CreateArrayLiteral), U8(0), U8(0), U8(9),
B(Star), R(1),
B(CallJSRuntime), U8(152), R(0), U8(2),
B(CallJSRuntime), U8(154), R(0), U8(2),
/* 44 S> */ B(Return),
]
constant pool: [

View File

@ -24,6 +24,13 @@
assertEquals(5, countArgs(...[1, 2, 3, 4, 5]));
assertEquals(6, countArgs(...[1, 2, 3, 4, 5, 6]));
assertEquals(1, countArgs(...[1.1]));
assertEquals(2, countArgs(...[1.1, 2.2]));
assertEquals(3, countArgs(...[1.1, 2.2, 3.3]));
assertEquals(4, countArgs(...[1.1, 2.2, 3.3, 4.4]));
assertEquals(5, countArgs(...[1.1, 2.2, 3.3, 4.4, 5.5]));
assertEquals(6, countArgs(...[1.1, 2.2, 3.3, 4.4, 5.5, 6.6]));
assertEquals(1, countArgs(...new Set([1])));
assertEquals(2, countArgs(...new Set([1, 2])));
assertEquals(3, countArgs(...new Set([1, 2, 3])));
@ -346,6 +353,91 @@
assertEquals("ABXYC1C2DEBXYC1C2", log);
})();
(function testArrayPrototypeHoleGetterModifiesIteratorPrototypeNext() {
function sum() {
var sum = arguments[0];
for (var i = 1; i < arguments.length; ++i) {
sum += arguments[i];
}
return sum;
}
var a = [1, 2];
a[3] = 4;
var called = 0;
Object.defineProperty(Array.prototype, 2, {
get: function() {
var ai = a[Symbol.iterator]();
var original_next = ai.__proto__["next"];
Object.defineProperty(ai.__proto__, "next", {
get: function() {
called++;
return original_next;
}
});
return 3;
},
configurable: true
});
assertEquals(10, sum(...a));
assertEquals(2, called);
Object.defineProperty(Array.prototype, 2, {});
})();
(function testArrayHasOtherPrototype() {
function countArgs() { return arguments.length; }
var a = [1, 2, 3];
var b = {};
Object.defineProperty(b, Symbol.iterator, {
value: function*() {
yield 4;
},
configurable: true
});
Object.setPrototypeOf(a, b);
assertEquals(1, countArgs(...a));
})();
(function testArrayIteratorPrototypeGetter() {
function countArgs() { return arguments.length; }
var a = [1, 2, 3];
var ai = a[Symbol.iterator]();
var called = 0;
var original_next = ai.__proto__["next"];
Object.defineProperty(ai.__proto__, "next", {
get: function() {
called++;
return original_next;
}
});
countArgs(...a);
// should be called 4 times; 3 for the values, 1 for the final
// {value: undefined, done: true} pair
assertEquals(4, called);
})();
(function testArrayIteratorPrototypeModified() {
function countArgs() { return arguments.length; }
var a = [1,2,3];
var ai = a[Symbol.iterator]();
Object.defineProperty(ai.__proto__, "next", {
value: function() {
return {value: undefined, done: true};
},
configurable: true
});
assertEquals(0, countArgs(...a));
})();
(function testCustomArrayPrototypeIterator() {
var origIterator =
@ -370,3 +462,29 @@
Object.defineProperty(Array.prototype, Symbol.iterator, origIterator);
})();
(function testGetPropertyIteratorCalledExactlyOnce() {
function countArgs() { return arguments.length; }
var a = [1, 2, 3];
var called = 0;
Object.defineProperty(Array.prototype, Symbol.iterator, {
value: function*() {
yield 1;
yield 2;
},
configurable: true
});
var it = a[Symbol.iterator];
Object.defineProperty(a, Symbol.iterator, {
get: function() {
called++;
return it;
}
});
countArgs(...a);
assertEquals(1, called);
})();