[builtins] implement JSBuiltinReducer for ArrayIteratorNext()

Adds a protector cell to prevent inlining (which would likely lead to deopt
loops) when a JSArrayIterator's array transitions from a fast JSArray to a
slow JSArray (for example, when the array is touched during iteration in a
way that triggers a map transition).
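
For illustration only (not part of this CL), a sketch of the scenario the
protector guards against, mirroring the CheckMaps bailout case in the added
mjsunit test:

  const arr = [1, 2, 3];
  for (const x of arr) {
    if (x === 1) {
      // Installing an accessor on an element index forces the array into
      // slow (dictionary) elements; inlined fast-path next() code would
      // then deopt on every subsequent call without the protector.
      Object.defineProperty(arr, 2, { get() { return 7; } });
    }
  }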

Also adds TODO comments relating to the spec update proposed by Dan at
https://github.com/tc39/ecma262/pull/724

BUG=v8:5388
R=bmeurer@chromium.org, mstarzinger@chromium.org
TBR=hpayer@chromium.org, ulan@chromium.org

Review-Url: https://codereview.chromium.org/2484003002
Cr-Commit-Position: refs/heads/master@{#40970}
caitp 2016-11-14 07:58:48 -08:00 committed by Commit bot
parent 0909e5cccb
commit 7f21e67b38
29 changed files with 1178 additions and 39 deletions


@ -1334,6 +1334,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Handle<JSFunction> next = InstallFunction(
array_iterator_prototype, "next", JS_OBJECT_TYPE, JSObject::kHeaderSize,
MaybeHandle<JSObject>(), Builtins::kArrayIteratorPrototypeNext);
next->shared()->set_builtin_function_id(kArrayIteratorNext);
// Set the expected parameters for %ArrayIteratorPrototype%.next to 0 (not
// including the receiver), as required by the builtin.
@ -2187,14 +2188,21 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
kTypedArrayLength);
// Install "keys", "values" and "entries" methods on the {prototype}.
SimpleInstallFunction(prototype, factory->entries_string(),
Builtins::kTypedArrayPrototypeEntries, 0, true);
SimpleInstallFunction(prototype, factory->keys_string(),
Builtins::kTypedArrayPrototypeKeys, 0, true);
Handle<JSFunction> iterator =
Handle<JSFunction> entries =
SimpleInstallFunction(prototype, factory->entries_string(),
Builtins::kTypedArrayPrototypeEntries, 0, true);
entries->shared()->set_builtin_function_id(kTypedArrayEntries);
Handle<JSFunction> keys =
SimpleInstallFunction(prototype, factory->keys_string(),
Builtins::kTypedArrayPrototypeKeys, 0, true);
keys->shared()->set_builtin_function_id(kTypedArrayKeys);
Handle<JSFunction> values =
SimpleInstallFunction(prototype, factory->values_string(),
Builtins::kTypedArrayPrototypeValues, 0, true);
JSObject::AddProperty(prototype, factory->iterator_symbol(), iterator,
values->shared()->set_builtin_function_id(kTypedArrayValues);
JSObject::AddProperty(prototype, factory->iterator_symbol(), values,
DONT_ENUM);
}


@ -2301,6 +2301,8 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
assembler->Bind(&if_isgeneric);
{
Label if_wasfastarray(assembler);
Node* length = nullptr;
{
Variable var_length(assembler, MachineRepresentation::kTagged);
@ -2314,7 +2316,35 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
{
var_length.Bind(
assembler->LoadObjectField(array, JSArray::kLengthOffset));
assembler->Goto(&done);
// Invalidate protector cell if needed
assembler->Branch(
assembler->WordNotEqual(orig_map, assembler->UndefinedConstant()),
&if_wasfastarray, &done);
assembler->Bind(&if_wasfastarray);
{
Label if_invalid(assembler, Label::kDeferred);
// A fast array iterator transitioned to a slow iterator during
// iteration. Invalidate the fast_array_iteration_protector cell to
// prevent potential deopt loops.
assembler->StoreObjectFieldNoWriteBarrier(
iterator, JSArrayIterator::kIteratedObjectMapOffset,
assembler->UndefinedConstant());
assembler->GotoIf(
assembler->Uint32LessThanOrEqual(
instance_type, assembler->Int32Constant(
JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE)),
&done);
Node* invalid = assembler->SmiConstant(
Smi::FromInt(Isolate::kArrayProtectorInvalid));
Node* cell = assembler->LoadRoot(
Heap::kFastArrayIterationProtectorRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset,
invalid);
assembler->Goto(&done);
}
}
assembler->Bind(&if_isnotarray);
@ -2377,6 +2407,8 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
assembler->Bind(&if_isdetached);
{
// TODO(caitp): If IsDetached(buffer) is true, throw a TypeError, per
// https://github.com/tc39/ecma262/issues/713
var_length.Bind(assembler->SmiConstant(Smi::kZero));
assembler->Goto(&done);
}


@ -681,16 +681,11 @@ void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
Node* elements_kind = LoadMapElementsKind(map);
// Bailout if receiver has slow elements.
GotoIf(
Int32GreaterThan(elements_kind, Int32Constant(LAST_FAST_ELEMENTS_KIND)),
if_false);
GotoUnless(IsFastElementsKind(elements_kind), if_false);
// Check prototype chain if receiver does not have packed elements.
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
GotoIf(Word32Equal(holey_elements, Int32Constant(0)), if_true);
GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
BranchIfPrototypesHaveNoElements(map, if_true, if_false);
}
@ -8898,19 +8893,59 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
Bind(&if_isfast);
{
Node* map_index =
IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
LoadMapElementsKind(array_map));
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
map_index, IntPtrConstant(kBaseMapIndex +
kFastIteratorOffset)));
CSA_ASSERT(this, IntPtrLessThan(map_index,
IntPtrConstant(kBaseMapIndex +
kSlowIteratorOffset)));
Label if_ispacked(this), if_isholey(this);
Node* elements_kind = LoadMapElementsKind(array_map);
Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
&if_ispacked);
var_map_index.Bind(map_index);
var_array_map.Bind(array_map);
Goto(&allocate_iterator);
Bind(&if_isholey);
{
// Fast holey JSArrays can treat the hole as undefined if the
// protector cell is valid, and the prototype chain is unchanged from
// its initial state (because the protector cell is only tracked for
// the initial Array and Object prototypes). Check these conditions
// here, and take the slow path if any fail.
Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
GotoUnless(
WordEqual(
LoadObjectField(protector_cell, PropertyCell::kValueOffset),
SmiConstant(Smi::FromInt(Isolate::kArrayProtectorValid))),
&if_isslow);
Node* native_context = LoadNativeContext(context);
Node* prototype = LoadMapPrototype(array_map);
Node* array_prototype = LoadContextElement(
native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
GotoUnless(WordEqual(prototype, array_prototype), &if_isslow);
Node* map = LoadMap(prototype);
prototype = LoadMapPrototype(map);
Node* object_prototype = LoadContextElement(
native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
GotoUnless(WordEqual(prototype, object_prototype), &if_isslow);
map = LoadMap(prototype);
prototype = LoadMapPrototype(map);
Branch(IsNull(prototype), &if_ispacked, &if_isslow);
}
Bind(&if_ispacked);
{
Node* map_index =
IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
LoadMapElementsKind(array_map));
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
map_index, IntPtrConstant(kBaseMapIndex +
kFastIteratorOffset)));
CSA_ASSERT(this, IntPtrLessThan(map_index,
IntPtrConstant(kBaseMapIndex +
kSlowIteratorOffset)));
var_map_index.Bind(map_index);
var_array_map.Bind(array_map);
Goto(&allocate_iterator);
}
}
Bind(&if_isslow);
@ -9051,5 +9086,24 @@ void CodeStubArguments::PopAndReturn(compiler::Node* value) {
value);
}
compiler::Node* CodeStubAssembler::IsFastElementsKind(
compiler::Node* elements_kind) {
return Uint32LessThanOrEqual(elements_kind,
Int32Constant(LAST_FAST_ELEMENTS_KIND));
}
compiler::Node* CodeStubAssembler::IsHoleyFastElementsKind(
compiler::Node* elements_kind) {
CSA_ASSERT(this, IsFastElementsKind(elements_kind));
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
// Holey fast kinds are the corresponding packed kinds with bit 0 set.
Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
return Word32Equal(holey_elements, Int32Constant(1));
}
} // namespace internal
} // namespace v8
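
For context (illustrative JavaScript, not part of the diff): the holey fast
path above may treat holes as undefined only while the Array/Object prototype
chain is in its initial state, which is what the array protector tracks.
Roughly:

  let holey = [1, , 3];              // fast holey elements
  console.log([...holey]);           // [1, undefined, 3] while the chain is intact

  Array.prototype[1] = "surprise";   // invalidates the array protector
  console.log([...holey]);           // [1, "surprise", 3]; the slow path is required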


@ -20,8 +20,6 @@ class StubCache;
enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
enum class IterationKind { kKeys, kValues, kEntries };
#define HEAP_CONSTANT_LIST(V) \
V(BooleanMap, BooleanMap) \
V(CodeMap, CodeMap) \
@ -634,6 +632,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
compiler::Node* IsDictionary(compiler::Node* object);
compiler::Node* IsUnseededNumberDictionary(compiler::Node* object);
// ElementsKind helpers:
compiler::Node* IsFastElementsKind(compiler::Node* elements_kind);
compiler::Node* IsHoleyFastElementsKind(compiler::Node* elements_kind);
// String helpers.
// Load a character from a String (might flatten a ConsString).
compiler::Node* StringCharCodeAt(compiler::Node* string,


@ -570,6 +570,59 @@ FieldAccess AccessBuilder::ForJSGlobalObjectNativeContext() {
return access;
}
// static
FieldAccess AccessBuilder::ForJSArrayIteratorObject() {
FieldAccess access = {kTaggedBase,
JSArrayIterator::kIteratedObjectOffset,
Handle<Name>(),
Type::ReceiverOrUndefined(),
MachineType::TaggedPointer(),
kPointerWriteBarrier};
return access;
}
// static
FieldAccess AccessBuilder::ForJSArrayIteratorIndex(InstanceType instance_type,
ElementsKind elements_kind) {
// In the generic case, cap to 2^53-1 (per ToLength() in the spec) via
// kPositiveSafeInteger.
FieldAccess access = {kTaggedBase,
JSArrayIterator::kNextIndexOffset,
Handle<Name>(),
TypeCache::Get().kPositiveSafeInteger,
MachineType::AnyTagged(),
kFullWriteBarrier};
if (instance_type == JS_ARRAY_TYPE) {
if (IsFastDoubleElementsKind(elements_kind)) {
access.type = TypeCache::Get().kFixedDoubleArrayLengthType;
access.machine_type = MachineType::TaggedSigned();
access.write_barrier_kind = kNoWriteBarrier;
} else if (IsFastElementsKind(elements_kind)) {
access.type = TypeCache::Get().kFixedArrayLengthType;
access.machine_type = MachineType::TaggedSigned();
access.write_barrier_kind = kNoWriteBarrier;
} else {
access.type = TypeCache::Get().kJSArrayLengthType;
}
} else if (instance_type == JS_TYPED_ARRAY_TYPE) {
access.type = TypeCache::Get().kJSTypedArrayLengthType;
access.machine_type = MachineType::TaggedSigned();
access.write_barrier_kind = kNoWriteBarrier;
}
return access;
}
// static
FieldAccess AccessBuilder::ForJSArrayIteratorObjectMap() {
FieldAccess access = {kTaggedBase,
JSArrayIterator::kIteratedObjectMapOffset,
Handle<Name>(),
Type::OtherInternal(),
MachineType::TaggedPointer(),
kPointerWriteBarrier};
return access;
}
// static
FieldAccess AccessBuilder::ForJSStringIteratorString() {
FieldAccess access = {


@ -191,6 +191,16 @@ class V8_EXPORT_PRIVATE AccessBuilder final
// Provides access to JSGlobalObject::native_context() field.
static FieldAccess ForJSGlobalObjectNativeContext();
// Provides access to JSArrayIterator::object() field.
static FieldAccess ForJSArrayIteratorObject();
// Provides access to JSArrayIterator::index() field.
static FieldAccess ForJSArrayIteratorIndex(InstanceType type = JS_OBJECT_TYPE,
ElementsKind kind = NO_ELEMENTS);
// Provides access to JSArrayIterator::object_map() field.
static FieldAccess ForJSArrayIteratorObjectMap();
// Provides access to JSStringIterator::string() field.
static FieldAccess ForJSStringIteratorString();


@ -175,8 +175,552 @@ bool CanInlineArrayResizeOperation(Handle<Map> receiver_map) {
!IsReadOnlyLengthDescriptor(receiver_map);
}
bool CanInlineJSArrayIteration(Handle<Map> receiver_map) {
Isolate* const isolate = receiver_map->GetIsolate();
// Ensure that the [[Prototype]] is actually an exotic Array
if (!receiver_map->prototype()->IsJSArray()) return false;
// Don't inline JSArrays with slow elements of any kind
if (!IsFastElementsKind(receiver_map->elements_kind())) return false;
// If the receiver map has packed elements, no need to check the prototype.
// This requires a MapCheck where this is used.
if (!IsFastHoleyElementsKind(receiver_map->elements_kind())) return true;
Handle<JSArray> receiver_prototype(JSArray::cast(receiver_map->prototype()),
isolate);
// Ensure all prototypes of the {receiver} are stable.
for (PrototypeIterator it(isolate, receiver_prototype, kStartAtReceiver);
!it.IsAtEnd(); it.Advance()) {
Handle<JSReceiver> current = PrototypeIterator::GetCurrent<JSReceiver>(it);
if (!current->map()->is_stable()) return false;
}
// For holey Arrays, ensure that the array_protector cell is valid (must be
// a CompilationDependency), and the JSArray prototype has not been altered.
return receiver_map->instance_type() == JS_ARRAY_TYPE &&
(!receiver_map->is_dictionary_map() || receiver_map->is_stable()) &&
isolate->IsFastArrayConstructorPrototypeChainIntact() &&
isolate->IsAnyInitialArrayPrototype(receiver_prototype);
}
} // namespace
Reduction JSBuiltinReducer::ReduceArrayIterator(Node* node,
IterationKind kind) {
Handle<Map> receiver_map;
if (GetMapWitness(node).ToHandle(&receiver_map)) {
return ReduceArrayIterator(receiver_map, node, kind,
ArrayIteratorKind::kArray);
}
return NoChange();
}
Reduction JSBuiltinReducer::ReduceTypedArrayIterator(Node* node,
IterationKind kind) {
Handle<Map> receiver_map;
if (GetMapWitness(node).ToHandle(&receiver_map) &&
receiver_map->instance_type() == JS_TYPED_ARRAY_TYPE) {
return ReduceArrayIterator(receiver_map, node, kind,
ArrayIteratorKind::kTypedArray);
}
return NoChange();
}
Reduction JSBuiltinReducer::ReduceArrayIterator(Handle<Map> receiver_map,
Node* node, IterationKind kind,
ArrayIteratorKind iter_kind) {
Node* receiver = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
if (iter_kind == ArrayIteratorKind::kTypedArray) {
// For JSTypedArray iterator methods, deopt if the buffer is neutered. This
// is potentially a deopt loop, but should be extremely unlikely.
DCHECK_EQ(JS_TYPED_ARRAY_TYPE, receiver_map->instance_type());
Node* buffer = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
receiver, effect, control);
Node* check = effect = graph()->NewNode(
simplified()->ArrayBufferWasNeutered(), buffer, effect, control);
check = graph()->NewNode(simplified()->BooleanNot(), check);
effect = graph()->NewNode(simplified()->CheckIf(), check, effect, control);
}
int map_index = -1;
Node* object_map = jsgraph()->UndefinedConstant();
switch (receiver_map->instance_type()) {
case JS_ARRAY_TYPE:
if (kind == IterationKind::kKeys) {
map_index = Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX;
} else {
map_index = kind == IterationKind::kValues
? Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX
: Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
if (CanInlineJSArrayIteration(receiver_map)) {
// Use `generic` elements for holey arrays if there may be elements
// on the prototype chain.
map_index += static_cast<int>(receiver_map->elements_kind());
object_map = jsgraph()->Constant(receiver_map);
if (IsFastHoleyElementsKind(receiver_map->elements_kind())) {
Handle<JSObject> initial_array_prototype(
native_context()->initial_array_prototype(), isolate());
dependencies()->AssumePrototypeMapsStable(receiver_map,
initial_array_prototype);
}
} else {
map_index += (Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX);
}
}
break;
case JS_TYPED_ARRAY_TYPE:
if (kind == IterationKind::kKeys) {
map_index = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
} else {
DCHECK_GE(receiver_map->elements_kind(), UINT8_ELEMENTS);
DCHECK_LE(receiver_map->elements_kind(), UINT8_CLAMPED_ELEMENTS);
map_index = (kind == IterationKind::kValues
? Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX
: Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX) +
(receiver_map->elements_kind() - UINT8_ELEMENTS);
}
break;
default:
if (kind == IterationKind::kKeys) {
map_index = Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX;
} else if (kind == IterationKind::kValues) {
map_index = Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX;
} else {
map_index = Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
}
break;
}
DCHECK_GE(map_index, Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX);
DCHECK_LE(map_index, Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX);
Handle<Map> map(Map::cast(native_context()->get(map_index)), isolate());
// allocate new iterator
effect = graph()->NewNode(
common()->BeginRegion(RegionObservability::kNotObservable), effect);
Node* value = effect = graph()->NewNode(
simplified()->Allocate(NOT_TENURED),
jsgraph()->Constant(JSArrayIterator::kSize), effect, control);
effect = graph()->NewNode(simplified()->StoreField(AccessBuilder::ForMap()),
value, jsgraph()->Constant(map), effect, control);
effect = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSObjectProperties()), value,
jsgraph()->EmptyFixedArrayConstant(), effect, control);
effect = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSObjectElements()), value,
jsgraph()->EmptyFixedArrayConstant(), effect, control);
// attach the iterator to this object
effect = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorObject()),
value, receiver, effect, control);
effect = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorIndex()), value,
jsgraph()->ZeroConstant(), effect, control);
effect = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorObjectMap()),
value, object_map, effect, control);
value = effect = graph()->NewNode(common()->FinishRegion(), value, effect);
// replace it
ReplaceWithValue(node, value, effect, control);
return Replace(value);
}
Reduction JSBuiltinReducer::ReduceFastArrayIteratorNext(
Handle<Map> iterator_map, Node* node, IterationKind kind) {
Node* iterator = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* context = NodeProperties::GetContextInput(node);
if (kind != IterationKind::kKeys &&
!isolate()->IsFastArrayIterationIntact()) {
// Avoid deopt loops for non-key iteration if the
// fast_array_iteration_protector cell has been invalidated.
return NoChange();
}
ElementsKind elements_kind = JSArrayIterator::ElementsKindForInstanceType(
iterator_map->instance_type());
if (IsFastHoleyElementsKind(elements_kind)) {
if (!isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
return NoChange();
} else {
Handle<JSObject> initial_array_prototype(
native_context()->initial_array_prototype(), isolate());
dependencies()->AssumePropertyCell(factory()->array_protector());
}
}
Node* array = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayIteratorObject()),
iterator, effect, control);
Node* check0 = graph()->NewNode(simplified()->ReferenceEqual(), array,
jsgraph()->UndefinedConstant());
Node* branch0 =
graph()->NewNode(common()->Branch(BranchHint::kFalse), check0, control);
Node* vdone_false0;
Node* vfalse0;
Node* efalse0 = effect;
Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
{
// iterator.[[IteratedObject]] !== undefined, continue iterating.
Node* index = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayIteratorIndex(
JS_ARRAY_TYPE, elements_kind)),
iterator, efalse0, if_false0);
Node* length = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayLength(elements_kind)),
array, efalse0, if_false0);
Node* check1 =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch1 = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check1, if_false0);
Node* vdone_true1;
Node* vtrue1;
Node* etrue1 = efalse0;
Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
{
// iterator.[[NextIndex]] < array.length, continue iterating
vdone_true1 = jsgraph()->FalseConstant();
if (kind == IterationKind::kKeys) {
vtrue1 = index;
} else {
// For value/entry iteration, first step is a mapcheck to ensure
// inlining is still valid.
Node* orig_map = etrue1 =
graph()->NewNode(simplified()->LoadField(
AccessBuilder::ForJSArrayIteratorObjectMap()),
iterator, etrue1, if_true1);
etrue1 = graph()->NewNode(simplified()->CheckMaps(1), array, orig_map,
etrue1, if_true1);
}
if (kind != IterationKind::kKeys) {
Node* elements = etrue1 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
array, etrue1, if_true1);
Node* value = etrue1 = graph()->NewNode(
simplified()->LoadElement(
AccessBuilder::ForFixedArrayElement(elements_kind)),
elements, index, etrue1, if_true1);
// Convert hole to undefined if needed.
if (elements_kind == FAST_HOLEY_ELEMENTS ||
elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
value = graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(),
value);
} else if (elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
// TODO(bmeurer): avoid deopt if not all uses of value are truncated.
CheckFloat64HoleMode mode = CheckFloat64HoleMode::kAllowReturnHole;
value = etrue1 = graph()->NewNode(
simplified()->CheckFloat64Hole(mode), value, etrue1, if_true1);
}
if (kind == IterationKind::kEntries) {
// Allocate elements for key/value pair
vtrue1 = etrue1 = graph()->NewNode(
javascript()->CreateKeyValueArray(), index, value, etrue1);
} else {
DCHECK_EQ(kind, IterationKind::kValues);
vtrue1 = value;
}
}
Node* next_index = graph()->NewNode(simplified()->NumberAdd(), index,
jsgraph()->OneConstant());
next_index = graph()->NewNode(simplified()->NumberToUint32(), next_index);
etrue1 = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorIndex(
JS_ARRAY_TYPE, elements_kind)),
iterator, next_index, etrue1, if_true1);
}
Node* vdone_false1;
Node* vfalse1;
Node* efalse1 = efalse0;
Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
{
// iterator.[[NextIndex]] >= array.length, stop iterating.
vdone_false1 = jsgraph()->TrueConstant();
vfalse1 = jsgraph()->UndefinedConstant();
efalse1 = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorObject()),
iterator, vfalse1, efalse1, if_false1);
}
if_false0 = graph()->NewNode(common()->Merge(2), if_true1, if_false1);
efalse0 =
graph()->NewNode(common()->EffectPhi(2), etrue1, efalse1, if_false0);
vfalse0 = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue1, vfalse1, if_false0);
vdone_false0 =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vdone_true1, vdone_false1, if_false0);
}
Node* vdone_true0;
Node* vtrue0;
Node* etrue0 = effect;
Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
{
// iterator.[[IteratedObject]] === undefined, the iterator is done.
vdone_true0 = jsgraph()->TrueConstant();
vtrue0 = jsgraph()->UndefinedConstant();
}
control = graph()->NewNode(common()->Merge(2), if_false0, if_true0);
effect = graph()->NewNode(common()->EffectPhi(2), efalse0, etrue0, control);
Node* value =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vfalse0, vtrue0, control);
Node* done =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vdone_false0, vdone_true0, control);
// Create IteratorResult object.
value = effect = graph()->NewNode(javascript()->CreateIterResultObject(),
value, done, context, effect);
ReplaceWithValue(node, value, effect, control);
return Replace(value);
}
Reduction JSBuiltinReducer::ReduceTypedArrayIteratorNext(
Handle<Map> iterator_map, Node* node, IterationKind kind) {
Node* iterator = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* context = NodeProperties::GetContextInput(node);
ElementsKind elements_kind = JSArrayIterator::ElementsKindForInstanceType(
iterator_map->instance_type());
Node* array = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayIteratorObject()),
iterator, effect, control);
Node* check0 = graph()->NewNode(simplified()->ReferenceEqual(), array,
jsgraph()->UndefinedConstant());
Node* branch0 =
graph()->NewNode(common()->Branch(BranchHint::kFalse), check0, control);
Node* vdone_false0;
Node* vfalse0;
Node* efalse0 = effect;
Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
{
// iterator.[[IteratedObject]] !== undefined, continue iterating.
Node* index = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayIteratorIndex(
JS_TYPED_ARRAY_TYPE, elements_kind)),
iterator, efalse0, if_false0);
// typedarray.[[ViewedArrayBuffer]]
Node* buffer = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
array, efalse0, if_false0);
Node* check1 = efalse0 = graph()->NewNode(
simplified()->ArrayBufferWasNeutered(), buffer, efalse0, if_false0);
check1 = graph()->NewNode(simplified()->BooleanNot(), check1);
efalse0 =
graph()->NewNode(simplified()->CheckIf(), check1, efalse0, if_false0);
Node* length = efalse0 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSTypedArrayLength()), array,
efalse0, if_false0);
Node* check2 =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch2 = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check2, if_false0);
Node* vdone_true2;
Node* vtrue2;
Node* etrue2 = efalse0;
Node* if_true2 = graph()->NewNode(common()->IfTrue(), branch2);
{
// iterator.[[NextIndex]] < array.length, continue iterating
vdone_true2 = jsgraph()->FalseConstant();
if (kind == IterationKind::kKeys) {
vtrue2 = index;
}
Node* next_index = graph()->NewNode(simplified()->NumberAdd(), index,
jsgraph()->OneConstant());
next_index = graph()->NewNode(simplified()->NumberToUint32(), next_index);
etrue2 = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorIndex(
JS_TYPED_ARRAY_TYPE, elements_kind)),
iterator, next_index, etrue2, if_true2);
if (kind != IterationKind::kKeys) {
Node* elements = etrue2 = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
array, etrue2, if_true2);
Node* base_ptr = etrue2 = graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForFixedTypedArrayBaseBasePointer()),
elements, etrue2, if_true2);
Node* external_ptr = etrue2 = graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForFixedTypedArrayBaseExternalPointer()),
elements, etrue2, if_true2);
ExternalArrayType array_type = kExternalInt8Array;
switch (elements_kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
case TYPE##_ELEMENTS: \
array_type = kExternal##Type##Array; \
break;
TYPED_ARRAYS(TYPED_ARRAY_CASE)
default:
UNREACHABLE();
#undef TYPED_ARRAY_CASE
}
Node* value = etrue2 =
graph()->NewNode(simplified()->LoadTypedElement(array_type), buffer,
base_ptr, external_ptr, index, etrue2, if_true2);
if (kind == IterationKind::kEntries) {
// Allocate elements for key/value pair
vtrue2 = etrue2 = graph()->NewNode(
javascript()->CreateKeyValueArray(), index, value, etrue2);
} else {
DCHECK(kind == IterationKind::kValues);
vtrue2 = value;
}
}
}
Node* vdone_false2;
Node* vfalse2;
Node* efalse2 = efalse0;
Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
{
// iterator.[[NextIndex]] >= array.length, stop iterating.
vdone_false2 = jsgraph()->TrueConstant();
vfalse2 = jsgraph()->UndefinedConstant();
efalse2 = graph()->NewNode(
simplified()->StoreField(AccessBuilder::ForJSArrayIteratorObject()),
iterator, vfalse2, efalse2, if_false2);
}
if_false0 = graph()->NewNode(common()->Merge(2), if_true2, if_false2);
efalse0 =
graph()->NewNode(common()->EffectPhi(2), etrue2, efalse2, if_false0);
vfalse0 = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue2, vfalse2, if_false0);
vdone_false0 =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vdone_true2, vdone_false2, if_false0);
}
Node* vdone_true0;
Node* vtrue0;
Node* etrue0 = effect;
Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
{
// iterator.[[IteratedObject]] === undefined, the iterator is done.
vdone_true0 = jsgraph()->TrueConstant();
vtrue0 = jsgraph()->UndefinedConstant();
}
control = graph()->NewNode(common()->Merge(2), if_false0, if_true0);
effect = graph()->NewNode(common()->EffectPhi(2), efalse0, etrue0, control);
Node* value =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vfalse0, vtrue0, control);
Node* done =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vdone_false0, vdone_true0, control);
// Create IteratorResult object.
value = effect = graph()->NewNode(javascript()->CreateIterResultObject(),
value, done, context, effect);
ReplaceWithValue(node, value, effect, control);
return Replace(value);
}
Reduction JSBuiltinReducer::ReduceArrayIteratorNext(Node* node) {
Handle<Map> receiver_map;
if (GetMapWitness(node).ToHandle(&receiver_map)) {
switch (receiver_map->instance_type()) {
case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
return ReduceTypedArrayIteratorNext(receiver_map, node,
IterationKind::kKeys);
case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
return ReduceFastArrayIteratorNext(receiver_map, node,
IterationKind::kKeys);
case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
return ReduceTypedArrayIteratorNext(receiver_map, node,
IterationKind::kEntries);
case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
return ReduceFastArrayIteratorNext(receiver_map, node,
IterationKind::kEntries);
case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
return ReduceTypedArrayIteratorNext(receiver_map, node,
IterationKind::kValues);
case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
return ReduceFastArrayIteratorNext(receiver_map, node,
IterationKind::kValues);
default:
// Slow array iterators are not reduced
return NoChange();
}
}
return NoChange();
}
// ES6 section 22.1.3.17 Array.prototype.pop ( )
Reduction JSBuiltinReducer::ReduceArrayPop(Node* node) {
Handle<Map> receiver_map;
@ -1260,6 +1804,14 @@ Reduction JSBuiltinReducer::Reduce(Node* node) {
// Dispatch according to the BuiltinFunctionId if present.
if (!r.HasBuiltinFunctionId()) return NoChange();
switch (r.GetBuiltinFunctionId()) {
case kArrayEntries:
return ReduceArrayIterator(node, IterationKind::kEntries);
case kArrayKeys:
return ReduceArrayIterator(node, IterationKind::kKeys);
case kArrayValues:
return ReduceArrayIterator(node, IterationKind::kValues);
case kArrayIteratorNext:
return ReduceArrayIteratorNext(node);
case kArrayPop:
return ReduceArrayPop(node);
case kArrayPush:
@ -1416,6 +1968,12 @@ Reduction JSBuiltinReducer::Reduce(Node* node) {
case kTypedArrayLength:
return ReduceArrayBufferViewAccessor(
node, JS_TYPED_ARRAY_TYPE, AccessBuilder::ForJSTypedArrayLength());
case kTypedArrayEntries:
return ReduceTypedArrayIterator(node, IterationKind::kEntries);
case kTypedArrayKeys:
return ReduceTypedArrayIterator(node, IterationKind::kKeys);
case kTypedArrayValues:
return ReduceTypedArrayIterator(node, IterationKind::kValues);
default:
break;
}


@ -45,6 +45,16 @@ class V8_EXPORT_PRIVATE JSBuiltinReducer final
Reduction Reduce(Node* node) final;
private:
Reduction ReduceArrayIterator(Node* node, IterationKind kind);
Reduction ReduceTypedArrayIterator(Node* node, IterationKind kind);
Reduction ReduceArrayIterator(Handle<Map> receiver_map, Node* node,
IterationKind kind,
ArrayIteratorKind iter_kind);
Reduction ReduceArrayIteratorNext(Node* node);
Reduction ReduceFastArrayIteratorNext(Handle<Map> iterator_map, Node* node,
IterationKind kind);
Reduction ReduceTypedArrayIteratorNext(Handle<Map> iterator_map, Node* node,
IterationKind kind);
Reduction ReduceArrayPop(Node* node);
Reduction ReduceArrayPush(Node* node);
Reduction ReduceDateGetTime(Node* node);


@ -210,6 +210,8 @@ Reduction JSCreateLowering::Reduce(Node* node) {
return ReduceJSCreateClosure(node);
case IrOpcode::kJSCreateIterResultObject:
return ReduceJSCreateIterResultObject(node);
case IrOpcode::kJSCreateKeyValueArray:
return ReduceJSCreateKeyValueArray(node);
case IrOpcode::kJSCreateLiteralArray:
case IrOpcode::kJSCreateLiteralObject:
return ReduceJSCreateLiteral(node);
@ -721,6 +723,36 @@ Reduction JSCreateLowering::ReduceJSCreateIterResultObject(Node* node) {
return Changed(node);
}
Reduction JSCreateLowering::ReduceJSCreateKeyValueArray(Node* node) {
DCHECK_EQ(IrOpcode::kJSCreateKeyValueArray, node->opcode());
Node* key = NodeProperties::GetValueInput(node, 0);
Node* value = NodeProperties::GetValueInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* array_map = jsgraph()->HeapConstant(
handle(native_context()->js_array_fast_elements_map_index()));
Node* properties = jsgraph()->EmptyFixedArrayConstant();
Node* length = jsgraph()->Constant(2);
AllocationBuilder aa(jsgraph(), effect, graph()->start());
aa.AllocateArray(2, factory()->fixed_array_map());
aa.Store(AccessBuilder::ForFixedArrayElement(FAST_ELEMENTS),
jsgraph()->Constant(0), key);
aa.Store(AccessBuilder::ForFixedArrayElement(FAST_ELEMENTS),
jsgraph()->Constant(1), value);
Node* elements = aa.Finish();
AllocationBuilder a(jsgraph(), elements, graph()->start());
a.Allocate(JSArray::kSize);
a.Store(AccessBuilder::ForMap(), array_map);
a.Store(AccessBuilder::ForJSObjectProperties(), properties);
a.Store(AccessBuilder::ForJSObjectElements(), elements);
a.Store(AccessBuilder::ForJSArrayLength(FAST_ELEMENTS), length);
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
a.FinishAndChange(node);
return Changed(node);
}
Reduction JSCreateLowering::ReduceJSCreateLiteral(Node* node) {
DCHECK(node->opcode() == IrOpcode::kJSCreateLiteralArray ||
node->opcode() == IrOpcode::kJSCreateLiteralObject);


@ -51,6 +51,7 @@ class V8_EXPORT_PRIVATE JSCreateLowering final
Reduction ReduceJSCreateArray(Node* node);
Reduction ReduceJSCreateClosure(Node* node);
Reduction ReduceJSCreateIterResultObject(Node* node);
Reduction ReduceJSCreateKeyValueArray(Node* node);
Reduction ReduceJSCreateLiteral(Node* node);
Reduction ReduceJSCreateFunctionContext(Node* node);
Reduction ReduceJSCreateWithContext(Node* node);


@ -463,6 +463,9 @@ void JSGenericLowering::LowerJSCreateIterResultObject(Node* node) {
ReplaceWithRuntimeCall(node, Runtime::kCreateIterResultObject);
}
void JSGenericLowering::LowerJSCreateKeyValueArray(Node* node) {
ReplaceWithRuntimeCall(node, Runtime::kCreateKeyValueArray);
}
void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
CreateLiteralParameters const& p = CreateLiteralParametersOf(node->op());


@ -445,6 +445,7 @@ CompareOperationHint CompareOperationHintOf(const Operator* op) {
V(ToString, Operator::kNoProperties, 1, 1) \
V(Create, Operator::kEliminatable, 2, 1) \
V(CreateIterResultObject, Operator::kEliminatable, 2, 1) \
V(CreateKeyValueArray, Operator::kEliminatable, 2, 1) \
V(HasProperty, Operator::kNoProperties, 2, 1) \
V(TypeOf, Operator::kPure, 1, 1) \
V(InstanceOf, Operator::kNoProperties, 2, 1) \


@ -458,6 +458,7 @@ class V8_EXPORT_PRIVATE JSOperatorBuilder final
const Operator* CreateClosure(Handle<SharedFunctionInfo> shared_info,
PretenureFlag pretenure);
const Operator* CreateIterResultObject();
const Operator* CreateKeyValueArray();
const Operator* CreateLiteralArray(Handle<FixedArray> constant_elements,
int literal_flags, int literal_index,
int number_of_elements);


@ -128,6 +128,7 @@
V(JSCreateArray) \
V(JSCreateClosure) \
V(JSCreateIterResultObject) \
V(JSCreateKeyValueArray) \
V(JSCreateLiteralArray) \
V(JSCreateLiteralObject) \
V(JSCreateLiteralRegExp) \


@ -1103,6 +1103,9 @@ Type* Typer::Visitor::TypeJSCreateIterResultObject(Node* node) {
return Type::OtherObject();
}
Type* Typer::Visitor::TypeJSCreateKeyValueArray(Node* node) {
return Type::OtherObject();
}
Type* Typer::Visitor::TypeJSCreateLiteralArray(Node* node) {
return Type::OtherObject();
@ -1384,6 +1387,15 @@ Type* Typer::Visitor::JSCallFunctionTyper(Type* fun, Typer* t) {
case kStringIteratorNext:
return Type::OtherObject();
case kArrayEntries:
case kArrayKeys:
case kArrayValues:
case kTypedArrayEntries:
case kTypedArrayKeys:
case kTypedArrayValues:
case kArrayIteratorNext:
return Type::OtherObject();
// Array functions.
case kArrayIndexOf:
case kArrayLastIndexOf:


@ -158,6 +158,7 @@ namespace compiler {
V(DetectableReceiver, kFunction | kOtherObject | kProxy) \
V(Object, kFunction | kOtherObject | kOtherUndetectable) \
V(Receiver, kObject | kProxy) \
V(ReceiverOrUndefined, kReceiver | kUndefined) \
V(StringOrReceiver, kString | kReceiver) \
V(Unique, kBoolean | kUniqueName | kNull | kUndefined | \
kReceiver) \


@ -579,6 +579,10 @@ void Verifier::Visitor::Check(Node* node) {
// Type is OtherObject.
CheckTypeIs(node, Type::OtherObject());
break;
case IrOpcode::kJSCreateKeyValueArray:
// Type is OtherObject.
CheckTypeIs(node, Type::OtherObject());
break;
case IrOpcode::kJSCreateLiteralArray:
case IrOpcode::kJSCreateLiteralObject:
case IrOpcode::kJSCreateLiteralRegExp:


@ -1279,9 +1279,28 @@ inline std::ostream& operator<<(std::ostream& os, UnicodeEncoding encoding) {
return os;
}
enum class IterationKind { kKeys, kValues, kEntries };
inline std::ostream& operator<<(std::ostream& os, IterationKind kind) {
switch (kind) {
case IterationKind::kKeys:
return os << "IterationKind::kKeys";
case IterationKind::kValues:
return os << "IterationKind::kValues";
case IterationKind::kEntries:
return os << "IterationKind::kEntries";
}
UNREACHABLE();
return os;
}
} // namespace internal
} // namespace v8
// Used by js-builtin-reducer to identify whether ReduceArrayIterator() is
// reducing a JSArray method, or a JSTypedArray method.
enum class ArrayIteratorKind { kArray, kTypedArray };
namespace i = v8::internal;
#endif // V8_GLOBALS_H_


@ -2850,6 +2850,10 @@ void Heap::CreateInitialObjects() {
cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid));
set_string_length_protector(*cell);
Handle<Cell> fast_array_iteration_cell = factory->NewCell(
handle(Smi::FromInt(Isolate::kArrayProtectorValid), isolate()));
set_fast_array_iteration_protector(*fast_array_iteration_cell);
set_serialized_templates(empty_fixed_array());
set_weak_stack_trace_list(Smi::kZero);


@ -166,6 +166,7 @@ using v8::MemoryPressureLevel;
V(PropertyCell, has_instance_protector, HasInstanceProtector) \
V(Cell, species_protector, SpeciesProtector) \
V(PropertyCell, string_length_protector, StringLengthProtector) \
V(Cell, fast_array_iteration_protector, FastArrayIterationProtector) \
/* Special numbers */ \
V(HeapNumber, nan_value, NanValue) \
V(HeapNumber, hole_nan_value, HoleNanValue) \


@ -143,6 +143,11 @@ bool Isolate::IsStringLengthOverflowIntact() {
return has_instance_cell->value() == Smi::FromInt(kArrayProtectorValid);
}
bool Isolate::IsFastArrayIterationIntact() {
Cell* fast_iteration = heap()->fast_array_iteration_protector();
return fast_iteration->value() == Smi::FromInt(kArrayProtectorValid);
}
} // namespace internal
} // namespace v8


@ -997,6 +997,9 @@ class Isolate {
bool IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver);
inline bool IsStringLengthOverflowIntact();
// Avoid deopt loops if fast Array Iterators migrate to slow Array Iterators.
inline bool IsFastArrayIterationIntact();
// On intent to set an element in object, make sure that appropriate
// notifications occur if the set is on the elements of the array or
// object prototype. Also ensure that changes to prototype chain between


@ -20345,5 +20345,47 @@ MaybeHandle<Name> FunctionTemplateInfo::TryGetCachedPropertyName(
return MaybeHandle<Name>();
}
// static
ElementsKind JSArrayIterator::ElementsKindForInstanceType(InstanceType type) {
DCHECK_GE(type, FIRST_ARRAY_ITERATOR_TYPE);
DCHECK_LE(type, LAST_ARRAY_ITERATOR_TYPE);
if (type <= LAST_ARRAY_KEY_ITERATOR_TYPE) {
// Should be ignored for key iterators.
return FAST_ELEMENTS;
} else {
ElementsKind kind;
if (type < FIRST_ARRAY_VALUE_ITERATOR_TYPE) {
// Convert `type` to a value iterator from an entries iterator
type = static_cast<InstanceType>(type +
(FIRST_ARRAY_VALUE_ITERATOR_TYPE -
FIRST_ARRAY_KEY_VALUE_ITERATOR_TYPE));
DCHECK_GE(type, FIRST_ARRAY_VALUE_ITERATOR_TYPE);
DCHECK_LE(type, LAST_ARRAY_ITERATOR_TYPE);
}
if (type <= JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE) {
kind =
static_cast<ElementsKind>(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND +
(type - FIRST_ARRAY_VALUE_ITERATOR_TYPE));
DCHECK_LE(kind, LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
} else if (type < JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE) {
kind = static_cast<ElementsKind>(
FIRST_FAST_ELEMENTS_KIND +
(type - JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE));
DCHECK_LE(kind, LAST_FAST_ELEMENTS_KIND);
} else {
// For any slow element cases, the actual elements kind is not known. Simply
// return a slow elements kind in this case. Users of this function must not
// depend on this.
return DICTIONARY_ELEMENTS;
}
DCHECK_LE(kind, LAST_ELEMENTS_KIND);
return kind;
}
}
} // namespace internal
} // namespace v8


@ -7279,6 +7279,10 @@ enum BuiltinFunctionId {
kMathPowHalf,
// These are manually assigned to special getters during bootstrapping.
kArrayBufferByteLength,
kArrayEntries,
kArrayKeys,
kArrayValues,
kArrayIteratorNext,
kDataViewBuffer,
kDataViewByteLength,
kDataViewByteOffset,
@ -7292,7 +7296,10 @@ enum BuiltinFunctionId {
kGlobalIsNaN,
kTypedArrayByteLength,
kTypedArrayByteOffset,
kTypedArrayEntries,
kTypedArrayKeys,
kTypedArrayLength,
kTypedArrayValues,
kSharedArrayBufferByteLength,
kStringIterator,
kStringIteratorNext,
@ -10709,6 +10716,10 @@ class JSArrayIterator : public JSObject {
// allocated.
DECL_ACCESSORS(object_map, Object)
// Return the ElementsKind that a JSArrayIterator's [[IteratedObject]] is
// expected to have, based on its instance type.
static ElementsKind ElementsKindForInstanceType(InstanceType instance_type);
static const int kIteratedObjectOffset = JSObject::kHeaderSize;
static const int kNextIndexOffset = kIteratedObjectOffset + kPointerSize;
static const int kIteratedObjectMapOffset = kNextIndexOffset + kPointerSize;


@ -31,8 +31,10 @@ RUNTIME_FUNCTION(Runtime_FinishArrayPrototypeSetup) {
return Smi::kZero;
}
static void InstallCode(Isolate* isolate, Handle<JSObject> holder,
const char* name, Handle<Code> code, int argc = -1) {
static void InstallCode(
Isolate* isolate, Handle<JSObject> holder, const char* name,
Handle<Code> code, int argc = -1,
BuiltinFunctionId id = static_cast<BuiltinFunctionId>(-1)) {
Handle<String> key = isolate->factory()->InternalizeUtf8String(name);
Handle<JSFunction> optimized =
isolate->factory()->NewFunctionWithoutPrototype(key, code);
@ -41,15 +43,19 @@ static void InstallCode(Isolate* isolate, Handle<JSObject> holder,
} else {
optimized->shared()->set_internal_formal_parameter_count(argc);
}
if (id >= 0) {
optimized->shared()->set_builtin_function_id(id);
}
JSObject::AddProperty(holder, key, optimized, NONE);
}
static void InstallBuiltin(Isolate* isolate, Handle<JSObject> holder,
const char* name, Builtins::Name builtin_name,
int argc = -1) {
static void InstallBuiltin(
Isolate* isolate, Handle<JSObject> holder, const char* name,
Builtins::Name builtin_name, int argc = -1,
BuiltinFunctionId id = static_cast<BuiltinFunctionId>(-1)) {
InstallCode(isolate, holder, name,
handle(isolate->builtins()->builtin(builtin_name), isolate),
argc);
handle(isolate->builtins()->builtin(builtin_name), isolate), argc,
id);
}
RUNTIME_FUNCTION(Runtime_SpecialArrayFunctions) {
@ -71,10 +77,12 @@ RUNTIME_FUNCTION(Runtime_SpecialArrayFunctions) {
InstallBuiltin(isolate, holder, "splice", Builtins::kArraySplice);
InstallBuiltin(isolate, holder, "includes", Builtins::kArrayIncludes, 2);
InstallBuiltin(isolate, holder, "indexOf", Builtins::kArrayIndexOf, 2);
InstallBuiltin(isolate, holder, "keys", Builtins::kArrayPrototypeKeys, 0);
InstallBuiltin(isolate, holder, "values", Builtins::kArrayPrototypeValues, 0);
InstallBuiltin(isolate, holder, "keys", Builtins::kArrayPrototypeKeys, 0,
kArrayKeys);
InstallBuiltin(isolate, holder, "values", Builtins::kArrayPrototypeValues, 0,
kArrayValues);
InstallBuiltin(isolate, holder, "entries", Builtins::kArrayPrototypeEntries,
0);
0, kArrayEntries);
return *holder;
}


@ -925,6 +925,17 @@ RUNTIME_FUNCTION(Runtime_CreateIterResultObject) {
return *isolate->factory()->NewJSIteratorResult(value, done->BooleanValue());
}
RUNTIME_FUNCTION(Runtime_CreateKeyValueArray) {
HandleScope scope(isolate);
DCHECK_EQ(2, args.length());
CONVERT_ARG_HANDLE_CHECKED(Object, key, 0);
CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
Handle<FixedArray> elements = isolate->factory()->NewFixedArray(2);
elements->set(0, *key);
elements->set(1, *value);
return *isolate->factory()->NewJSArrayWithElements(elements, FAST_ELEMENTS,
2);
}
RUNTIME_FUNCTION(Runtime_IsAccessCheckNeeded) {
SealHandleScope shs(isolate);


@ -426,6 +426,7 @@ namespace internal {
F(Compare, 3, 1) \
F(HasInPrototypeChain, 2, 1) \
F(CreateIterResultObject, 2, 1) \
F(CreateKeyValueArray, 2, 1) \
F(IsAccessCheckNeeded, 1, 1) \
F(CreateDataProperty, 3, 1)


@ -0,0 +1,243 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --turbo --turbo-escape --allow-natives-syntax
"use strict";
const kDeoptimized = 2;
const kTurbofanned = 7;
const kInterpreted = 8;
function GetOptimizationStatus(fn) {
let status = %GetOptimizationStatus(fn);
switch (status) {
case kInterpreted: // Treat interpreted frames as unoptimized
status = kDeoptimized;
break;
}
return status;
}
let global = this;
let tests = {
FastElementsKind() {
let runners = {
FAST_SMI_ELEMENTS(array) {
let sum = 0;
for (let x of array) sum += x;
return sum;
},
FAST_HOLEY_SMI_ELEMENTS(array) {
let sum = 0;
for (let x of array) {
if (x) sum += x;
}
return sum;
},
FAST_ELEMENTS(array) {
let ret = "";
for (let str of array) ret += `> ${str}`;
return ret;
},
FAST_HOLEY_ELEMENTS(array) {
let ret = "";
for (let str of array) ret += `> ${str}`;
return ret;
},
FAST_DOUBLE_ELEMENTS(array) {
let sum = 0.0;
for (let x of array) sum += x;
return sum;
},
FAST_HOLEY_DOUBLE_ELEMENTS(array) {
let sum = 0.0;
for (let x of array) {
if (x) sum += x;
}
return sum;
}
};
let tests = {
FAST_SMI_ELEMENTS: {
array: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
expected: 55,
array2: [1, 2, 3],
expected2: 6
},
FAST_HOLEY_SMI_ELEMENTS: {
array: [1, , 3, , 5, , 7, , 9, ,],
expected: 25,
array2: [1, , 3],
expected2: 4
},
FAST_ELEMENTS: {
array: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"],
expected: "> a> b> c> d> e> f> g> h> i> j",
array2: ["a", "b", "c"],
expected2: "> a> b> c"
},
FAST_HOLEY_ELEMENTS: {
array: ["a", , "c", , "e", , "g", , "i", ,],
expected: "> a> undefined> c> undefined> e> undefined> g" +
"> undefined> i> undefined",
array2: ["a", , "c"],
expected2: "> a> undefined> c"
},
FAST_DOUBLE_ELEMENTS: {
array: [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
expected: 5.5,
array2: [0.6, 0.4, 0.2],
expected2: 1.2
},
FAST_HOLEY_DOUBLE_ELEMENTS: {
array: [0.1, , 0.3, , 0.5, , 0.7, , 0.9, ,],
expected: 2.5,
array2: [0.1, , 0.3],
expected2: 0.4
}
};
for (let key of Object.keys(runners)) {
let fn = runners[key];
let { array, expected, array2, expected2 } = tests[key];
// Warmup:
fn(array);
fn(array);
%OptimizeFunctionOnNextCall(fn);
fn(array);
// TODO(bmeurer): FAST_HOLEY_DOUBLE_ELEMENTS maps generally deopt when
// a hole is encountered. Test should be fixed once that is corrected.
let status = /HOLEY_DOUBLE/.test(key) ? kDeoptimized : kTurbofanned;
assertEquals(status, GetOptimizationStatus(fn), key);
assertEquals(expected, fn(array), key);
assertEquals(status, GetOptimizationStatus(fn), key);
// Check no deopt when another array with the same map is used
assertTrue(%HaveSameMap(array, array2), key);
assertEquals(status, GetOptimizationStatus(fn), key);
assertEquals(expected2, fn(array2), key);
// CheckMaps bailout
let newArray = Object.defineProperty(
[1, 2, 3], 2, { enumerable: false, configurable: false,
get() { return 7; } });
fn(newArray);
assertEquals(kDeoptimized, GetOptimizationStatus(fn), key);
}
},
TypedArrays() {
let tests = {
Uint8Array: {
array: new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, -1, 256]),
expected: 291,
array2: new Uint8Array([1, 2, 3]),
expected2: 6
},
Int8Array: {
array: new Int8Array([1, 2, 3, 4, 5, 6, 7, 8, -129, 128]),
expected: 35,
array2: new Int8Array([1, 2, 3]),
expected2: 6
},
Uint16Array: {
array: new Uint16Array([1, 2, 3, 4, 5, 6, 7, 8, -1, 0x10000]),
expected: 65571,
array2: new Uint16Array([1, 2, 3]),
expected2: 6
},
Int16Array: {
array: new Int16Array([1, 2, 3, 4, 5, 6, 7, 8, -32769, 0x7FFF]),
expected: 65570,
array2: new Int16Array([1, 2, 3]),
expected2: 6
},
Uint32Array: {
array: new Uint32Array([1, 2, 3, 4, 5, 6, 7, 8, -1, 0x100000000]),
expected: 4294967331,
array2: new Uint32Array([1, 2, 3]),
expected2: 6
},
Int32Array: {
array: new Int32Array([1, 2, 3, 4, 5, 6, 7, 8,
-2147483649, 0x7FFFFFFF]),
expected: 4294967330,
array2: new Int32Array([1, 2, 3]),
expected2: 6
},
Float32Array: {
array: new Float32Array([9.5, 8.0, 7.0, 7.0, 5.0, 4.0, 3.0, 2.0]),
expected: 45.5,
array2: new Float32Array([10.5, 5.5, 1.5]),
expected2: 17.5
},
Float64Array: {
array: new Float64Array([9.5, 8.0, 7.0, 7.0, 5.0, 4.0, 3.0, 2.0]),
expected: 45.5,
array2: new Float64Array([10.5, 5.5, 1.5]),
expected2: 17.5
},
Uint8ClampedArray: {
array: new Uint8ClampedArray([4.3, 7.45632, 3.14, 4.61, 5.0004, 6.493,
7.12, 8, 1.7, 3.6]),
expected: 51,
array2: new Uint8ClampedArray([1, 2, 3]),
expected2: 6
}
};
for (let key of Object.keys(tests)) {
let test = tests[key];
let { array, expected, array2, expected2 } = test;
let sum = function(array) {
let ret = 0;
for (let x of array) ret += x;
return ret;
};
// Warmup
sum(array);
sum(array);
%OptimizeFunctionOnNextCall(sum);
assertEquals(expected, sum(array), key);
assertEquals(kTurbofanned, GetOptimizationStatus(sum), key);
// Not deoptimized when called on typed array of same type / map
assertTrue(%HaveSameMap(array, array2));
assertEquals(expected2, sum(array2), key);
assertEquals(kTurbofanned, GetOptimizationStatus(sum), key);
// Throw when detached
let clone = new array.constructor(array);
%ArrayBufferNeuter(clone.buffer);
assertThrows(() => sum(clone), TypeError);
}
}
};
for (let name of Object.keys(tests)) {
let test = tests[name];
test();
}


@ -594,14 +594,22 @@
'regress/regress-353551': [SKIP],
}], # 'arch == ppc64'
##############################################################################
['variant == nocrankshaft', {
'es6/array-iterator-turbo': [SKIP],
}], # variant == nocrankshaft
##############################################################################
['variant == stress', {
'es6/array-iterator-turbo': [SKIP],
'ignition/regress-599001-verifyheap': [SKIP],
'unicode-test': [SKIP],
}], # variant == stress
##############################################################################
['variant == turbofan_opt', {
'es6/array-iterator-turbo': [SKIP],
# TODO(jarin/mstarzinger): Investigate debugger issues with TurboFan.
'debug-evaluate-locals': [FAIL],