[turbofan] Split JSNativeContextSpecialization::BuildElementAccess

Change-Id: I69a0ac55d5a9aeae91e913e223a58c37b81091d3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4147623
Commit-Queue: Maya Lekova <mslekova@chromium.org>
Auto-Submit: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Nico Hartmann <nicohartmann@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85158}
Author: Nico Hartmann <nicohartmann@chromium.org>
Date: 2023-01-09 18:05:46 +01:00 (committed by V8 LUCI CQ)
Parent: dc7a7545a4
Commit: 0b327a7c61
2 changed files with 560 additions and 546 deletions

src/compiler/js-native-context-specialization.cc

@@ -3123,6 +3123,369 @@ JSNativeContextSpecialization::BuildElementAccess(
if (IsTypedArrayElementsKind(elements_kind) ||
IsRabGsabTypedArrayElementsKind(elements_kind)) {
return BuildElementAccessForTypedArrayOrRabGsabTypedArray(
elements_kind, receiver, index, value, effect, control, context,
access_info, keyed_mode);
}
// Load the elements for the {receiver}.
Node* elements = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()), receiver,
effect, control);
// Don't try to store to a copy-on-write backing store (unless supported by
// the store mode).
if (IsAnyStore(keyed_mode.access_mode()) &&
IsSmiOrObjectElementsKind(elements_kind) &&
!IsCOWHandlingStoreMode(keyed_mode.store_mode())) {
effect =
graph()->NewNode(simplified()->CheckMaps(
CheckMapsFlag::kNone,
ZoneHandleSet<Map>(factory()->fixed_array_map())),
elements, effect, control);
}
// Check if the {receiver} is a JSArray.
bool receiver_is_jsarray = HasOnlyJSArrayMaps(broker(), receiver_maps);
// Load the length of the {receiver}.
Node* length = effect =
receiver_is_jsarray
? graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForJSArrayLength(elements_kind)),
receiver, effect, control)
: graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
elements, effect, control);
// Check if we might need to grow the {elements} backing store.
if (keyed_mode.IsStore() && IsGrowStoreMode(keyed_mode.store_mode())) {
// For growing stores we validate the {index} below.
} else if (keyed_mode.IsLoad() &&
keyed_mode.load_mode() == LOAD_IGNORE_OUT_OF_BOUNDS &&
CanTreatHoleAsUndefined(receiver_maps)) {
// Check that the {index} is a valid array index; we do the actual
// bounds check below and just skip the load if it's out of
// bounds for the {receiver}.
index = effect = graph()->NewNode(
simplified()->CheckBounds(FeedbackSource(),
CheckBoundsFlag::kConvertStringAndMinusZero),
index, jsgraph()->Constant(Smi::kMaxValue), effect, control);
} else {
// Check that the {index} is in the valid range for the {receiver}.
index = effect = graph()->NewNode(
simplified()->CheckBounds(FeedbackSource(),
CheckBoundsFlag::kConvertStringAndMinusZero),
index, length, effect, control);
}
// Compute the element access.
Type element_type = Type::NonInternal();
MachineType element_machine_type = MachineType::AnyTagged();
if (IsDoubleElementsKind(elements_kind)) {
element_type = Type::Number();
element_machine_type = MachineType::Float64();
} else if (IsSmiElementsKind(elements_kind)) {
element_type = Type::SignedSmall();
element_machine_type = MachineType::TaggedSigned();
}
ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
element_type, element_machine_type,
kFullWriteBarrier};
// Access the actual element.
if (keyed_mode.access_mode() == AccessMode::kLoad) {
// Compute the real element access type, which includes the hole in case
// of holey backing stores.
if (IsHoleyElementsKind(elements_kind)) {
element_access.type =
Type::Union(element_type, Type::Hole(), graph()->zone());
}
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
}
// Check if we can return undefined for out-of-bounds loads.
if (keyed_mode.load_mode() == LOAD_IGNORE_OUT_OF_BOUNDS &&
CanTreatHoleAsUndefined(receiver_maps)) {
Node* check =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch =
graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* vtrue;
{
// Do a real bounds check against {length}. This is in order to
// protect against a potential typer bug leading to the elimination of
// the NumberLessThan above.
index = etrue = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero |
CheckBoundsFlag::kAbortOnOutOfBounds),
index, length, etrue, if_true);
// Perform the actual load
vtrue = etrue =
graph()->NewNode(simplified()->LoadElement(element_access),
elements, index, etrue, if_true);
// Handle loading from holey backing stores correctly, by either
// mapping the hole to undefined if possible, or deoptimizing
// otherwise.
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Turn the hole into undefined.
vtrue = graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(),
vtrue);
} else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
// Return the signaling NaN hole directly if all uses are
// truncating.
vtrue = etrue = graph()->NewNode(
simplified()->CheckFloat64Hole(
CheckFloat64HoleMode::kAllowReturnHole, FeedbackSource()),
vtrue, etrue, if_true);
}
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse;
{
// Materialize undefined for out-of-bounds loads.
vfalse = jsgraph()->UndefinedConstant();
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, control);
} else {
// Perform the actual load.
value = effect =
graph()->NewNode(simplified()->LoadElement(element_access), elements,
index, effect, control);
// Handle loading from holey backing stores correctly, by either mapping
// the hole to undefined if possible, or deoptimizing otherwise.
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Check if we are allowed to turn the hole into undefined.
if (CanTreatHoleAsUndefined(receiver_maps)) {
// Turn the hole into undefined.
value = graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(),
value);
} else {
// Bailout if we see the hole.
value = effect = graph()->NewNode(simplified()->CheckNotTaggedHole(),
value, effect, control);
}
} else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
// Perform the hole check on the result.
CheckFloat64HoleMode mode = CheckFloat64HoleMode::kNeverReturnHole;
// Check if we are allowed to return the hole directly.
if (CanTreatHoleAsUndefined(receiver_maps)) {
// Return the signaling NaN hole directly if all uses are
// truncating.
mode = CheckFloat64HoleMode::kAllowReturnHole;
}
value = effect = graph()->NewNode(
simplified()->CheckFloat64Hole(mode, FeedbackSource()), value,
effect, control);
}
}
} else if (keyed_mode.access_mode() == AccessMode::kHas) {
// For packed arrays with the NoElementsProtector valid, a bounds check
// is equivalent to HasProperty.
value = effect = graph()->NewNode(simplified()->SpeculativeNumberLessThan(
NumberOperationHint::kSignedSmall),
index, length, effect, control);
if (IsHoleyElementsKind(elements_kind)) {
// If the index is in bounds, do a load and hole check.
Node* branch = graph()->NewNode(common()->Branch(), value, control);
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse = jsgraph()->FalseConstant();
element_access.type =
Type::Union(element_type, Type::Hole(), graph()->zone());
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
}
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* checked = etrue = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, length, etrue, if_true);
Node* element = etrue =
graph()->NewNode(simplified()->LoadElement(element_access), elements,
checked, etrue, if_true);
Node* vtrue;
if (CanTreatHoleAsUndefined(receiver_maps)) {
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Check if we are allowed to turn the hole into undefined.
// Turn the hole into undefined.
vtrue = graph()->NewNode(simplified()->ReferenceEqual(), element,
jsgraph()->TheHoleConstant());
} else {
vtrue =
graph()->NewNode(simplified()->NumberIsFloat64Hole(), element);
}
// has == !IsHole
vtrue = graph()->NewNode(simplified()->BooleanNot(), vtrue);
} else {
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Bailout if we see the hole.
etrue = graph()->NewNode(simplified()->CheckNotTaggedHole(), element,
etrue, if_true);
} else {
etrue = graph()->NewNode(
simplified()->CheckFloat64Hole(
CheckFloat64HoleMode::kNeverReturnHole, FeedbackSource()),
element, etrue, if_true);
}
vtrue = jsgraph()->TrueConstant();
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, control);
}
} else {
DCHECK(keyed_mode.access_mode() == AccessMode::kStore ||
keyed_mode.access_mode() == AccessMode::kStoreInLiteral ||
keyed_mode.access_mode() == AccessMode::kDefine);
if (IsSmiElementsKind(elements_kind)) {
value = effect = graph()->NewNode(
simplified()->CheckSmi(FeedbackSource()), value, effect, control);
} else if (IsDoubleElementsKind(elements_kind)) {
value = effect = graph()->NewNode(
simplified()->CheckNumber(FeedbackSource()), value, effect, control);
// Make sure we do not store signaling NaNs into double arrays.
value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
}
// Ensure that copy-on-write backing store is writable.
if (IsSmiOrObjectElementsKind(elements_kind) &&
keyed_mode.store_mode() == STORE_HANDLE_COW) {
elements = effect =
graph()->NewNode(simplified()->EnsureWritableFastElements(), receiver,
elements, effect, control);
} else if (IsGrowStoreMode(keyed_mode.store_mode())) {
// Determine the length of the {elements} backing store.
Node* elements_length = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
elements, effect, control);
// Validate the {index} depending on holeyness:
//
// For HOLEY_*_ELEMENTS the {index} must not exceed the {elements}
// backing store capacity plus the maximum allowed gap, as otherwise
// the (potential) backing store growth would normalize and thus
// the elements kind of the {receiver} would change to slow mode.
//
// For PACKED_*_ELEMENTS the {index} must be within the range
// [0,length+1[ to be valid. In case {index} equals {length},
// the {receiver} will be extended, but kept packed.
Node* limit =
IsHoleyElementsKind(elements_kind)
? graph()->NewNode(simplified()->NumberAdd(), elements_length,
jsgraph()->Constant(JSObject::kMaxGap))
: graph()->NewNode(simplified()->NumberAdd(), length,
jsgraph()->OneConstant());
index = effect = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, limit, effect, control);
// Grow {elements} backing store if necessary.
GrowFastElementsMode mode =
IsDoubleElementsKind(elements_kind)
? GrowFastElementsMode::kDoubleElements
: GrowFastElementsMode::kSmiOrObjectElements;
elements = effect = graph()->NewNode(
simplified()->MaybeGrowFastElements(mode, FeedbackSource()), receiver,
elements, index, elements_length, effect, control);
// If we didn't grow {elements}, it might still be COW, in which case we
// copy it now.
if (IsSmiOrObjectElementsKind(elements_kind) &&
keyed_mode.store_mode() == STORE_AND_GROW_HANDLE_COW) {
elements = effect =
graph()->NewNode(simplified()->EnsureWritableFastElements(),
receiver, elements, effect, control);
}
// Also update the "length" property if {receiver} is a JSArray.
if (receiver_is_jsarray) {
Node* check =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch = graph()->NewNode(common()->Branch(), check, control);
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
{
// We don't need to do anything, the {index} is within
// the valid bounds for the JSArray {receiver}.
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
{
// Update the JSArray::length field. Since this is observable,
// there must be no other check after this.
Node* new_length = graph()->NewNode(simplified()->NumberAdd(), index,
jsgraph()->OneConstant());
efalse = graph()->NewNode(
simplified()->StoreField(
AccessBuilder::ForJSArrayLength(elements_kind)),
receiver, new_length, efalse, if_false);
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect =
graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
}
}
// Perform the actual element access.
effect = graph()->NewNode(simplified()->StoreElement(element_access),
elements, index, value, effect, control);
}
return ValueEffectControl(value, effect, control);
}
JSNativeContextSpecialization::ValueEffectControl
JSNativeContextSpecialization::
BuildElementAccessForTypedArrayOrRabGsabTypedArray(
ElementsKind elements_kind, Node* receiver, Node* index, Node* value,
Node* effect, Node* control, Node* context,
ElementAccessInfo const& access_info,
KeyedAccessMode const& keyed_mode) {
DCHECK(IsTypedArrayElementsKind(elements_kind) ||
IsRabGsabTypedArrayElementsKind(elements_kind));
DCHECK_IMPLIES(IsRabGsabTypedArrayElementsKind(elements_kind),
v8_flags.turbo_rab_gsab);
Node* buffer_or_receiver = receiver;
Node* length;
Node* base_pointer;
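
For orientation, the fast-elements load path assembled above has simple semantics: branch on index < length; in bounds, load and map the hole to undefined; out of bounds, materialize undefined instead of deoptimizing. A minimal C++ sketch of those semantics, with std::optional standing in for both the hole and JS undefined, and all names illustrative rather than V8's:

#include <cstddef>
#include <cstdio>
#include <optional>
#include <vector>

// nullopt stands in for both the hole and JS undefined in this model.
using Element = std::optional<double>;

// Semantics of the LOAD_IGNORE_OUT_OF_BOUNDS branch: in-bounds loads map
// the hole to undefined; out-of-bounds loads yield undefined, no deopt.
Element LoadIgnoreOutOfBounds(const std::vector<Element>& elements,
                              std::size_t index) {
  if (index < elements.size()) {
    return elements[index];  // vtrue: the element (hole already == nullopt)
  }
  return std::nullopt;  // vfalse: UndefinedConstant()
}

int main() {
  std::vector<Element> holey = {1.0, std::nullopt, 3.0};
  std::printf("%d\n", LoadIgnoreOutOfBounds(holey, 1).has_value());   // 0: hole
  std::printf("%d\n", LoadIgnoreOutOfBounds(holey, 10).has_value());  // 0: OOB
  std::printf("%d\n", LoadIgnoreOutOfBounds(holey, 2).has_value());   // 1: 3.0
  return 0;
}
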
@@ -3145,8 +3508,7 @@ JSNativeContextSpecialization::BuildElementAccess(
Node* dead = jsgraph_->Dead();
return ValueEffectControl{dead, dead, dead};
} else {
- length =
- jsgraph()->Constant(static_cast<double>(typed_array->length()));
+ length = jsgraph()->Constant(static_cast<double>(typed_array->length()));
DCHECK(!typed_array->is_on_heap());
// Load the (known) data pointer for the {receiver} and set
@@ -3176,9 +3538,8 @@ JSNativeContextSpecialization::BuildElementAccess(
if (JSTypedArray::kMaxSizeInHeap == 0) {
base_pointer = jsgraph()->ZeroConstant();
} else {
- base_pointer = effect =
- graph()->NewNode(simplified()->LoadField(
- AccessBuilder::ForJSTypedArrayBasePointer()),
+ base_pointer = effect = graph()->NewNode(
+ simplified()->LoadField(AccessBuilder::ForJSTypedArrayBasePointer()),
receiver, effect, control);
}
@@ -3192,8 +3553,7 @@ JSNativeContextSpecialization::BuildElementAccess(
// See if we can skip the detaching check.
if (!dependencies()->DependOnArrayBufferDetachingProtector()) {
// Load the buffer for the {receiver}.
- Node* buffer =
- typed_array.has_value()
+ Node* buffer = typed_array.has_value()
? jsgraph()->Constant(typed_array->buffer())
: (effect = graph()->NewNode(
simplified()->LoadField(
@@ -3212,8 +3572,8 @@ JSNativeContextSpecialization::BuildElementAccess(
jsgraph()->Constant(JSArrayBuffer::WasDetachedBit::kMask)),
jsgraph()->ZeroConstant());
effect = graph()->NewNode(
- simplified()->CheckIf(DeoptimizeReason::kArrayBufferWasDetached),
- check, effect, control);
+ simplified()->CheckIf(DeoptimizeReason::kArrayBufferWasDetached), check,
+ effect, control);
// Retain the {buffer} instead of {receiver} to reduce live ranges.
buffer_or_receiver = buffer;
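
The detaching check above compiles to: load the buffer's bit field, mask it with JSArrayBuffer::WasDetachedBit::kMask, and deoptimize when the bit is set. A hedged C++ model of that check, with the bit position assumed for the sketch:

#include <cstdint>
#include <stdexcept>

// Assumed bit layout for the sketch; the real mask is
// JSArrayBuffer::WasDetachedBit::kMask.
constexpr uint32_t kWasDetachedMask = 1u << 1;

// Deopt (modeled as a throw) when the buffer has been detached.
void CheckArrayBufferNotDetached(uint32_t bit_field) {
  if ((bit_field & kWasDetachedMask) != 0) {
    throw std::runtime_error("deopt: kArrayBufferWasDetached");
  }
}

int main() {
  CheckArrayBufferNotDetached(0);  // attached: passes
  return 0;
}
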
@@ -3228,8 +3588,8 @@ JSNativeContextSpecialization::BuildElementAccess(
// Only check that the {index} is in SignedSmall range. We do the actual
// bounds check below and just skip the property access if it's out of
// bounds for the {receiver}.
- index = effect = graph()->NewNode(
- simplified()->CheckSmi(FeedbackSource()), index, effect, control);
+ index = effect = graph()->NewNode(simplified()->CheckSmi(FeedbackSource()),
+ index, effect, control);
// Cast the {index} to Unsigned32 range, so that the bounds checks
// below are performed on unsigned values, which means that all the
@@ -3239,8 +3599,8 @@ JSNativeContextSpecialization::BuildElementAccess(
} else {
// Check that the {index} is in the valid range for the {receiver}.
index = effect = graph()->NewNode(
- simplified()->CheckBounds(
- FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
+ simplified()->CheckBounds(FeedbackSource(),
+ CheckBoundsFlag::kConvertStringAndMinusZero),
index, length, effect, control);
situation = kBoundsCheckDone;
}
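
The Unsigned32 cast mentioned above enables the classic single-comparison bounds check: treating the index as unsigned makes index < length also reject negative indices, since they wrap to large unsigned values. A small illustration, not V8 code:

#include <cassert>
#include <cstdint>

// One unsigned compare covers both "index >= 0" and "index < length".
bool InBounds(int32_t index, uint32_t length) {
  return static_cast<uint32_t>(index) < length;
}

int main() {
  assert(InBounds(3, 10));
  assert(!InBounds(10, 10));
  assert(!InBounds(-1, 10));  // wraps to 0xFFFFFFFF, which is >= 10
  return 0;
}
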
@@ -3264,8 +3624,8 @@ JSNativeContextSpecialization::BuildElementAccess(
// Do a real bounds check against {length}. This is in order to
// protect against a potential typer bug leading to the elimination
// of the NumberLessThan above.
- index = etrue = graph()->NewNode(
- simplified()->CheckBounds(
+ index = etrue =
+ graph()->NewNode(simplified()->CheckBounds(
FeedbackSource(),
CheckBoundsFlag::kConvertStringAndMinusZero |
CheckBoundsFlag::kAbortOnOutOfBounds),
@@ -3274,8 +3634,8 @@ JSNativeContextSpecialization::BuildElementAccess(
// Perform the actual load
vtrue = etrue = graph()->NewNode(
simplified()->LoadTypedElement(external_array_type),
- buffer_or_receiver, base_pointer, external_pointer, index,
- etrue, if_true);
+ buffer_or_receiver, base_pointer, external_pointer, index, etrue,
+ if_true);
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
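
The kAbortOnOutOfBounds re-check above is defense in depth: the access already sits in a branch guarded by NumberLessThan(index, length), so the second check should never fail, and if a typer bug ever eliminated the guard the generated code aborts instead of accessing out of bounds. A sketch of the pattern, not V8 code:

#include <cstdint>
#include <cstdlib>

// Re-validate a bound that a dominating branch should already guarantee;
// abort (rather than deopt) if the impossible happens.
uint32_t CheckedBoundsAbortOnOOB(uint32_t index, uint32_t length) {
  if (index >= length) std::abort();  // unreachable if the guard held
  return index;
}

int main() {
  return static_cast<int>(CheckedBoundsAbortOnOOB(2, 10));  // returns 2
}
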
@@ -3344,8 +3704,8 @@ JSNativeContextSpecialization::BuildElementAccess(
// Do a real bounds check against {length}. This is in order to
// protect against a potential typer bug leading to the elimination
// of the NumberLessThan above.
- index = etrue = graph()->NewNode(
- simplified()->CheckBounds(
+ index = etrue =
+ graph()->NewNode(simplified()->CheckBounds(
FeedbackSource(),
CheckBoundsFlag::kConvertStringAndMinusZero |
CheckBoundsFlag::kAbortOnOutOfBounds),
@@ -3354,8 +3714,8 @@ JSNativeContextSpecialization::BuildElementAccess(
// Perform the actual store.
etrue = graph()->NewNode(
simplified()->StoreTypedElement(external_array_type),
- buffer_or_receiver, base_pointer, external_pointer, index,
- value, etrue, if_true);
+ buffer_or_receiver, base_pointer, external_pointer, index, value,
+ etrue, if_true);
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
@@ -3390,356 +3750,6 @@ JSNativeContextSpecialization::BuildElementAccess(
}
break;
}
} else {
// Load the elements for the {receiver}.
Node* elements = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()), receiver,
effect, control);
// Don't try to store to a copy-on-write backing store (unless supported by
// the store mode).
if (IsAnyStore(keyed_mode.access_mode()) &&
IsSmiOrObjectElementsKind(elements_kind) &&
!IsCOWHandlingStoreMode(keyed_mode.store_mode())) {
effect = graph()->NewNode(
simplified()->CheckMaps(
CheckMapsFlag::kNone,
ZoneHandleSet<Map>(factory()->fixed_array_map())),
elements, effect, control);
}
// Check if the {receiver} is a JSArray.
bool receiver_is_jsarray = HasOnlyJSArrayMaps(broker(), receiver_maps);
// Load the length of the {receiver}.
Node* length = effect =
receiver_is_jsarray
? graph()->NewNode(
simplified()->LoadField(
AccessBuilder::ForJSArrayLength(elements_kind)),
receiver, effect, control)
: graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
elements, effect, control);
// Check if we might need to grow the {elements} backing store.
if (keyed_mode.IsStore() && IsGrowStoreMode(keyed_mode.store_mode())) {
// For growing stores we validate the {index} below.
} else if (keyed_mode.IsLoad() &&
keyed_mode.load_mode() == LOAD_IGNORE_OUT_OF_BOUNDS &&
CanTreatHoleAsUndefined(receiver_maps)) {
// Check that the {index} is a valid array index; we do the actual
// bounds check below and just skip the load if it's out of
// bounds for the {receiver}.
index = effect = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, jsgraph()->Constant(Smi::kMaxValue), effect, control);
} else {
// Check that the {index} is in the valid range for the {receiver}.
index = effect = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, length, effect, control);
}
// Compute the element access.
Type element_type = Type::NonInternal();
MachineType element_machine_type = MachineType::AnyTagged();
if (IsDoubleElementsKind(elements_kind)) {
element_type = Type::Number();
element_machine_type = MachineType::Float64();
} else if (IsSmiElementsKind(elements_kind)) {
element_type = Type::SignedSmall();
element_machine_type = MachineType::TaggedSigned();
}
ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
element_type, element_machine_type,
kFullWriteBarrier};
// Access the actual element.
if (keyed_mode.access_mode() == AccessMode::kLoad) {
// Compute the real element access type, which includes the hole in case
// of holey backing stores.
if (IsHoleyElementsKind(elements_kind)) {
element_access.type =
Type::Union(element_type, Type::Hole(), graph()->zone());
}
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
}
// Check if we can return undefined for out-of-bounds loads.
if (keyed_mode.load_mode() == LOAD_IGNORE_OUT_OF_BOUNDS &&
CanTreatHoleAsUndefined(receiver_maps)) {
Node* check =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check, control);
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* vtrue;
{
// Do a real bounds check against {length}. This is in order to
// protect against a potential typer bug leading to the elimination of
// the NumberLessThan above.
index = etrue =
graph()->NewNode(simplified()->CheckBounds(
FeedbackSource(),
CheckBoundsFlag::kConvertStringAndMinusZero |
CheckBoundsFlag::kAbortOnOutOfBounds),
index, length, etrue, if_true);
// Perform the actual load
vtrue = etrue =
graph()->NewNode(simplified()->LoadElement(element_access),
elements, index, etrue, if_true);
// Handle loading from holey backing stores correctly, by either
// mapping the hole to undefined if possible, or deoptimizing
// otherwise.
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Turn the hole into undefined.
vtrue = graph()->NewNode(
simplified()->ConvertTaggedHoleToUndefined(), vtrue);
} else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
// Return the signaling NaN hole directly if all uses are
// truncating.
vtrue = etrue = graph()->NewNode(
simplified()->CheckFloat64Hole(
CheckFloat64HoleMode::kAllowReturnHole, FeedbackSource()),
vtrue, etrue, if_true);
}
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse;
{
// Materialize undefined for out-of-bounds loads.
vfalse = jsgraph()->UndefinedConstant();
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect =
graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
value =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, control);
} else {
// Perform the actual load.
value = effect =
graph()->NewNode(simplified()->LoadElement(element_access),
elements, index, effect, control);
// Handle loading from holey backing stores correctly, by either mapping
// the hole to undefined if possible, or deoptimizing otherwise.
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Check if we are allowed to turn the hole into undefined.
if (CanTreatHoleAsUndefined(receiver_maps)) {
// Turn the hole into undefined.
value = graph()->NewNode(
simplified()->ConvertTaggedHoleToUndefined(), value);
} else {
// Bailout if we see the hole.
value = effect = graph()->NewNode(
simplified()->CheckNotTaggedHole(), value, effect, control);
}
} else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
// Perform the hole check on the result.
CheckFloat64HoleMode mode = CheckFloat64HoleMode::kNeverReturnHole;
// Check if we are allowed to return the hole directly.
if (CanTreatHoleAsUndefined(receiver_maps)) {
// Return the signaling NaN hole directly if all uses are
// truncating.
mode = CheckFloat64HoleMode::kAllowReturnHole;
}
value = effect = graph()->NewNode(
simplified()->CheckFloat64Hole(mode, FeedbackSource()), value,
effect, control);
}
}
} else if (keyed_mode.access_mode() == AccessMode::kHas) {
// For packed arrays with the NoElementsProtector valid, a bounds check
// is equivalent to HasProperty.
value = effect = graph()->NewNode(simplified()->SpeculativeNumberLessThan(
NumberOperationHint::kSignedSmall),
index, length, effect, control);
if (IsHoleyElementsKind(elements_kind)) {
// If the index is in bounds, do a load and hole check.
Node* branch = graph()->NewNode(common()->Branch(), value, control);
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
Node* vfalse = jsgraph()->FalseConstant();
element_access.type =
Type::Union(element_type, Type::Hole(), graph()->zone());
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
element_access.machine_type = MachineType::AnyTagged();
}
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
Node* checked = etrue = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, length, etrue, if_true);
Node* element = etrue =
graph()->NewNode(simplified()->LoadElement(element_access),
elements, checked, etrue, if_true);
Node* vtrue;
if (CanTreatHoleAsUndefined(receiver_maps)) {
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Check if we are allowed to turn the hole into undefined.
// Turn the hole into undefined.
vtrue = graph()->NewNode(simplified()->ReferenceEqual(), element,
jsgraph()->TheHoleConstant());
} else {
vtrue =
graph()->NewNode(simplified()->NumberIsFloat64Hole(), element);
}
// has == !IsHole
vtrue = graph()->NewNode(simplified()->BooleanNot(), vtrue);
} else {
if (elements_kind == HOLEY_ELEMENTS ||
elements_kind == HOLEY_SMI_ELEMENTS) {
// Bailout if we see the hole.
etrue = graph()->NewNode(simplified()->CheckNotTaggedHole(),
element, etrue, if_true);
} else {
etrue = graph()->NewNode(
simplified()->CheckFloat64Hole(
CheckFloat64HoleMode::kNeverReturnHole, FeedbackSource()),
element, etrue, if_true);
}
vtrue = jsgraph()->TrueConstant();
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect =
graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
value =
graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
vtrue, vfalse, control);
}
} else {
DCHECK(keyed_mode.access_mode() == AccessMode::kStore ||
keyed_mode.access_mode() == AccessMode::kStoreInLiteral ||
keyed_mode.access_mode() == AccessMode::kDefine);
if (IsSmiElementsKind(elements_kind)) {
value = effect = graph()->NewNode(
simplified()->CheckSmi(FeedbackSource()), value, effect, control);
} else if (IsDoubleElementsKind(elements_kind)) {
value = effect =
graph()->NewNode(simplified()->CheckNumber(FeedbackSource()), value,
effect, control);
// Make sure we do not store signaling NaNs into double arrays.
value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
}
// Ensure that copy-on-write backing store is writable.
if (IsSmiOrObjectElementsKind(elements_kind) &&
keyed_mode.store_mode() == STORE_HANDLE_COW) {
elements = effect =
graph()->NewNode(simplified()->EnsureWritableFastElements(),
receiver, elements, effect, control);
} else if (IsGrowStoreMode(keyed_mode.store_mode())) {
// Determine the length of the {elements} backing store.
Node* elements_length = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
elements, effect, control);
// Validate the {index} depending on holeyness:
//
// For HOLEY_*_ELEMENTS the {index} must not exceed the {elements}
// backing store capacity plus the maximum allowed gap, as otherwise
// the (potential) backing store growth would normalize and thus
// the elements kind of the {receiver} would change to slow mode.
//
// For PACKED_*_ELEMENTS the {index} must be within the range
// [0,length+1[ to be valid. In case {index} equals {length},
// the {receiver} will be extended, but kept packed.
Node* limit =
IsHoleyElementsKind(elements_kind)
? graph()->NewNode(simplified()->NumberAdd(), elements_length,
jsgraph()->Constant(JSObject::kMaxGap))
: graph()->NewNode(simplified()->NumberAdd(), length,
jsgraph()->OneConstant());
index = effect = graph()->NewNode(
simplified()->CheckBounds(
FeedbackSource(), CheckBoundsFlag::kConvertStringAndMinusZero),
index, limit, effect, control);
// Grow {elements} backing store if necessary.
GrowFastElementsMode mode =
IsDoubleElementsKind(elements_kind)
? GrowFastElementsMode::kDoubleElements
: GrowFastElementsMode::kSmiOrObjectElements;
elements = effect = graph()->NewNode(
simplified()->MaybeGrowFastElements(mode, FeedbackSource()),
receiver, elements, index, elements_length, effect, control);
// If we didn't grow {elements}, it might still be COW, in which case we
// copy it now.
if (IsSmiOrObjectElementsKind(elements_kind) &&
keyed_mode.store_mode() == STORE_AND_GROW_HANDLE_COW) {
elements = effect =
graph()->NewNode(simplified()->EnsureWritableFastElements(),
receiver, elements, effect, control);
}
// Also update the "length" property if {receiver} is a JSArray.
if (receiver_is_jsarray) {
Node* check =
graph()->NewNode(simplified()->NumberLessThan(), index, length);
Node* branch = graph()->NewNode(common()->Branch(), check, control);
Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
Node* etrue = effect;
{
// We don't need to do anything, the {index} is within
// the valid bounds for the JSArray {receiver}.
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
Node* efalse = effect;
{
// Update the JSArray::length field. Since this is observable,
// there must be no other check after this.
Node* new_length = graph()->NewNode(
simplified()->NumberAdd(), index, jsgraph()->OneConstant());
efalse = graph()->NewNode(
simplified()->StoreField(
AccessBuilder::ForJSArrayLength(elements_kind)),
receiver, new_length, efalse, if_false);
}
control = graph()->NewNode(common()->Merge(2), if_true, if_false);
effect =
graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
}
}
// Perform the actual element access.
effect = graph()->NewNode(simplified()->StoreElement(element_access),
elements, index, value, effect, control);
}
}
return ValueEffectControl(value, effect, control);
}
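
To recap the growing-store validation that this CL moves around: holey kinds accept indices up to the backing-store capacity plus JSObject::kMaxGap, beyond which growth would normalize the receiver to dictionary elements, while packed kinds only permit appending at index == length; for JSArray receivers the length field is then bumped to index + 1. A hedged C++ model, with kMaxGap's value assumed to match V8's:

#include <cassert>
#include <cstddef>

// Mirrors JSObject::kMaxGap (1024 in current V8); assumed for the sketch.
constexpr std::size_t kMaxGap = 1024;

// Valid index range for a growing store, per the comment block above.
bool IndexValidForGrowingStore(bool holey, std::size_t index,
                               std::size_t length, std::size_t capacity) {
  return holey ? index < capacity + kMaxGap : index < length + 1;
}

// JSArray length update after the store: the NumberAdd(index, 1) above.
std::size_t NewArrayLength(std::size_t index, std::size_t length) {
  return index < length ? length : index + 1;
}

int main() {
  assert(IndexValidForGrowingStore(/*holey=*/false, 4, 4, 8));   // append
  assert(!IndexValidForGrowingStore(/*holey=*/false, 5, 4, 8));  // would leave a gap
  assert(IndexValidForGrowingStore(/*holey=*/true, 500, 4, 8));  // within kMaxGap
  assert(NewArrayLength(4, 4) == 5);
  return 0;
}
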

src/compiler/js-native-context-specialization.h

@@ -193,6 +193,10 @@ class V8_EXPORT_PRIVATE JSNativeContextSpecialization final
Node* control, Node* context,
ElementAccessInfo const& access_info,
KeyedAccessMode const& keyed_mode);
ValueEffectControl BuildElementAccessForTypedArrayOrRabGsabTypedArray(
ElementsKind elements_kind, Node* receiver, Node* index, Node* value,
Node* effect, Node* control, Node* context,
ElementAccessInfo const& access_info, KeyedAccessMode const& keyed_mode);
// Construct appropriate subgraph to load from a String.
Node* BuildIndexedStringLoad(Node* receiver, Node* index, Node* length,