[csa] type and separate {Load,Store}{Fixed,Property}ArrayElement

This enables fast bounds checks on FixedArrays.

Change-Id: I0ae57b2c6981d8e1b2c7017ba658fd9c890d2bad
Reviewed-on: https://chromium-review.googlesource.com/1163614
Commit-Queue: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54946}
Tobias Tebbi 2018-08-07 15:26:55 +02:00 committed by Commit Bot
parent c790b2790f
commit 36bb2e000b
20 changed files with 396 additions and 344 deletions
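All of the hunks below follow one pattern: the FixedArray element accessors now take a TNode<FixedArray> instead of a plain Node*, so call sites CAST their untyped values first, and the store helper no longer returns a value. The following is a minimal sketch of that calling convention inside a hypothetical CodeStubAssembler subclass; the class and method names are illustrative and are not part of this CL.

// Sketch only: the typed calling convention used throughout this CL,
// shown inside a hypothetical CodeStubAssembler subclass.
#include "src/code-stub-assembler.h"

namespace v8 {
namespace internal {

class ExampleAssembler : public CodeStubAssembler {
 public:
  explicit ExampleAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  void CopyFirstElement(Node* maybe_src, Node* maybe_dest) {
    // The FixedArray overloads now require TNode<FixedArray>, so untyped
    // Node* values are CAST first (CAST asserts the type in debug builds).
    TNode<FixedArray> src = CAST(maybe_src);
    TNode<FixedArray> dest = CAST(maybe_dest);
    TNode<Object> value = LoadFixedArrayElement(src, 0);
    // StoreFixedArrayElement now returns void rather than a Node*.
    StoreFixedArrayElement(dest, 0, value, SKIP_WRITE_BARRIER);
  }
};

}  // namespace internal
}  // namespace v8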

View File

@ -305,8 +305,8 @@ Node* ArgumentsBuiltinsAssembler::EmitFastNewSloppyArguments(Node* context,
JSSloppyArgumentsObject::kSize);
StoreObjectFieldNoWriteBarrier(
argument_object, JSSloppyArgumentsObject::kCalleeOffset, function);
StoreFixedArrayElement(map_array, 0, context, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(map_array, 1, elements, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(CAST(map_array), 0, context, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(CAST(map_array), 1, elements, SKIP_WRITE_BARRIER);
Comment("Fill in non-mapped parameters");
Node* argument_offset =

View File

@ -994,8 +994,8 @@ TF_BUILTIN(ArrayPrototypePop, CodeStubAssembler) {
BIND(&fast_elements);
{
Node* value = LoadFixedArrayElement(elements, new_length);
StoreFixedArrayElement(elements, new_length, TheHoleConstant());
Node* value = LoadFixedArrayElement(CAST(elements), new_length);
StoreFixedArrayElement(CAST(elements), new_length, TheHoleConstant());
GotoIf(WordEqual(value, TheHoleConstant()), &return_undefined);
args.PopAndReturn(value);
}
@ -1194,7 +1194,7 @@ class ArrayPrototypeSliceCodeStubAssembler : public CodeStubAssembler {
native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
GotoIf(WordNotEqual(map, fast_aliasted_arguments_map), &try_simple_slice);
Node* sloppy_elements = LoadElements(array);
TNode<SloppyArgumentsElements> sloppy_elements = CAST(LoadElements(array));
TNode<Smi> sloppy_elements_length =
LoadFixedArrayBaseLength(sloppy_elements);
TNode<Smi> parameter_map_length =
@ -1213,8 +1213,8 @@ class ArrayPrototypeSliceCodeStubAssembler : public CodeStubAssembler {
TNode<Smi> end = SmiAdd(CAST(from), CAST(count));
Node* unmapped_elements = LoadFixedArrayElement(
sloppy_elements, SloppyArgumentsElements::kArgumentsIndex);
TNode<FixedArray> unmapped_elements = CAST(LoadFixedArrayElement(
sloppy_elements, SloppyArgumentsElements::kArgumentsIndex));
TNode<Smi> unmapped_elements_length =
LoadFixedArrayBaseLength(unmapped_elements);
@ -1225,7 +1225,7 @@ class ArrayPrototypeSliceCodeStubAssembler : public CodeStubAssembler {
nullptr, SMI_PARAMETERS));
index_out.Bind(IntPtrConstant(0));
Node* result_elements = LoadElements(result.value());
TNode<FixedArray> result_elements = CAST(LoadElements(result.value()));
TNode<Smi> from_mapped = SmiMin(parameter_map_length, CAST(from));
TNode<Smi> to = SmiMin(parameter_map_length, end);
Node* arguments_context = LoadFixedArrayElement(
@ -1595,35 +1595,39 @@ TF_BUILTIN(ArrayPrototypeShift, CodeStubAssembler) {
BIND(&fast_elements_tagged);
{
Node* value = LoadFixedArrayElement(elements, 0);
BuildFastLoop(IntPtrConstant(0), new_length,
[&](Node* index) {
StoreFixedArrayElement(
elements, index,
LoadFixedArrayElement(
elements, IntPtrAdd(index, IntPtrConstant(1))));
},
1, ParameterMode::INTPTR_PARAMETERS,
IndexAdvanceMode::kPost);
StoreFixedArrayElement(elements, new_length, TheHoleConstant());
TNode<FixedArray> elements_fixed_array = CAST(elements);
Node* value = LoadFixedArrayElement(elements_fixed_array, 0);
BuildFastLoop(
IntPtrConstant(0), new_length,
[&](Node* index) {
StoreFixedArrayElement(
elements_fixed_array, index,
LoadFixedArrayElement(elements_fixed_array,
IntPtrAdd(index, IntPtrConstant(1))));
},
1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
StoreFixedArrayElement(elements_fixed_array, new_length,
TheHoleConstant());
GotoIf(WordEqual(value, TheHoleConstant()), &return_undefined);
args.PopAndReturn(value);
}
BIND(&fast_elements_smi);
{
Node* value = LoadFixedArrayElement(elements, 0);
BuildFastLoop(IntPtrConstant(0), new_length,
[&](Node* index) {
StoreFixedArrayElement(
elements, index,
LoadFixedArrayElement(
elements, IntPtrAdd(index, IntPtrConstant(1))),
SKIP_WRITE_BARRIER);
},
1, ParameterMode::INTPTR_PARAMETERS,
IndexAdvanceMode::kPost);
StoreFixedArrayElement(elements, new_length, TheHoleConstant());
TNode<FixedArray> elements_fixed_array = CAST(elements);
Node* value = LoadFixedArrayElement(elements_fixed_array, 0);
BuildFastLoop(
IntPtrConstant(0), new_length,
[&](Node* index) {
StoreFixedArrayElement(
elements_fixed_array, index,
LoadFixedArrayElement(elements_fixed_array,
IntPtrAdd(index, IntPtrConstant(1))),
SKIP_WRITE_BARRIER);
},
1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
StoreFixedArrayElement(elements_fixed_array, new_length,
TheHoleConstant());
GotoIf(WordEqual(value, TheHoleConstant()), &return_undefined);
args.PopAndReturn(value);
}
@ -3090,7 +3094,7 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
{
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k = LoadFixedArrayElement(CAST(elements), index_var.value());
GotoIf(WordEqual(element_k, search_element), &return_found);
Increment(&index_var);
@ -3102,7 +3106,7 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k = LoadFixedArrayElement(CAST(elements), index_var.value());
GotoIf(IsUndefined(element_k), &return_found);
GotoIf(IsTheHole(element_k), &return_found);
@ -3121,7 +3125,8 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
Label continue_loop(this), not_smi(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k =
LoadFixedArrayElement(CAST(elements), index_var.value());
GotoIfNot(TaggedIsSmi(element_k), &not_smi);
Branch(Float64Equal(search_num.value(), SmiToFloat64(element_k)),
&return_found, &continue_loop);
@ -3142,7 +3147,8 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
Label continue_loop(this);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k =
LoadFixedArrayElement(CAST(elements), index_var.value());
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIfNot(IsHeapNumber(CAST(element_k)), &continue_loop);
BranchIfFloat64IsNaN(LoadHeapNumberValue(element_k), &return_found,
@ -3165,7 +3171,7 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
BIND(&next_iteration);
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k = LoadFixedArrayElement(CAST(elements), index_var.value());
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIf(WordEqual(search_element_string, element_k), &return_found);
Node* element_k_type = LoadInstanceType(element_k);
@ -3193,7 +3199,7 @@ void ArrayIncludesIndexofAssembler::GenerateSmiOrObject(
GotoIfNot(UintPtrLessThan(index_var.value(), array_length_untagged),
&return_not_found);
Node* element_k = LoadFixedArrayElement(elements, index_var.value());
Node* element_k = LoadFixedArrayElement(CAST(elements), index_var.value());
Label continue_loop(this);
GotoIf(TaggedIsSmi(element_k), &continue_loop);
GotoIfNot(IsBigInt(CAST(element_k)), &continue_loop);
@ -3575,13 +3581,15 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
BIND(&if_packed);
{
var_value.Bind(LoadFixedArrayElement(elements, index, 0, SMI_PARAMETERS));
var_value.Bind(
LoadFixedArrayElement(CAST(elements), index, 0, SMI_PARAMETERS));
Goto(&allocate_entry_if_needed);
}
BIND(&if_holey);
{
Node* element = LoadFixedArrayElement(elements, index, 0, SMI_PARAMETERS);
Node* element =
LoadFixedArrayElement(CAST(elements), index, 0, SMI_PARAMETERS);
var_value.Bind(element);
GotoIfNot(WordEqual(element, TheHoleConstant()),
&allocate_entry_if_needed);
@ -3725,7 +3733,8 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
Int32Constant(static_cast<int>(IterationKind::kValues))),
&allocate_iterator_result);
Node* elements = AllocateFixedArray(PACKED_ELEMENTS, IntPtrConstant(2));
TNode<FixedArray> elements =
AllocateFixedArray(PACKED_ELEMENTS, IntPtrConstant(2));
StoreFixedArrayElement(elements, 0, index, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(elements, 1, var_value.value(), SKIP_WRITE_BARRIER);
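Condensing the ArrayPrototypeShift hunk above into one place, the typed loop idiom looks roughly like the sketch below, written as a member of a CodeStubAssembler subclass as in the earlier sketch. The helper name is illustrative; only the FixedArray is typed, matching the surrounding code, and the length stays a raw Node*.

// Sketch: the left-shift loop from ArrayPrototypeShift, condensed.
void ShiftElementsLeftByOne(TNode<FixedArray> elements, Node* new_length) {
  BuildFastLoop(
      IntPtrConstant(0), new_length,
      [&](Node* index) {
        // elements[index] = elements[index + 1]
        StoreFixedArrayElement(
            elements, index,
            LoadFixedArrayElement(elements,
                                  IntPtrAdd(index, IntPtrConstant(1))));
      },
      1, ParameterMode::INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
  // The vacated last slot is filled with the hole.
  StoreFixedArrayElement(elements, new_length, TheHoleConstant());
}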

View File

@ -72,9 +72,9 @@ class ArrayBuiltinsAssembler : public BaseBuiltinsFromDSLAssembler {
TNode<Object> LoadFixedArrayElementInt(TNode<FixedArray> array, int index) {
return LoadFixedArrayElement(array, index);
}
Node* StoreFixedArrayElementInt(TNode<FixedArray> array, int index,
TNode<Object> value) {
return StoreFixedArrayElement(array, index, value);
void StoreFixedArrayElementInt(TNode<FixedArray> array, int index,
TNode<Object> value) {
StoreFixedArrayElement(array, index, value);
}
protected:

View File

@ -119,7 +119,7 @@ class BaseCollectionsAssembler : public CodeStubAssembler {
// Loads an element from a fixed array. If the element is the hole, returns
// `undefined`.
TNode<Object> LoadAndNormalizeFixedArrayElement(TNode<HeapObject> elements,
TNode<Object> LoadAndNormalizeFixedArrayElement(TNode<FixedArray> elements,
TNode<IntPtrT> index);
// Loads an element from a fixed double array. If the element is the hole,
@ -247,7 +247,7 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
{
auto set_entry = [&](Node* index) {
TNode<Object> element = LoadAndNormalizeFixedArrayElement(
elements, UncheckedCast<IntPtrT>(index));
CAST(elements), UncheckedCast<IntPtrT>(index));
AddConstructorEntry(variant, context, collection, add_func, element,
if_may_have_side_effects);
};
@ -491,7 +491,7 @@ TNode<BoolT> BaseCollectionsAssembler::HasInitialCollectionPrototype(
}
TNode<Object> BaseCollectionsAssembler::LoadAndNormalizeFixedArrayElement(
TNode<HeapObject> elements, TNode<IntPtrT> index) {
TNode<FixedArray> elements, TNode<IntPtrT> index) {
TNode<Object> element = LoadFixedArrayElement(elements, index);
return Select<Object>(IsTheHole(element), [=] { return UndefinedConstant(); },
[=] { return element; });
@ -547,14 +547,18 @@ void BaseCollectionsAssembler::LoadKeyValue(
}
BIND(&if_one);
{
*key = LoadAndNormalizeFixedArrayElement(elements, IntPtrConstant(0));
*key = LoadAndNormalizeFixedArrayElement(CAST(elements),
IntPtrConstant(0));
*value = UndefinedConstant();
Goto(&exit);
}
BIND(&if_two);
{
*key = LoadAndNormalizeFixedArrayElement(elements, IntPtrConstant(0));
*value = LoadAndNormalizeFixedArrayElement(elements, IntPtrConstant(1));
TNode<FixedArray> elements_fixed_array = CAST(elements);
*key = LoadAndNormalizeFixedArrayElement(elements_fixed_array,
IntPtrConstant(0));
*value = LoadAndNormalizeFixedArrayElement(elements_fixed_array,
IntPtrConstant(1));
Goto(&exit);
}
}
@ -636,14 +640,15 @@ class CollectionsBuiltinsAssembler : public BaseCollectionsAssembler {
typedef std::function<void(Node* const table, Node* const index)>
UpdateInTransition;
template <typename TableType>
std::tuple<Node*, Node*> Transition(
Node* const table, Node* const index,
std::pair<TNode<TableType>, TNode<IntPtrT>> Transition(
TNode<TableType> const table, TNode<IntPtrT> const index,
UpdateInTransition const& update_in_transition);
template <typename IteratorType, typename TableType>
std::tuple<Node*, Node*> TransitionAndUpdate(Node* const iterator);
std::pair<TNode<TableType>, TNode<IntPtrT>> TransitionAndUpdate(
TNode<IteratorType> const iterator);
template <typename TableType>
std::tuple<Node*, Node*, Node*> NextSkipHoles(Node* table, Node* index,
Label* if_end);
std::tuple<TNode<Object>, TNode<IntPtrT>, TNode<IntPtrT>> NextSkipHoles(
TNode<TableType> table, TNode<IntPtrT> index, Label* if_end);
// Specialization for Smi.
// The {result} variable will contain the entry index if the key was found,
@ -708,12 +713,13 @@ class CollectionsBuiltinsAssembler : public BaseCollectionsAssembler {
Label* if_not_found);
Node* NormalizeNumberKey(Node* key);
void StoreOrderedHashMapNewEntry(Node* const table, Node* const key,
Node* const value, Node* const hash,
void StoreOrderedHashMapNewEntry(TNode<OrderedHashMap> const table,
Node* const key, Node* const value,
Node* const hash,
Node* const number_of_buckets,
Node* const occupancy);
void StoreOrderedHashSetNewEntry(Node* const table, Node* const key,
Node* const hash,
void StoreOrderedHashSetNewEntry(TNode<OrderedHashSet> const table,
Node* const key, Node* const hash,
Node* const number_of_buckets,
Node* const occupancy);
};
@ -1020,7 +1026,8 @@ TF_BUILTIN(OrderedHashTableHealIndex, CollectionsBuiltinsAssembler) {
Node* i = var_i.value();
GotoIfNot(IntPtrLessThan(i, number_of_deleted_elements), &return_index);
TNode<Smi> removed_index = CAST(LoadFixedArrayElement(
table, i, OrderedHashTableBase::kRemovedHolesIndex * kPointerSize));
CAST(table), i,
OrderedHashTableBase::kRemovedHolesIndex * kPointerSize));
GotoIf(SmiGreaterThanOrEqual(removed_index, index), &return_index);
Decrement(&var_index, 1, SMI_PARAMETERS);
Increment(&var_i);
@ -1035,11 +1042,12 @@ TF_BUILTIN(OrderedHashTableHealIndex, CollectionsBuiltinsAssembler) {
}
template <typename TableType>
std::tuple<Node*, Node*> CollectionsBuiltinsAssembler::Transition(
Node* const table, Node* const index,
std::pair<TNode<TableType>, TNode<IntPtrT>>
CollectionsBuiltinsAssembler::Transition(
TNode<TableType> const table, TNode<IntPtrT> const index,
UpdateInTransition const& update_in_transition) {
VARIABLE(var_index, MachineType::PointerRepresentation(), index);
VARIABLE(var_table, MachineRepresentation::kTagged, table);
TVARIABLE(IntPtrT, var_index, index);
TVARIABLE(TableType, var_table, table);
Label if_done(this), if_transition(this, Label::kDeferred);
Branch(TaggedIsSmi(
LoadObjectField(var_table.value(), TableType::kNextTableOffset)),
@ -1051,16 +1059,17 @@ std::tuple<Node*, Node*> CollectionsBuiltinsAssembler::Transition(
Goto(&loop);
BIND(&loop);
{
Node* table = var_table.value();
Node* index = var_index.value();
TNode<TableType> table = var_table.value();
TNode<IntPtrT> index = var_index.value();
Node* next_table = LoadObjectField(table, TableType::kNextTableOffset);
TNode<Object> next_table =
LoadObjectField(table, TableType::kNextTableOffset);
GotoIf(TaggedIsSmi(next_table), &done_loop);
var_table.Bind(next_table);
var_index.Bind(SmiUntag(
var_table = CAST(next_table);
var_index = SmiUntag(
CAST(CallBuiltin(Builtins::kOrderedHashTableHealIndex,
NoContextConstant(), table, SmiTag(index)))));
NoContextConstant(), table, SmiTag(index))));
Goto(&loop);
}
BIND(&done_loop);
@ -1071,14 +1080,15 @@ std::tuple<Node*, Node*> CollectionsBuiltinsAssembler::Transition(
}
BIND(&if_done);
return std::tuple<Node*, Node*>(var_table.value(), var_index.value());
return {var_table.value(), var_index.value()};
}
template <typename IteratorType, typename TableType>
std::tuple<Node*, Node*> CollectionsBuiltinsAssembler::TransitionAndUpdate(
Node* const iterator) {
std::pair<TNode<TableType>, TNode<IntPtrT>>
CollectionsBuiltinsAssembler::TransitionAndUpdate(
TNode<IteratorType> const iterator) {
return Transition<TableType>(
LoadObjectField(iterator, IteratorType::kTableOffset),
CAST(LoadObjectField(iterator, IteratorType::kTableOffset)),
LoadAndUntagObjectField(iterator, IteratorType::kIndexOffset),
[this, iterator](Node* const table, Node* const index) {
// Update the {iterator} with the new state.
@ -1089,21 +1099,23 @@ std::tuple<Node*, Node*> CollectionsBuiltinsAssembler::TransitionAndUpdate(
}
template <typename TableType>
std::tuple<Node*, Node*, Node*> CollectionsBuiltinsAssembler::NextSkipHoles(
Node* table, Node* index, Label* if_end) {
std::tuple<TNode<Object>, TNode<IntPtrT>, TNode<IntPtrT>>
CollectionsBuiltinsAssembler::NextSkipHoles(TNode<TableType> table,
TNode<IntPtrT> index,
Label* if_end) {
// Compute the used capacity for the {table}.
Node* number_of_buckets =
TNode<IntPtrT> number_of_buckets =
LoadAndUntagObjectField(table, TableType::kNumberOfBucketsOffset);
Node* number_of_elements =
TNode<IntPtrT> number_of_elements =
LoadAndUntagObjectField(table, TableType::kNumberOfElementsOffset);
Node* number_of_deleted_elements =
TNode<IntPtrT> number_of_deleted_elements =
LoadAndUntagObjectField(table, TableType::kNumberOfDeletedElementsOffset);
Node* used_capacity =
TNode<IntPtrT> used_capacity =
IntPtrAdd(number_of_elements, number_of_deleted_elements);
Node* entry_key;
Node* entry_start_position;
VARIABLE(var_index, MachineType::PointerRepresentation(), index);
TNode<Object> entry_key;
TNode<IntPtrT> entry_start_position;
TVARIABLE(IntPtrT, var_index, index);
Label loop(this, &var_index), done_loop(this);
Goto(&loop);
BIND(&loop);
@ -1120,8 +1132,8 @@ std::tuple<Node*, Node*, Node*> CollectionsBuiltinsAssembler::NextSkipHoles(
}
BIND(&done_loop);
return std::tuple<Node*, Node*, Node*>(entry_key, entry_start_position,
var_index.value());
return std::tuple<TNode<Object>, TNode<IntPtrT>, TNode<IntPtrT>>{
entry_key, entry_start_position, var_index.value()};
}
TF_BUILTIN(MapPrototypeGet, CollectionsBuiltinsAssembler) {
@ -1141,7 +1153,7 @@ TF_BUILTIN(MapPrototypeGet, CollectionsBuiltinsAssembler) {
BIND(&if_found);
Return(LoadFixedArrayElement(
table, SmiUntag(index),
CAST(table), SmiUntag(index),
(OrderedHashMap::kHashTableStartIndex + OrderedHashMap::kValueOffset) *
kPointerSize));
@ -1198,7 +1210,8 @@ TF_BUILTIN(MapPrototypeSet, CollectionsBuiltinsAssembler) {
key = NormalizeNumberKey(key);
Node* const table = LoadObjectField(receiver, JSMap::kTableOffset);
TNode<OrderedHashMap> const table =
CAST(LoadObjectField(receiver, JSMap::kTableOffset));
VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(),
IntPtrConstant(0));
@ -1232,7 +1245,7 @@ TF_BUILTIN(MapPrototypeSet, CollectionsBuiltinsAssembler) {
BIND(&add_entry);
VARIABLE(number_of_buckets, MachineType::PointerRepresentation());
VARIABLE(occupancy, MachineType::PointerRepresentation());
VARIABLE(table_var, MachineRepresentation::kTaggedPointer, table);
TVARIABLE(OrderedHashMap, table_var, table);
{
// Check we have enough space for the entry.
number_of_buckets.Bind(SmiUntag(CAST(
@ -1250,7 +1263,7 @@ TF_BUILTIN(MapPrototypeSet, CollectionsBuiltinsAssembler) {
// We do not have enough space, grow the table and reload the relevant
// fields.
CallRuntime(Runtime::kMapGrow, context, receiver);
table_var.Bind(LoadObjectField(receiver, JSMap::kTableOffset));
table_var = CAST(LoadObjectField(receiver, JSMap::kTableOffset));
number_of_buckets.Bind(SmiUntag(CAST(LoadFixedArrayElement(
table_var.value(), OrderedHashMap::kNumberOfBucketsIndex))));
Node* const new_number_of_elements = SmiUntag(CAST(LoadObjectField(
@ -1269,8 +1282,8 @@ TF_BUILTIN(MapPrototypeSet, CollectionsBuiltinsAssembler) {
}
void CollectionsBuiltinsAssembler::StoreOrderedHashMapNewEntry(
Node* const table, Node* const key, Node* const value, Node* const hash,
Node* const number_of_buckets, Node* const occupancy) {
TNode<OrderedHashMap> const table, Node* const key, Node* const value,
Node* const hash, Node* const number_of_buckets, Node* const occupancy) {
Node* const bucket =
WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
Node* const bucket_entry = LoadFixedArrayElement(
@ -1308,7 +1321,8 @@ TF_BUILTIN(MapPrototypeDelete, CollectionsBuiltinsAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_MAP_TYPE,
"Map.prototype.delete");
Node* const table = LoadObjectField(receiver, JSMap::kTableOffset);
TNode<OrderedHashMap> const table =
CAST(LoadObjectField(receiver, JSMap::kTableOffset));
VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(),
IntPtrConstant(0));
@ -1368,7 +1382,8 @@ TF_BUILTIN(SetPrototypeAdd, CollectionsBuiltinsAssembler) {
key = NormalizeNumberKey(key);
Node* const table = LoadObjectField(receiver, JSMap::kTableOffset);
TNode<OrderedHashSet> const table =
CAST(LoadObjectField(receiver, JSMap::kTableOffset));
VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(),
IntPtrConstant(0));
@ -1398,7 +1413,7 @@ TF_BUILTIN(SetPrototypeAdd, CollectionsBuiltinsAssembler) {
BIND(&add_entry);
VARIABLE(number_of_buckets, MachineType::PointerRepresentation());
VARIABLE(occupancy, MachineType::PointerRepresentation());
VARIABLE(table_var, MachineRepresentation::kTaggedPointer, table);
TVARIABLE(OrderedHashSet, table_var, table);
{
// Check we have enough space for the entry.
number_of_buckets.Bind(SmiUntag(CAST(
@ -1416,7 +1431,7 @@ TF_BUILTIN(SetPrototypeAdd, CollectionsBuiltinsAssembler) {
// We do not have enough space, grow the table and reload the relevant
// fields.
CallRuntime(Runtime::kSetGrow, context, receiver);
table_var.Bind(LoadObjectField(receiver, JSMap::kTableOffset));
table_var = CAST(LoadObjectField(receiver, JSMap::kTableOffset));
number_of_buckets.Bind(SmiUntag(CAST(LoadFixedArrayElement(
table_var.value(), OrderedHashSet::kNumberOfBucketsIndex))));
Node* const new_number_of_elements = SmiUntag(CAST(LoadObjectField(
@ -1435,7 +1450,7 @@ TF_BUILTIN(SetPrototypeAdd, CollectionsBuiltinsAssembler) {
}
void CollectionsBuiltinsAssembler::StoreOrderedHashSetNewEntry(
Node* const table, Node* const key, Node* const hash,
TNode<OrderedHashSet> const table, Node* const key, Node* const hash,
Node* const number_of_buckets, Node* const occupancy) {
Node* const bucket =
WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
@ -1471,7 +1486,8 @@ TF_BUILTIN(SetPrototypeDelete, CollectionsBuiltinsAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_SET_TYPE,
"Set.prototype.delete");
Node* const table = LoadObjectField(receiver, JSMap::kTableOffset);
TNode<OrderedHashSet> const table =
CAST(LoadObjectField(receiver, JSMap::kTableOffset));
VARIABLE(entry_start_position_or_hash, MachineType::PointerRepresentation(),
IntPtrConstant(0));
@ -1552,23 +1568,23 @@ TF_BUILTIN(MapPrototypeForEach, CollectionsBuiltinsAssembler) {
GotoIf(TaggedIsSmi(callback), &callback_not_callable);
GotoIfNot(IsCallable(callback), &callback_not_callable);
VARIABLE(var_index, MachineType::PointerRepresentation(), IntPtrConstant(0));
VARIABLE(var_table, MachineRepresentation::kTagged,
LoadObjectField(receiver, JSMap::kTableOffset));
TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
TVARIABLE(OrderedHashMap, var_table,
CAST(LoadObjectField(receiver, JSMap::kTableOffset)));
Label loop(this, {&var_index, &var_table}), done_loop(this);
Goto(&loop);
BIND(&loop);
{
// Transition {table} and {index} if there was any modification to
// the {receiver} while we're iterating.
Node* index = var_index.value();
Node* table = var_table.value();
TNode<IntPtrT> index = var_index.value();
TNode<OrderedHashMap> table = var_table.value();
std::tie(table, index) =
Transition<OrderedHashMap>(table, index, [](Node*, Node*) {});
// Read the next entry from the {table}, skipping holes.
Node* entry_key;
Node* entry_start_position;
TNode<Object> entry_key;
TNode<IntPtrT> entry_start_position;
std::tie(entry_key, entry_start_position, index) =
NextSkipHoles<OrderedHashMap>(table, index, &done_loop);
@ -1584,8 +1600,8 @@ TF_BUILTIN(MapPrototypeForEach, CollectionsBuiltinsAssembler) {
entry_value, entry_key, receiver);
// Continue with the next entry.
var_index.Bind(index);
var_table.Bind(table);
var_index = index;
var_table = table;
Goto(&loop);
}
@ -1644,14 +1660,14 @@ TF_BUILTIN(MapIteratorPrototypeNext, CollectionsBuiltinsAssembler) {
return_end(this, Label::kDeferred);
// Transition the {receiver} table if necessary.
Node* table;
Node* index;
TNode<OrderedHashMap> table;
TNode<IntPtrT> index;
std::tie(table, index) =
TransitionAndUpdate<JSMapIterator, OrderedHashMap>(receiver);
TransitionAndUpdate<JSMapIterator, OrderedHashMap>(CAST(receiver));
// Read the next entry from the {table}, skipping holes.
Node* entry_key;
Node* entry_start_position;
TNode<Object> entry_key;
TNode<IntPtrT> entry_start_position;
std::tie(entry_key, entry_start_position, index) =
NextSkipHoles<OrderedHashMap>(table, index, &return_end);
StoreObjectFieldNoWriteBarrier(receiver, JSMapIterator::kIndexOffset,
@ -1783,17 +1799,17 @@ TF_BUILTIN(SetPrototypeForEach, CollectionsBuiltinsAssembler) {
GotoIf(TaggedIsSmi(callback), &callback_not_callable);
GotoIfNot(IsCallable(callback), &callback_not_callable);
VARIABLE(var_index, MachineType::PointerRepresentation(), IntPtrConstant(0));
VARIABLE(var_table, MachineRepresentation::kTagged,
LoadObjectField(receiver, JSSet::kTableOffset));
TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
TVARIABLE(OrderedHashSet, var_table,
CAST(LoadObjectField(receiver, JSSet::kTableOffset)));
Label loop(this, {&var_index, &var_table}), done_loop(this);
Goto(&loop);
BIND(&loop);
{
// Transition {table} and {index} if there was any modification to
// the {receiver} while we're iterating.
Node* index = var_index.value();
Node* table = var_table.value();
TNode<IntPtrT> index = var_index.value();
TNode<OrderedHashSet> table = var_table.value();
std::tie(table, index) =
Transition<OrderedHashSet>(table, index, [](Node*, Node*) {});
@ -1808,8 +1824,8 @@ TF_BUILTIN(SetPrototypeForEach, CollectionsBuiltinsAssembler) {
entry_key, receiver);
// Continue with the next entry.
var_index.Bind(index);
var_table.Bind(table);
var_index = index;
var_table = table;
Goto(&loop);
}
@ -1858,10 +1874,10 @@ TF_BUILTIN(SetIteratorPrototypeNext, CollectionsBuiltinsAssembler) {
return_end(this, Label::kDeferred);
// Transition the {receiver} table if necessary.
Node* table;
Node* index;
TNode<OrderedHashSet> table;
TNode<IntPtrT> index;
std::tie(table, index) =
TransitionAndUpdate<JSSetIterator, OrderedHashSet>(receiver);
TransitionAndUpdate<JSSetIterator, OrderedHashSet>(CAST(receiver));
// Read the next entry from the {table}, skipping holes.
Node* entry_key;
@ -1968,7 +1984,7 @@ class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler {
: BaseCollectionsAssembler(state) {}
protected:
void AddEntry(TNode<HeapObject> table, TNode<IntPtrT> key_index,
void AddEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index,
TNode<Object> key, TNode<Object> value,
TNode<IntPtrT> number_of_elements);
@ -2007,12 +2023,14 @@ class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler {
TNode<IntPtrT> number_of_deleted);
TNode<IntPtrT> KeyIndexFromEntry(TNode<IntPtrT> entry);
TNode<IntPtrT> LoadNumberOfElements(TNode<HeapObject> table, int offset);
TNode<IntPtrT> LoadNumberOfDeleted(TNode<HeapObject> table, int offset = 0);
TNode<HeapObject> LoadTable(SloppyTNode<HeapObject> collection);
TNode<IntPtrT> LoadTableCapacity(TNode<HeapObject> table);
TNode<IntPtrT> LoadNumberOfElements(TNode<EphemeronHashTable> table,
int offset);
TNode<IntPtrT> LoadNumberOfDeleted(TNode<EphemeronHashTable> table,
int offset = 0);
TNode<EphemeronHashTable> LoadTable(TNode<JSWeakCollection> collection);
TNode<IntPtrT> LoadTableCapacity(TNode<EphemeronHashTable> table);
void RemoveEntry(TNode<HeapObject> table, TNode<IntPtrT> key_index,
void RemoveEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index,
TNode<IntPtrT> number_of_elements);
TNode<BoolT> ShouldRehash(TNode<IntPtrT> number_of_elements,
TNode<IntPtrT> number_of_deleted);
@ -2022,8 +2040,8 @@ class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler {
};
void WeakCollectionsBuiltinsAssembler::AddEntry(
TNode<HeapObject> table, TNode<IntPtrT> key_index, TNode<Object> key,
TNode<Object> value, TNode<IntPtrT> number_of_elements) {
TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index,
TNode<Object> key, TNode<Object> value, TNode<IntPtrT> number_of_elements) {
// See EphemeronHashTable::AddEntry().
TNode<IntPtrT> value_index = ValueIndexFromKeyIndex(key_index);
StoreFixedArrayElement(table, key_index, key);
@ -2097,7 +2115,7 @@ TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::FindKeyIndex(
TNode<IntPtrT> key_index;
{
key_index = KeyIndexFromEntry(var_entry.value());
TNode<Object> entry_key = LoadFixedArrayElement(table, key_index);
TNode<Object> entry_key = LoadFixedArrayElement(CAST(table), key_index);
key_compare(entry_key, &if_found);
@ -2146,26 +2164,26 @@ TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::KeyIndexFromEntry(
}
TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::LoadNumberOfElements(
TNode<HeapObject> table, int offset) {
TNode<EphemeronHashTable> table, int offset) {
TNode<IntPtrT> number_of_elements = SmiUntag(CAST(LoadFixedArrayElement(
table, EphemeronHashTable::kNumberOfElementsIndex)));
return IntPtrAdd(number_of_elements, IntPtrConstant(offset));
}
TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::LoadNumberOfDeleted(
TNode<HeapObject> table, int offset) {
TNode<EphemeronHashTable> table, int offset) {
TNode<IntPtrT> number_of_deleted = SmiUntag(CAST(LoadFixedArrayElement(
table, EphemeronHashTable::kNumberOfDeletedElementsIndex)));
return IntPtrAdd(number_of_deleted, IntPtrConstant(offset));
}
TNode<HeapObject> WeakCollectionsBuiltinsAssembler::LoadTable(
SloppyTNode<HeapObject> collection) {
TNode<EphemeronHashTable> WeakCollectionsBuiltinsAssembler::LoadTable(
TNode<JSWeakCollection> collection) {
return CAST(LoadObjectField(collection, JSWeakCollection::kTableOffset));
}
TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::LoadTableCapacity(
TNode<HeapObject> table) {
TNode<EphemeronHashTable> table) {
return SmiUntag(
CAST(LoadFixedArrayElement(table, EphemeronHashTable::kCapacityIndex)));
}
@ -2189,7 +2207,7 @@ TNode<Word32T> WeakCollectionsBuiltinsAssembler::InsufficientCapacityToAdd(
}
void WeakCollectionsBuiltinsAssembler::RemoveEntry(
TNode<HeapObject> table, TNode<IntPtrT> key_index,
TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index,
TNode<IntPtrT> number_of_elements) {
// See EphemeronHashTable::RemoveEntry().
TNode<IntPtrT> value_index = ValueIndexFromKeyIndex(key_index);
@ -2256,7 +2274,7 @@ TF_BUILTIN(WeakSetConstructor, WeakCollectionsBuiltinsAssembler) {
}
TF_BUILTIN(WeakMapLookupHashIndex, WeakCollectionsBuiltinsAssembler) {
TNode<HeapObject> table = CAST(Parameter(Descriptor::kTable));
TNode<EphemeronHashTable> table = CAST(Parameter(Descriptor::kTable));
TNode<Object> key = CAST(Parameter(Descriptor::kKey));
Label if_not_found(this);
@ -2283,9 +2301,9 @@ TF_BUILTIN(WeakMapGet, WeakCollectionsBuiltinsAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE,
"WeakMap.prototype.get");
Node* const table = LoadTable(receiver);
Node* const index =
CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key);
TNode<EphemeronHashTable> const table = LoadTable(CAST(receiver));
TNode<Smi> const index =
CAST(CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key));
GotoIf(WordEqual(index, SmiConstant(-1)), &return_undefined);
@ -2305,7 +2323,7 @@ TF_BUILTIN(WeakMapHas, WeakCollectionsBuiltinsAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE,
"WeakMap.prototype.has");
Node* const table = LoadTable(receiver);
TNode<EphemeronHashTable> const table = LoadTable(CAST(receiver));
Node* const index =
CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key);
@ -2321,7 +2339,7 @@ TF_BUILTIN(WeakMapHas, WeakCollectionsBuiltinsAssembler) {
// (EphemeronHashTable) of a WeakMap or WeakSet.
TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) {
TNode<Context> context = CAST(Parameter(Descriptor::kContext));
TNode<HeapObject> collection = CAST(Parameter(Descriptor::kCollection));
TNode<JSWeakCollection> collection = CAST(Parameter(Descriptor::kCollection));
TNode<Object> key = CAST(Parameter(Descriptor::kKey));
Label call_runtime(this), if_not_found(this);
@ -2329,7 +2347,7 @@ TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) {
GotoIfNotJSReceiver(key, &if_not_found);
TNode<IntPtrT> hash = LoadJSReceiverIdentityHash(key, &if_not_found);
TNode<HeapObject> table = LoadTable(collection);
TNode<EphemeronHashTable> table = LoadTable(collection);
TNode<IntPtrT> capacity = LoadTableCapacity(table);
TNode<IntPtrT> key_index =
FindKeyIndexForKey(table, key, hash, EntryMask(capacity), &if_not_found);
@ -2351,7 +2369,7 @@ TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) {
// of a WeakMap or WeakSet.
TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) {
TNode<Context> context = CAST(Parameter(Descriptor::kContext));
TNode<HeapObject> collection = CAST(Parameter(Descriptor::kCollection));
TNode<JSWeakCollection> collection = CAST(Parameter(Descriptor::kCollection));
TNode<JSReceiver> key = CAST(Parameter(Descriptor::kKey));
TNode<Object> value = CAST(Parameter(Descriptor::kValue));
@ -2359,7 +2377,7 @@ TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) {
Label call_runtime(this), if_no_hash(this), if_not_found(this);
TNode<HeapObject> table = LoadTable(collection);
TNode<EphemeronHashTable> table = LoadTable(collection);
TNode<IntPtrT> capacity = LoadTableCapacity(table);
TNode<IntPtrT> entry_mask = EntryMask(capacity);
@ -2469,7 +2487,7 @@ TF_BUILTIN(WeakSetHas, WeakCollectionsBuiltinsAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_WEAK_SET_TYPE,
"WeakSet.prototype.has");
Node* const table = LoadTable(receiver);
Node* const table = LoadTable(CAST(receiver));
Node* const index =
CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key);
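The collections changes above convert the iteration helpers to typed std::pair/std::tuple results that are consumed with std::tie. Pulling the pieces of the MapPrototypeForEach hunk together, the consumer side looks roughly like the sketch below. It is assumed to live inside CollectionsBuiltinsAssembler, where Transition and NextSkipHoles are declared; the helper name and the elided per-entry work are illustrative.

// Sketch: iterating an OrderedHashMap with the now-typed helpers.
void ForEachLiveEntry(TNode<OrderedHashMap> initial_table) {
  TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
  TVARIABLE(OrderedHashMap, var_table, initial_table);
  Label loop(this, {&var_index, &var_table}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    TNode<OrderedHashMap> table = var_table.value();
    TNode<IntPtrT> index = var_index.value();
    // Follow any table transition caused by mutation during iteration;
    // Transition now returns std::pair<TNode<TableType>, TNode<IntPtrT>>.
    std::tie(table, index) =
        Transition<OrderedHashMap>(table, index, [](Node*, Node*) {});
    // Fetch the next live entry; NextSkipHoles returns a typed 3-tuple
    // and jumps to done_loop when the table is exhausted.
    TNode<Object> entry_key;
    TNode<IntPtrT> entry_start_position;
    std::tie(entry_key, entry_start_position, index) =
        NextSkipHoles<OrderedHashMap>(table, index, &done_loop);
    // ... per-entry work would go here ...
    var_index = index;
    var_table = table;
    Goto(&loop);
  }
  BIND(&done_loop);
}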

View File

@ -233,7 +233,8 @@ Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
Node* size = GetFixedArrayAllocationSize(length, PACKED_ELEMENTS, mode);
// Create a new closure from the given function info in new space
Node* function_context = AllocateInNewSpace(size);
TNode<Context> function_context =
UncheckedCast<Context>(AllocateInNewSpace(size));
Heap::RootListIndex context_type;
switch (scope_type) {
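Two different casts appear in the hunks of this CL: CAST(...), which asserts the expected type in debug builds, and UncheckedCast<T>(...), which performs no check. The UncheckedCast in this hunk is used because the context has only just been allocated and its map is not written yet, so a checked cast could not pass. A fragment for contrast; the second line reuses the JSMap table load from the collections hunks above, and locals such as size and receiver come from those hunks.

// Freshly allocated, fields not yet initialized: only an unchecked cast
// is possible here; the map and length are stored immediately afterwards.
TNode<Context> function_context =
    UncheckedCast<Context>(AllocateInNewSpace(size));

// Fully formed object loaded from a field: a checked CAST is used.
TNode<OrderedHashMap> table =
    CAST(LoadObjectField(receiver, JSMap::kTableOffset));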

View File

@ -376,7 +376,7 @@ TF_BUILTIN(ToObject, CodeStubAssembler) {
Goto(&if_wrapjsvalue);
BIND(&if_wrapjsvalue);
Node* native_context = LoadNativeContext(context);
TNode<Context> native_context = LoadNativeContext(context);
Node* constructor = LoadFixedArrayElement(
native_context, constructor_function_index_var.value());
Node* initial_map =

View File

@ -123,8 +123,8 @@ TF_BUILTIN(FastFunctionPrototypeBind, CodeStubAssembler) {
GotoIf(Uint32LessThanOrEqual(argc, Int32Constant(1)), &empty_arguments);
TNode<IntPtrT> elements_length =
Signed(ChangeUint32ToWord(Unsigned(Int32Sub(argc, Int32Constant(1)))));
Node* elements = AllocateFixedArray(PACKED_ELEMENTS, elements_length,
kAllowLargeObjectAllocation);
TNode<FixedArray> elements = AllocateFixedArray(
PACKED_ELEMENTS, elements_length, kAllowLargeObjectAllocation);
VARIABLE(index, MachineType::PointerRepresentation());
index.Bind(IntPtrConstant(0));
VariableList foreach_vars({&index}, zone());

View File

@ -100,7 +100,7 @@ TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
BIND(&if_notempty);
{
// Allocate a FixedArray in new space.
Node* result = AllocateFixedArray(kind, length);
TNode<FixedArray> result = AllocateFixedArray(kind, length);
// The elements might be used to back mapped arguments. In that case fill
// the mapped elements (i.e. the first {mapped_count}) with the hole, but
@ -109,14 +109,13 @@ TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
Node* the_hole = TheHoleConstant();
// Fill the first elements up to {number_of_holes} with the hole.
VARIABLE(var_index, MachineType::PointerRepresentation());
TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
Label loop1(this, &var_index), done_loop1(this);
var_index.Bind(IntPtrConstant(0));
Goto(&loop1);
BIND(&loop1);
{
// Load the current {index}.
Node* index = var_index.value();
TNode<IntPtrT> index = var_index.value();
// Check if we are done.
GotoIf(WordEqual(index, number_of_holes), &done_loop1);
@ -125,13 +124,13 @@ TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);
// Continue with next {index}.
var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
var_index = IntPtrAdd(index, IntPtrConstant(1));
Goto(&loop1);
}
BIND(&done_loop1);
// Compute the effective {offset} into the {frame}.
Node* offset = IntPtrAdd(length, IntPtrConstant(1));
TNode<IntPtrT> offset = IntPtrAdd(length, IntPtrConstant(1));
// Copy the parameters from {frame} (starting at {offset}) to {result}.
Label loop2(this, &var_index), done_loop2(this);
@ -139,20 +138,21 @@ TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
BIND(&loop2);
{
// Load the current {index}.
Node* index = var_index.value();
TNode<IntPtrT> index = var_index.value();
// Check if we are done.
GotoIf(WordEqual(index, length), &done_loop2);
// Load the parameter at the given {index}.
Node* value = Load(MachineType::AnyTagged(), frame,
TimesPointerSize(IntPtrSub(offset, index)));
TNode<Object> value =
CAST(Load(MachineType::AnyTagged(), frame,
TimesPointerSize(IntPtrSub(offset, index))));
// Store the {value} into the {result}.
StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);
// Continue with next {index}.
var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
var_index = IntPtrAdd(index, IntPtrConstant(1));
Goto(&loop2);
}
BIND(&done_loop2);
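The hunk above also shows the recurring VARIABLE to TVARIABLE conversion used throughout this CL. The sketch below contrasts the two idioms; the two halves are alternatives for the same variable and are not meant to coexist in one scope.

// Old, untyped style: only the machine representation is stated.
VARIABLE(var_index, MachineType::PointerRepresentation());
var_index.Bind(IntPtrConstant(0));
Node* index = var_index.value();                  // plain Node*
var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));

// New, typed style: the C++ type carries the CSA type.
TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
TNode<IntPtrT> typed_index = var_index.value();   // typed handle
var_index = IntPtrAdd(typed_index, IntPtrConstant(1));  // operator= replaces Bind()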

View File

@ -361,8 +361,8 @@ TNode<JSArray> ObjectEntriesValuesBuiltinsAssembler::FastGetOwnValuesOrEntries(
std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
PACKED_ELEMENTS, array_map, SmiConstant(2), nullptr,
IntPtrConstant(2));
StoreFixedArrayElement(elements, 0, next_key, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(elements, 1, value, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(CAST(elements), 0, next_key, SKIP_WRITE_BARRIER);
StoreFixedArrayElement(CAST(elements), 1, value, SKIP_WRITE_BARRIER);
value = TNode<JSArray>::UncheckedCast(array);
}

View File

@ -172,12 +172,12 @@ Node* RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
Label named_captures(this), out(this);
TNode<IntPtrT> num_indices = SmiUntag(CAST(LoadFixedArrayElement(
match_info, RegExpMatchInfo::kNumberOfCapturesIndex)));
CAST(match_info), RegExpMatchInfo::kNumberOfCapturesIndex)));
TNode<Smi> const num_results = SmiTag(WordShr(num_indices, 1));
Node* const start =
LoadFixedArrayElement(match_info, RegExpMatchInfo::kFirstCaptureIndex);
Node* const start = LoadFixedArrayElement(
CAST(match_info), RegExpMatchInfo::kFirstCaptureIndex);
Node* const end = LoadFixedArrayElement(
match_info, RegExpMatchInfo::kFirstCaptureIndex + 1);
CAST(match_info), RegExpMatchInfo::kFirstCaptureIndex + 1);
// Calculate the substring of the first match before creating the result array
// to avoid an unnecessary write barrier storing the first result.
@ -187,7 +187,7 @@ Node* RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
Node* const result =
AllocateRegExpResult(context, num_results, start, string);
Node* const result_elements = LoadElements(result);
TNode<FixedArray> const result_elements = CAST(LoadElements(result));
StoreFixedArrayElement(result_elements, 0, first, SKIP_WRITE_BARRIER);
@ -212,13 +212,14 @@ Node* RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
Node* const from_cursor = var_from_cursor.value();
Node* const to_cursor = var_to_cursor.value();
TNode<Smi> const start =
CAST(LoadFixedArrayElement(match_info, from_cursor));
CAST(LoadFixedArrayElement(CAST(match_info), from_cursor));
Label next_iter(this);
GotoIf(SmiEqual(start, SmiConstant(-1)), &next_iter);
Node* const from_cursor_plus1 = IntPtrAdd(from_cursor, IntPtrConstant(1));
Node* const end = LoadFixedArrayElement(match_info, from_cursor_plus1);
Node* const end =
LoadFixedArrayElement(CAST(match_info), from_cursor_plus1);
TNode<String> const capture =
CAST(CallBuiltin(Builtins::kSubString, context, string, start, end));
@ -243,7 +244,8 @@ Node* RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
// Preparations for named capture properties. Exit early if the result does
// not have any named captures to minimize performance impact.
Node* const data = LoadObjectField(regexp, JSRegExp::kDataOffset);
TNode<FixedArray> const data =
CAST(LoadObjectField(regexp, JSRegExp::kDataOffset));
CSA_ASSERT(this,
SmiEqual(CAST(LoadFixedArrayElement(data, JSRegExp::kTagIndex)),
SmiConstant(JSRegExp::IRREGEXP)));
@ -762,7 +764,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpPrototypeExecBodyWithoutResult(
// Update the new last index from {match_indices}.
TNode<Number> new_lastindex = CAST(LoadFixedArrayElement(
match_indices, RegExpMatchInfo::kFirstCaptureIndex + 1));
CAST(match_indices), RegExpMatchInfo::kFirstCaptureIndex + 1));
StoreLastIndex(context, regexp, new_lastindex, is_fastpath);
Goto(&out);
@ -1009,16 +1011,15 @@ TF_BUILTIN(RegExpExecAtom, RegExpBuiltinsAssembler) {
Node* const regexp = Parameter(Descriptor::kRegExp);
Node* const subject_string = Parameter(Descriptor::kString);
Node* const last_index = Parameter(Descriptor::kLastIndex);
Node* const match_info = Parameter(Descriptor::kMatchInfo);
TNode<FixedArray> const match_info = CAST(Parameter(Descriptor::kMatchInfo));
Node* const context = Parameter(Descriptor::kContext);
CSA_ASSERT(this, IsJSRegExp(regexp));
CSA_ASSERT(this, IsString(subject_string));
CSA_ASSERT(this, TaggedIsPositiveSmi(last_index));
CSA_ASSERT(this, IsFixedArray(match_info));
Node* const data = LoadObjectField(regexp, JSRegExp::kDataOffset);
CSA_ASSERT(this, IsFixedArray(data));
TNode<FixedArray> const data =
CAST(LoadObjectField(regexp, JSRegExp::kDataOffset));
CSA_ASSERT(this,
SmiEqual(CAST(LoadFixedArrayElement(data, JSRegExp::kTagIndex)),
SmiConstant(JSRegExp::ATOM)));
@ -1907,8 +1908,9 @@ void RegExpBuiltinsAssembler::RegExpPrototypeMatchBody(Node* const context,
if (is_fastpath) {
// On the fast path, grab the matching string from the raw match index
// array.
TNode<HeapObject> match_indices = RegExpPrototypeExecBodyWithoutResult(
CAST(context), CAST(regexp), string, &if_didnotmatch, true);
TNode<FixedArray> match_indices =
CAST(RegExpPrototypeExecBodyWithoutResult(
CAST(context), CAST(regexp), string, &if_didnotmatch, true));
Node* const match_from = LoadFixedArrayElement(
match_indices, RegExpMatchInfo::kFirstCaptureIndex);
@ -2212,8 +2214,8 @@ void RegExpBuiltinsAssembler::RegExpPrototypeSearchBodyFast(
// Call exec.
Label if_didnotmatch(this);
TNode<HeapObject> match_indices = RegExpPrototypeExecBodyWithoutResult(
CAST(context), CAST(regexp), CAST(string), &if_didnotmatch, true);
TNode<FixedArray> match_indices = CAST(RegExpPrototypeExecBodyWithoutResult(
CAST(context), CAST(regexp), CAST(string), &if_didnotmatch, true));
// Successful match.
{
@ -2397,7 +2399,7 @@ void RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(Node* const context,
Node* const result = AllocateJSArray(kind, array_map, capacity, length,
allocation_site, mode);
Node* const fixed_array = LoadElements(result);
TNode<FixedArray> const fixed_array = CAST(LoadElements(result));
StoreFixedArrayElement(fixed_array, 0, string);
Return(result);
@ -2439,17 +2441,18 @@ void RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(Node* const context,
Node* const last_match_info = LoadContextElement(
native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX);
Node* const match_indices =
CallBuiltin(Builtins::kRegExpExecInternal, context, regexp, string,
next_search_from, last_match_info);
TNode<HeapObject> const match_indices_ho =
CAST(CallBuiltin(Builtins::kRegExpExecInternal, context, regexp, string,
next_search_from, last_match_info));
// We're done if no match was found.
{
Label next(this);
Branch(IsNull(match_indices), &push_suffix_and_out, &next);
Branch(IsNull(match_indices_ho), &push_suffix_and_out, &next);
BIND(&next);
}
TNode<FixedArray> match_indices = CAST(match_indices_ho);
TNode<Smi> const match_from = CAST(LoadFixedArrayElement(
match_indices, RegExpMatchInfo::kFirstCaptureIndex));
@ -2704,8 +2707,8 @@ Node* RegExpBuiltinsAssembler::ReplaceGlobalCallableFastPath(
}
// Call into runtime for RegExpExecMultiple.
Node* last_match_info =
LoadContextElement(native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX);
TNode<FixedArray> last_match_info = CAST(LoadContextElement(
native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX));
Node* const res = CallRuntime(Runtime::kRegExpExecMultiple, context, regexp,
string, last_match_info, result_array);
@ -2717,12 +2720,11 @@ Node* RegExpBuiltinsAssembler::ReplaceGlobalCallableFastPath(
GotoIf(IsNull(res), &out);
// Reload last match info since it might have changed.
last_match_info =
LoadContextElement(native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX);
last_match_info = CAST(LoadContextElement(
native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX));
Node* const res_length = LoadJSArrayLength(res);
Node* const res_elems = LoadElements(res);
CSA_ASSERT(this, HasInstanceType(res_elems, FIXED_ARRAY_TYPE));
TNode<FixedArray> const res_elems = CAST(LoadElements(res));
TNode<Smi> const num_capture_registers = CAST(LoadFixedArrayElement(
last_match_info, RegExpMatchInfo::kNumberOfCapturesIndex));
@ -2887,7 +2889,6 @@ Node* RegExpBuiltinsAssembler::ReplaceSimpleStringFastPath(
const bool kIsFastPath = true;
TVARIABLE(String, var_result, EmptyStringConstant());
VARIABLE(var_match_indices, MachineRepresentation::kTagged);
VARIABLE(var_last_match_end, MachineRepresentation::kTagged, SmiZero());
VARIABLE(var_is_unicode, MachineRepresentation::kWord32, Int32Constant(0));
Variable* vars[] = {&var_result, &var_last_match_end};
@ -2904,16 +2905,17 @@ Node* RegExpBuiltinsAssembler::ReplaceSimpleStringFastPath(
BIND(&loop);
{
var_match_indices.Bind(RegExpPrototypeExecBodyWithoutResult(
CAST(context), CAST(regexp), string, &if_nofurthermatches,
kIsFastPath));
TNode<FixedArray> var_match_indices =
CAST(RegExpPrototypeExecBodyWithoutResult(CAST(context), CAST(regexp),
string, &if_nofurthermatches,
kIsFastPath));
// Successful match.
{
TNode<Smi> const match_start = CAST(LoadFixedArrayElement(
var_match_indices.value(), RegExpMatchInfo::kFirstCaptureIndex));
var_match_indices, RegExpMatchInfo::kFirstCaptureIndex));
TNode<Smi> const match_end = CAST(LoadFixedArrayElement(
var_match_indices.value(), RegExpMatchInfo::kFirstCaptureIndex + 1));
var_match_indices, RegExpMatchInfo::kFirstCaptureIndex + 1));
TNode<Smi> const replace_length = LoadStringLengthAsSmi(replace_string);
@ -3242,8 +3244,8 @@ TF_BUILTIN(RegExpStringIteratorPrototypeNext, RegExpStringIteratorAssembler) {
CSA_ASSERT(this,
SmiNotEqual(LoadFastJSArrayLength(CAST(var_match.value())),
SmiZero()));
TNode<FixedArrayBase> result_fixed_array =
LoadElements(CAST(var_match.value()));
TNode<FixedArray> result_fixed_array =
CAST(LoadElements(CAST(var_match.value())));
TNode<String> match_str =
CAST(LoadFixedArrayElement(result_fixed_array, 0));

View File

@ -1951,7 +1951,7 @@ TF_BUILTIN(StringPrototypeSplit, StringBuiltinsAssembler) {
Node* const capacity = IntPtrConstant(1);
Node* const result = AllocateJSArray(kind, array_map, capacity, length);
Node* const fixed_array = LoadElements(result);
TNode<FixedArray> const fixed_array = CAST(LoadElements(result));
StoreFixedArrayElement(fixed_array, 0, subject_string);
args.PopAndReturn(result);

View File

@ -1751,8 +1751,8 @@ TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
BIND(&if_property_dictionary);
{
var_hash = SmiUntag(CAST(
LoadFixedArrayElement(properties, NameDictionary::kObjectHashIndex)));
var_hash = SmiUntag(CAST(LoadFixedArrayElement(
CAST(properties), NameDictionary::kObjectHashIndex)));
Goto(&done);
}
@ -1964,13 +1964,9 @@ TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
}
TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
SloppyTNode<HeapObject> object, Node* index_node, int additional_offset,
TNode<FixedArray> object, Node* index_node, int additional_offset,
ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
// This function is currently used for non-FixedArrays (e.g., PropertyArrays)
// and thus the reasonable assert IsFixedArraySubclass(object) is
// not always true. TODO(marja): Fix.
CSA_SLOW_ASSERT(
this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
CSA_ASSERT(this, IsFixedArraySubclass(object));
CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
TNode<MaybeObject> element =
LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
@ -2608,11 +2604,9 @@ Node* CodeStubAssembler::StoreElements(TNode<Object> object,
return StoreObjectField(object, JSObject::kElementsOffset, elements);
}
Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
Node* value,
WriteBarrierMode barrier_mode,
int additional_offset,
ParameterMode parameter_mode) {
void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
int additional_offset, ParameterMode parameter_mode) {
CSA_SLOW_ASSERT(
this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
@ -2647,10 +2641,9 @@ Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
}),
FixedArray::kHeaderSize));
if (barrier_mode == SKIP_WRITE_BARRIER) {
return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
value);
StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
} else {
return Store(object, offset, value);
Store(object, offset, value);
}
}
@ -3239,7 +3232,8 @@ TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
TNode<WordT> store_size = IntPtrAdd(
TimesPointerSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
Node* result = AllocateInNewSpace(store_size);
TNode<NameDictionary> result =
UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
Comment("Initialize NameDictionary");
// Initialize FixedArray fields.
DCHECK(Heap::RootIsImmortalImmovable(Heap::kNameDictionaryMapRootIndex));
@ -3272,7 +3266,7 @@ TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
TNode<WordT> end_address = IntPtrAdd(
result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
StoreFieldsNoWriteBarrier(start_address, end_address, filler);
return CAST(result);
return result;
}
TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
@ -3433,12 +3427,13 @@ void CodeStubAssembler::FindOrderedHashTableEntry(
std::function<void(Node*, Label*, Label*)> key_compare,
Variable* entry_start_position, Label* entry_found, Label* not_found) {
// Get the index of the bucket.
Node* const number_of_buckets = SmiUntag(CAST(
LoadFixedArrayElement(table, CollectionType::kNumberOfBucketsIndex)));
Node* const number_of_buckets = SmiUntag(CAST(LoadFixedArrayElement(
CAST(table), CollectionType::kNumberOfBucketsIndex)));
Node* const bucket =
WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
Node* const first_entry = SmiUntag(CAST(LoadFixedArrayElement(
table, bucket, CollectionType::kHashTableStartIndex * kPointerSize)));
CAST(table), bucket,
CollectionType::kHashTableStartIndex * kPointerSize)));
// Walk the bucket chain.
Node* entry_start;
@ -3457,14 +3452,14 @@ void CodeStubAssembler::FindOrderedHashTableEntry(
// Make sure the entry index is within range.
CSA_ASSERT(
this,
UintPtrLessThan(
var_entry.value(),
SmiUntag(SmiAdd(
CAST(LoadFixedArrayElement(
table, CollectionType::kNumberOfElementsIndex)),
CAST(LoadFixedArrayElement(
table, CollectionType::kNumberOfDeletedElementsIndex))))));
this, UintPtrLessThan(
var_entry.value(),
SmiUntag(SmiAdd(
CAST(LoadFixedArrayElement(
CAST(table), CollectionType::kNumberOfElementsIndex)),
CAST(LoadFixedArrayElement(
CAST(table),
CollectionType::kNumberOfDeletedElementsIndex))))));
// Compute the index of the entry relative to kHashTableStartIndex.
entry_start =
@ -3474,7 +3469,7 @@ void CodeStubAssembler::FindOrderedHashTableEntry(
// Load the key from the entry.
Node* const candidate_key = LoadFixedArrayElement(
table, entry_start,
CAST(table), entry_start,
CollectionType::kHashTableStartIndex * kPointerSize);
key_compare(candidate_key, &if_key_found, &continue_next_entry);
@ -3482,7 +3477,7 @@ void CodeStubAssembler::FindOrderedHashTableEntry(
BIND(&continue_next_entry);
// Load the index of the next entry in the bucket chain.
var_entry.Bind(SmiUntag(CAST(LoadFixedArrayElement(
table, entry_start,
CAST(table), entry_start,
(CollectionType::kHashTableStartIndex + CollectionType::kChainOffset) *
kPointerSize))));
@ -5808,8 +5803,9 @@ TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
BIND(&if_codeisonebyte);
{
// Load the isolate wide single character string cache.
Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
Node* code_index = ChangeUint32ToWord(code);
TNode<FixedArray> cache =
CAST(LoadRoot(Heap::kSingleCharacterStringCacheRootIndex));
TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
// Check if we have an entry for the {code} in the single character string
// cache already.
@ -5821,7 +5817,7 @@ TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
BIND(&if_entryisundefined);
{
// Allocate a new SeqOneByteString for {code} and store it in the {cache}.
Node* result = AllocateSeqOneByteString(1);
TNode<String> result = AllocateSeqOneByteString(1);
StoreNoWriteBarrier(
MachineRepresentation::kWord8, result,
IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
@ -6476,7 +6472,7 @@ TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
// Cache entry's key must be a heap number
Node* number_key = LoadFixedArrayElement(number_string_cache, index);
Node* number_key = LoadFixedArrayElement(CAST(number_string_cache), index);
GotoIf(TaggedIsSmi(number_key), &runtime);
GotoIfNot(IsHeapNumber(number_key), &runtime);
@ -6490,8 +6486,8 @@ TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
// Heap number match, return value from cache entry.
IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
result =
CAST(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
result = CAST(
LoadFixedArrayElement(CAST(number_string_cache), index, kPointerSize));
Goto(&done);
BIND(&runtime);
@ -6507,13 +6503,13 @@ TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
// Load the smi key, make sure it matches the smi we're looking for.
Node* smi_index = BitcastWordToTagged(
WordAnd(WordShl(BitcastTaggedToWord(input), one), mask));
Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
SMI_PARAMETERS);
Node* smi_key = LoadFixedArrayElement(CAST(number_string_cache), smi_index,
0, SMI_PARAMETERS);
GotoIf(WordNotEqual(smi_key, input), &runtime);
// Smi match, return value from cache entry.
IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
result = CAST(LoadFixedArrayElement(number_string_cache, smi_index,
result = CAST(LoadFixedArrayElement(CAST(number_string_cache), smi_index,
kPointerSize, SMI_PARAMETERS));
Goto(&done);
}
@ -8209,7 +8205,7 @@ void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
Comment("[ LoadPropertyFromGlobalDictionary");
CSA_ASSERT(this, IsGlobalDictionary(dictionary));
Node* property_cell = LoadFixedArrayElement(dictionary, name_index);
Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
CSA_ASSERT(this, IsPropertyCell(property_cell));
Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
@ -8461,19 +8457,19 @@ void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
BIND(&if_isobjectorsmi);
{
Node* elements = LoadElements(object);
Node* length = LoadAndUntagFixedArrayBaseLength(elements);
TNode<FixedArray> elements = CAST(LoadElements(object));
TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
Node* element = LoadFixedArrayElement(elements, intptr_index);
Node* the_hole = TheHoleConstant();
TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
TNode<Oddball> the_hole = TheHoleConstant();
Branch(WordEqual(element, the_hole), if_not_found, if_found);
}
BIND(&if_isdouble);
{
Node* elements = LoadElements(object);
Node* length = LoadAndUntagFixedArrayBaseLength(elements);
TNode<FixedDoubleArray> elements = CAST(LoadElements(object));
TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
@ -8997,8 +8993,8 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
key = SmiUntag(key);
GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
Node* elements = LoadElements(receiver);
Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);
TNode<FixedArray> elements = CAST(LoadElements(receiver));
TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
VARIABLE(var_result, MachineRepresentation::kTagged);
if (!is_load) {
@ -9010,37 +9006,38 @@ Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
Node* mapped_index =
TNode<Object> mapped_index =
LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
BIND(&if_mapped);
{
CSA_ASSERT(this, TaggedIsSmi(mapped_index));
mapped_index = SmiUntag(mapped_index);
Node* the_context = LoadFixedArrayElement(elements, 0);
TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
// Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
// methods for accessing Context.
STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
FixedArray::OffsetOfElementAt(0));
if (is_load) {
Node* result = LoadFixedArrayElement(the_context, mapped_index);
Node* result = LoadFixedArrayElement(the_context, mapped_index_intptr);
CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
var_result.Bind(result);
} else {
StoreFixedArrayElement(the_context, mapped_index, value);
StoreFixedArrayElement(the_context, mapped_index_intptr, value);
}
Goto(&end);
}
BIND(&if_unmapped);
{
Node* backing_store = LoadFixedArrayElement(elements, 1);
GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
TNode<HeapObject> backing_store_ho =
CAST(LoadFixedArrayElement(elements, 1));
GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
bailout);
TNode<FixedArray> backing_store = CAST(backing_store_ho);
Node* backing_store_length =
TNode<IntPtrT> backing_store_length =
LoadAndUntagFixedArrayBaseLength(backing_store);
GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
@ -9117,7 +9114,7 @@ void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
} else {
WriteBarrierMode barrier_mode =
IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
}
}
@ -11753,8 +11750,8 @@ Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
Node* native_context = LoadNativeContext(context);
Node* length = SmiConstant(2);
int const elements_size = FixedArray::SizeFor(2);
Node* elements =
Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize);
TNode<FixedArray> elements = UncheckedCast<FixedArray>(
Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
Heap::kFixedArrayMapRootIndex);
StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
@ -12229,9 +12226,8 @@ Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
{
// Avoid runtime-call for empty dictionary receivers.
GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
Node* properties = LoadSlowProperties(receiver);
Node* length = LoadFixedArrayElement(
properties, NameDictionary::kNumberOfElementsIndex);
TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
TNode<Smi> length = GetNumberOfElements(properties);
GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
// Check that there are no elements on the {receiver} and its prototype
// chain. Given that we do not create an EnumCache for dict-mode objects,
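
One pattern runs through all of the code-stub-assembler.cc hunks above: the untyped FixedArrayBase coming out of LoadElements / LoadObjectField is CAST to TNode<FixedArray> (or the concrete dictionary type) once, and every subsequent element access goes through the typed overloads. A minimal sketch of that pattern, using only helpers that appear in this change (the labels are illustrative, not taken from the CL):

    // Typed, bounds-checked element load from a fast-elements object.
    TNode<FixedArray> elements = CAST(LoadElements(receiver));
    TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_out_of_bounds);
    TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
    GotoIf(WordEqual(element, TheHoleConstant()), &if_hole);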

@ -970,11 +970,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Load an array element from a FixedArray.
TNode<Object> LoadFixedArrayElement(
SloppyTNode<HeapObject> object, Node* index, int additional_offset = 0,
TNode<FixedArray> object, Node* index, int additional_offset = 0,
ParameterMode parameter_mode = INTPTR_PARAMETERS,
LoadSensitivity needs_poisoning = LoadSensitivity::kSafe);
TNode<Object> LoadFixedArrayElement(SloppyTNode<HeapObject> object,
TNode<Object> LoadFixedArrayElement(TNode<FixedArray> object,
TNode<IntPtrT> index,
LoadSensitivity needs_poisoning) {
return LoadFixedArrayElement(object, index, 0, INTPTR_PARAMETERS,
@ -982,21 +982,20 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
}
TNode<Object> LoadFixedArrayElement(
SloppyTNode<HeapObject> object, TNode<IntPtrT> index,
int additional_offset = 0,
TNode<FixedArray> object, TNode<IntPtrT> index, int additional_offset = 0,
LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
return LoadFixedArrayElement(object, index, additional_offset,
INTPTR_PARAMETERS, needs_poisoning);
}
TNode<Object> LoadFixedArrayElement(
SloppyTNode<HeapObject> object, int index, int additional_offset = 0,
TNode<FixedArray> object, int index, int additional_offset = 0,
LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
return LoadFixedArrayElement(object, IntPtrConstant(index),
additional_offset, INTPTR_PARAMETERS,
needs_poisoning);
}
TNode<Object> LoadFixedArrayElement(TNode<HeapObject> object,
TNode<Object> LoadFixedArrayElement(TNode<FixedArray> object,
TNode<Smi> index) {
return LoadFixedArrayElement(object, index, 0, SMI_PARAMETERS);
}
@ -1142,8 +1141,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* StoreObjectFieldRoot(Node* object, int offset,
Heap::RootListIndex root);
// Store an array element to a FixedArray.
Node* StoreFixedArrayElement(
Node* object, int index, Node* value,
void StoreFixedArrayElement(
TNode<FixedArray> object, int index, SloppyTNode<Object> value,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER) {
return StoreFixedArrayElement(object, IntPtrConstant(index), value,
barrier_mode);
@ -1152,17 +1151,35 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* StoreJSArrayLength(TNode<JSArray> array, TNode<Smi> length);
Node* StoreElements(TNode<Object> object, TNode<FixedArrayBase> elements);
Node* StoreFixedArrayElement(
Node* object, Node* index, Node* value,
void StoreFixedArrayOrPropertyArrayElement(
Node* array, Node* index, Node* value,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
int additional_offset = 0,
ParameterMode parameter_mode = INTPTR_PARAMETERS);
Node* StoreFixedArrayElementSmi(
TNode<FixedArray> object, TNode<Smi> index, TNode<Object> value,
void StoreFixedArrayElement(
TNode<FixedArray> array, Node* index, SloppyTNode<Object> value,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
int additional_offset = 0,
ParameterMode parameter_mode = INTPTR_PARAMETERS) {
StoreFixedArrayOrPropertyArrayElement(array, index, value, barrier_mode,
additional_offset, parameter_mode);
}
void StorePropertyArrayElement(
TNode<PropertyArray> array, Node* index, SloppyTNode<Object> value,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
int additional_offset = 0,
ParameterMode parameter_mode = INTPTR_PARAMETERS) {
StoreFixedArrayOrPropertyArrayElement(array, index, value, barrier_mode,
additional_offset, parameter_mode);
}
void StoreFixedArrayElementSmi(
TNode<FixedArray> array, TNode<Smi> index, TNode<Object> value,
WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER) {
return StoreFixedArrayElement(object, index, value, barrier_mode, 0,
SMI_PARAMETERS);
StoreFixedArrayElement(array, index, value, barrier_mode, 0,
SMI_PARAMETERS);
}
Node* StoreFixedDoubleArrayElement(
@ -2083,8 +2100,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
const int kKeyToDetailsOffset =
(ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
kPointerSize;
return Unsigned(LoadAndUntagToWord32FixedArrayElement(container, key_index,
kKeyToDetailsOffset));
return Unsigned(LoadAndUntagToWord32FixedArrayElement(
CAST(container), key_index, kKeyToDetailsOffset));
}
// Loads the value for the entry with the given key_index.
@ -2096,8 +2113,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
const int kKeyToValueOffset =
(ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
kPointerSize;
return UncheckedCast<Object>(
LoadFixedArrayElement(container, key_index, kKeyToValueOffset));
return LoadFixedArrayElement(CAST(container), key_index, kKeyToValueOffset);
}
TNode<Uint32T> LoadDetailsByKeyIndex(TNode<DescriptorArray> container,
@ -2110,7 +2126,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Stores the details for the entry with the given key_index.
// |details| must be a Smi.
template <class ContainerType>
void StoreDetailsByKeyIndex(Node* container, Node* key_index, Node* details) {
void StoreDetailsByKeyIndex(TNode<ContainerType> container,
TNode<IntPtrT> key_index, TNode<Smi> details) {
const int kKeyToDetailsOffset =
(ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
kPointerSize;
@ -2121,7 +2138,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Stores the value for the entry with the given key_index.
template <class ContainerType>
void StoreValueByKeyIndex(
Node* container, Node* key_index, Node* value,
TNode<ContainerType> container, TNode<IntPtrT> key_index,
TNode<Object> value,
WriteBarrierMode write_barrier = UPDATE_WRITE_BARRIER) {
const int kKeyToValueOffset =
(ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
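
Net effect of the header changes: every LoadFixedArrayElement overload now takes TNode<FixedArray>, the old untyped store becomes StoreFixedArrayOrPropertyArrayElement, and two thin typed wrappers sit on top of it. A hedged usage sketch (variable names are illustrative; the write-barrier mode stays optional as before):

    // FixedArray-backed elements go through the FixedArray entry point...
    StoreFixedArrayElement(CAST(elements), IntPtrConstant(0), value,
                           SKIP_WRITE_BARRIER);
    // ...while out-of-object properties use the PropertyArray entry point.
    StorePropertyArrayElement(CAST(properties), property_index, value);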

@ -671,12 +671,12 @@ class V8_EXPORT_PRIVATE CodeAssembler {
return TNode<T>::UncheckedCast(value);
}
CheckedNode<Object, false> Cast(Node* value, const char* location) {
CheckedNode<Object, false> Cast(Node* value, const char* location = "") {
return {value, this, location};
}
template <class T>
CheckedNode<T, true> Cast(TNode<T> value, const char* location) {
CheckedNode<T, true> Cast(TNode<T> value, const char* location = "") {
return {value, this, location};
}
@ -686,7 +686,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
#define CAST(x) \
Cast(x, "CAST(" #x ") at " __FILE__ ":" TO_STRING_LITERAL(__LINE__))
#else
#define CAST(x) Cast(x, "")
#define CAST(x) Cast(x)
#endif
#ifdef DEBUG
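
The default location argument is what lets the release-mode macro shrink from Cast(x, "") to Cast(x); call sites are unchanged. Sketch (assuming some FixedArrayBase-typed maybe_elements):

    // Debug builds still pass "CAST(maybe_elements) at <file>:<line>";
    // release builds now call Cast(maybe_elements) with the default "".
    TNode<FixedArray> elements = CAST(maybe_elements);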

@ -579,8 +579,8 @@ void AccessorAssembler::HandleLoadICSmiHandlerCase(
Node* module =
LoadObjectField(p->receiver, JSModuleNamespace::kModuleOffset,
MachineType::TaggedPointer());
Node* exports = LoadObjectField(module, Module::kExportsOffset,
MachineType::TaggedPointer());
TNode<ObjectHashTable> exports = CAST(LoadObjectField(
module, Module::kExportsOffset, MachineType::TaggedPointer()));
Node* cell = LoadFixedArrayElement(exports, index);
// The handler is only installed for exports that exist.
CSA_ASSERT(this, IsCell(cell));
@ -1147,16 +1147,16 @@ void AccessorAssembler::OverwriteExistingFastDataProperty(
BIND(&cont);
}
Node* properties =
ExtendPropertiesBackingStore(object, backing_store_index);
StoreFixedArrayElement(properties, backing_store_index,
var_value.value());
TNode<PropertyArray> properties =
CAST(ExtendPropertiesBackingStore(object, backing_store_index));
StorePropertyArrayElement(properties, backing_store_index,
var_value.value());
StoreMap(object, object_map);
Goto(&done);
} else {
Label tagged_rep(this), double_rep(this);
Node* properties = LoadFastProperties(object);
TNode<PropertyArray> properties = CAST(LoadFastProperties(object));
Branch(
Word32Equal(representation, Int32Constant(Representation::kDouble)),
&double_rep, &tagged_rep);
@ -1170,7 +1170,7 @@ void AccessorAssembler::OverwriteExistingFastDataProperty(
}
BIND(&tagged_rep);
{
StoreFixedArrayElement(properties, backing_store_index, value);
StorePropertyArrayElement(properties, backing_store_index, value);
Goto(&done);
}
}
@ -1267,7 +1267,8 @@ void AccessorAssembler::HandleStoreICProtoHandler(
STATIC_ASSERT(kData == 0);
GotoIf(IsSetWord32(details, kTypeAndReadOnlyMask), miss);
StoreValueByKeyIndex<NameDictionary>(properties, name_index, p->value);
StoreValueByKeyIndex<NameDictionary>(
CAST(properties), UncheckedCast<IntPtrT>(name_index), p->value);
Return(p->value);
},
miss, ic_mode);
@ -1768,13 +1769,13 @@ void AccessorAssembler::EmitElementLoad(
BIND(&if_fast_packed);
{
Comment("fast packed elements");
exit_point->Return(LoadFixedArrayElement(elements, intptr_index));
exit_point->Return(LoadFixedArrayElement(CAST(elements), intptr_index));
}
BIND(&if_fast_holey);
{
Comment("fast holey elements");
Node* element = LoadFixedArrayElement(elements, intptr_index);
Node* element = LoadFixedArrayElement(CAST(elements), intptr_index);
GotoIf(WordEqual(element, TheHoleConstant()), if_hole);
exit_point->Return(element);
}
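
A point worth calling out in the IC hunks: CAST is used where the cast can be verified (it can assert the type in debug builds), whereas UncheckedCast is used for values like name_index whose type is known only by convention. A trimmed, excerpt-style sketch of the rewritten dictionary store from HandleStoreICProtoHandler (not the full handler):

    // The dictionary is checkable; the raw key index is not.
    StoreValueByKeyIndex<NameDictionary>(
        CAST(properties), UncheckedCast<IntPtrT>(name_index), p->value);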

@ -647,8 +647,8 @@ Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
}
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
BytecodeArray::kConstantPoolOffset);
TNode<FixedArray> constant_pool = CAST(LoadObjectField(
BytecodeArrayTaggedPointer(), BytecodeArray::kConstantPoolOffset));
return LoadFixedArrayElement(constant_pool, UncheckedCast<IntPtrT>(index),
LoadSensitivity::kCritical);
}
@ -1599,16 +1599,18 @@ void InterpreterAssembler::AbortIfRegisterCountInvalid(
}
Node* InterpreterAssembler::ExportParametersAndRegisterFile(
Node* array, const RegListNodePair& registers,
Node* formal_parameter_count) {
TNode<FixedArray> array, const RegListNodePair& registers,
TNode<Int32T> formal_parameter_count) {
// Store the formal parameters (without receiver) followed by the
// registers into the generator's internal parameters_and_registers field.
formal_parameter_count = ChangeInt32ToIntPtr(formal_parameter_count);
TNode<IntPtrT> formal_parameter_count_intptr =
ChangeInt32ToIntPtr(formal_parameter_count);
Node* register_count = ChangeUint32ToWord(registers.reg_count());
if (FLAG_debug_code) {
CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
RegisterLocation(Register(0))));
AbortIfRegisterCountInvalid(array, formal_parameter_count, register_count);
AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
register_count);
}
{
@ -1620,13 +1622,14 @@ Node* InterpreterAssembler::ExportParametersAndRegisterFile(
Node* reg_base = IntPtrAdd(
IntPtrConstant(Register::FromParameterIndex(0, 1).ToOperand() - 1),
formal_parameter_count);
formal_parameter_count_intptr);
Goto(&loop);
BIND(&loop);
{
Node* index = var_index.value();
GotoIfNot(UintPtrLessThan(index, formal_parameter_count), &done_loop);
GotoIfNot(UintPtrLessThan(index, formal_parameter_count_intptr),
&done_loop);
Node* reg_index = IntPtrSub(reg_base, index);
Node* value = LoadRegister(reg_index);
@ -1657,7 +1660,7 @@ Node* InterpreterAssembler::ExportParametersAndRegisterFile(
IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
Node* value = LoadRegister(reg_index);
Node* array_index = IntPtrAdd(formal_parameter_count, index);
Node* array_index = IntPtrAdd(formal_parameter_count_intptr, index);
StoreFixedArrayElement(array, array_index, value);
var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
@ -1669,19 +1672,20 @@ Node* InterpreterAssembler::ExportParametersAndRegisterFile(
return array;
}
Node* InterpreterAssembler::ImportRegisterFile(Node* array,
const RegListNodePair& registers,
Node* formal_parameter_count) {
formal_parameter_count = ChangeInt32ToIntPtr(formal_parameter_count);
Node* register_count = ChangeUint32ToWord(registers.reg_count());
Node* InterpreterAssembler::ImportRegisterFile(
TNode<FixedArray> array, const RegListNodePair& registers,
TNode<Int32T> formal_parameter_count) {
TNode<IntPtrT> formal_parameter_count_intptr =
ChangeInt32ToIntPtr(formal_parameter_count);
TNode<UintPtrT> register_count = ChangeUint32ToWord(registers.reg_count());
if (FLAG_debug_code) {
CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
RegisterLocation(Register(0))));
AbortIfRegisterCountInvalid(array, formal_parameter_count, register_count);
AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
register_count);
}
Variable var_index(this, MachineType::PointerRepresentation());
var_index.Bind(IntPtrConstant(0));
TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
// Iterate over array and write values into register file. Also erase the
// array contents to not keep them alive artificially.
@ -1689,19 +1693,21 @@ Node* InterpreterAssembler::ImportRegisterFile(Node* array,
Goto(&loop);
BIND(&loop);
{
Node* index = var_index.value();
TNode<IntPtrT> index = var_index.value();
GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
Node* array_index = IntPtrAdd(formal_parameter_count, index);
Node* value = LoadFixedArrayElement(array, array_index);
TNode<IntPtrT> array_index =
IntPtrAdd(formal_parameter_count_intptr, index);
TNode<Object> value = LoadFixedArrayElement(array, array_index);
Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
TNode<IntPtrT> reg_index =
IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
StoreRegister(value, reg_index);
StoreFixedArrayElement(array, array_index,
LoadRoot(Heap::kStaleRegisterRootIndex));
var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
var_index = IntPtrAdd(index, IntPtrConstant(1));
Goto(&loop);
}
BIND(&done_loop);
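
Besides the typed parameters, the loop above switches from VARIABLE/Bind to TVARIABLE, so the index is a TNode<IntPtrT> throughout and is advanced by plain assignment. Minimal sketch of the idiom (loop body elided; register_count as defined above):

    TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
    Label loop(this, &var_index), done_loop(this);
    Goto(&loop);
    BIND(&loop);
    {
      TNode<IntPtrT> index = var_index.value();
      GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);
      // ... per-register work goes here ...
      var_index = IntPtrAdd(index, IntPtrConstant(1));
      Goto(&loop);
    }
    BIND(&done_loop);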

@ -103,11 +103,11 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// - Resume copies only the registers from the generator, the arguments
// are copied by the ResumeGenerator trampoline.
compiler::Node* ExportParametersAndRegisterFile(
compiler::Node* array, const RegListNodePair& registers,
compiler::Node* formal_parameter_count);
compiler::Node* ImportRegisterFile(compiler::Node* array,
TNode<FixedArray> array, const RegListNodePair& registers,
TNode<Int32T> formal_parameter_count);
compiler::Node* ImportRegisterFile(TNode<FixedArray> array,
const RegListNodePair& registers,
compiler::Node* formal_parameter_count);
TNode<Int32T> formal_parameter_count);
// Loads from and stores to the interpreter register file.
compiler::Node* LoadRegister(Register reg);

@ -681,8 +681,8 @@ IGNITION_HANDLER(LdaModuleVariable, InterpreterAssembler) {
BIND(&if_export);
{
Node* regular_exports =
LoadObjectField(module, Module::kRegularExportsOffset);
TNode<FixedArray> regular_exports =
CAST(LoadObjectField(module, Module::kRegularExportsOffset));
// The actual array index is (cell_index - 1).
Node* export_index = IntPtrSub(cell_index, IntPtrConstant(1));
Node* cell = LoadFixedArrayElement(regular_exports, export_index);
@ -692,8 +692,8 @@ IGNITION_HANDLER(LdaModuleVariable, InterpreterAssembler) {
BIND(&if_import);
{
Node* regular_imports =
LoadObjectField(module, Module::kRegularImportsOffset);
TNode<FixedArray> regular_imports =
CAST(LoadObjectField(module, Module::kRegularImportsOffset));
// The actual array index is (-cell_index - 1).
Node* import_index = IntPtrSub(IntPtrConstant(-1), cell_index);
Node* cell = LoadFixedArrayElement(regular_imports, import_index);
@ -723,8 +723,8 @@ IGNITION_HANDLER(StaModuleVariable, InterpreterAssembler) {
BIND(&if_export);
{
Node* regular_exports =
LoadObjectField(module, Module::kRegularExportsOffset);
TNode<FixedArray> regular_exports =
CAST(LoadObjectField(module, Module::kRegularExportsOffset));
// The actual array index is (cell_index - 1).
Node* export_index = IntPtrSub(cell_index, IntPtrConstant(1));
Node* cell = LoadFixedArrayElement(regular_exports, export_index);
@ -2932,7 +2932,7 @@ IGNITION_HANDLER(ForInNext, InterpreterAssembler) {
Node* feedback_vector = LoadFeedbackVector();
// Load the next key from the enumeration array.
Node* key = LoadFixedArrayElement(cache_array, index, 0,
Node* key = LoadFixedArrayElement(CAST(cache_array), index, 0,
CodeStubAssembler::SMI_PARAMETERS);
// Check if we can use the for-in fast path potentially using the enum cache.
@ -3025,8 +3025,8 @@ IGNITION_HANDLER(Illegal, InterpreterAssembler) {
// in the accumulator.
IGNITION_HANDLER(SuspendGenerator, InterpreterAssembler) {
Node* generator = LoadRegisterAtOperandIndex(0);
Node* array = LoadObjectField(
generator, JSGeneratorObject::kParametersAndRegistersOffset);
TNode<FixedArray> array = CAST(LoadObjectField(
generator, JSGeneratorObject::kParametersAndRegistersOffset));
Node* closure = LoadRegister(Register::function_closure());
Node* context = GetContext();
RegListNodePair registers = GetRegisterListAtOperandIndex(1);
@ -3034,9 +3034,9 @@ IGNITION_HANDLER(SuspendGenerator, InterpreterAssembler) {
Node* shared =
LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset);
Node* formal_parameter_count =
TNode<Int32T> formal_parameter_count = UncheckedCast<Int32T>(
LoadObjectField(shared, SharedFunctionInfo::kFormalParameterCountOffset,
MachineType::Uint16());
MachineType::Uint16()));
ExportParametersAndRegisterFile(array, registers, formal_parameter_count);
StoreObjectField(generator, JSGeneratorObject::kContextOffset, context);
@ -3108,13 +3108,13 @@ IGNITION_HANDLER(ResumeGenerator, InterpreterAssembler) {
Node* shared =
LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset);
Node* formal_parameter_count =
TNode<Int32T> formal_parameter_count = UncheckedCast<Int32T>(
LoadObjectField(shared, SharedFunctionInfo::kFormalParameterCountOffset,
MachineType::Uint16());
MachineType::Uint16()));
ImportRegisterFile(
LoadObjectField(generator,
JSGeneratorObject::kParametersAndRegistersOffset),
CAST(LoadObjectField(generator,
JSGeneratorObject::kParametersAndRegistersOffset)),
registers, formal_parameter_count);
// Return the generator's input_or_debug_pos in the accumulator.

@ -83,8 +83,8 @@ Handle<Code> BuildSetupFunction(Isolate* isolate,
// First allocate the FixedArray which will hold the final results. Here we
// should take care of all allocations, meaning we allocate HeapNumbers and
// FixedArrays representing Simd128 values.
Node* state_out = __ AllocateFixedArray(PACKED_ELEMENTS,
__ IntPtrConstant(parameters.size()));
TNode<FixedArray> state_out = __ AllocateFixedArray(
PACKED_ELEMENTS, __ IntPtrConstant(parameters.size()));
for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
switch (parameters[i].representation()) {
case MachineRepresentation::kTagged:
@ -94,7 +94,7 @@ Handle<Code> BuildSetupFunction(Isolate* isolate,
__ StoreFixedArrayElement(state_out, i, __ AllocateHeapNumber());
break;
case MachineRepresentation::kSimd128: {
Node* vector =
TNode<FixedArray> vector =
__ AllocateFixedArray(PACKED_SMI_ELEMENTS, __ IntPtrConstant(4));
for (int lane = 0; lane < 4; lane++) {
__ StoreFixedArrayElement(vector, lane, __ SmiConstant(0));
@ -109,7 +109,7 @@ Handle<Code> BuildSetupFunction(Isolate* isolate,
}
params.push_back(state_out);
// Then take each element of the initial state and pass them as arguments.
Node* state_in = __ Parameter(1);
TNode<FixedArray> state_in = __ Cast(__ Parameter(1));
for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
Node* element = __ LoadFixedArrayElement(state_in, __ IntPtrConstant(i));
// Unbox all elements before passing them as arguments.
@ -197,7 +197,7 @@ Handle<Code> BuildTeardownFunction(Isolate* isolate,
std::vector<AllocatedOperand> parameters) {
CodeAssemblerTester tester(isolate, call_descriptor, "teardown");
CodeStubAssembler assembler(tester.state());
Node* result_array = __ Parameter(1);
TNode<FixedArray> result_array = __ Cast(__ Parameter(1));
for (int i = 0; i < static_cast<int>(parameters.size()); i++) {
// The first argument is not used and the second is "result_array".
Node* param = __ Parameter(i + 2);
@ -216,7 +216,8 @@ Handle<Code> BuildTeardownFunction(Isolate* isolate,
param, MachineRepresentation::kFloat64);
break;
case MachineRepresentation::kSimd128: {
Node* vector = __ LoadFixedArrayElement(result_array, i);
TNode<FixedArray> vector =
__ Cast(__ LoadFixedArrayElement(result_array, i));
for (int lane = 0; lane < 4; lane++) {
Node* lane_value =
__ SmiFromInt32(tester.raw_assembler_for_testing()->AddNode(

@ -2379,7 +2379,7 @@ TEST(CreatePromiseResolvingFunctions) {
std::tie(resolve, reject) = m.CreatePromiseResolvingFunctions(
promise, m.BooleanConstant(false), native_context);
Node* const kSize = m.IntPtrConstant(2);
Node* const arr = m.AllocateFixedArray(PACKED_ELEMENTS, kSize);
TNode<FixedArray> const arr = m.AllocateFixedArray(PACKED_ELEMENTS, kSize);
m.StoreFixedArrayElement(arr, 0, resolve);
m.StoreFixedArrayElement(arr, 1, reject);
m.Return(arr);
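
The test-side updates are mechanical but show the intended call-site shape outside production code: the result of AllocateFixedArray is assigned to a TNode<FixedArray> directly, and untyped Parameter() nodes are wrapped with Cast before element accesses. A hedged sketch combining both, inside a CodeStubAssembler-based test (names illustrative):

    TNode<FixedArray> arr =
        m.AllocateFixedArray(PACKED_ELEMENTS, m.IntPtrConstant(2));
    m.StoreFixedArrayElement(arr, 0, m.SmiConstant(0));
    TNode<FixedArray> in = m.Cast(m.Parameter(1));
    m.Return(m.LoadFixedArrayElement(in, m.IntPtrConstant(0)));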