[wasm-gc][turbofan] Introduce wasm load elimination

We introduce high-level typed load elimination for wasm. It is based
on CSALoadElimination. It operates on wasm struct.set/get and
array.length operators (with other array operations pending). Wasm types
are used to refine the may-alias analysis run for stores.

Drive-by:
- Type more nodes in wasm-compiler and wasm-gc-operator-reducer.
- Remove an unsafe-cast test which now hits an Unreachable Turbofan
  node.

Bug: v8:7748
Change-Id: I309e4af4d9f9c584e27ff79804a776666b5dc3c1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4146430
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85255}
This commit is contained in:
Manos Koukoutos 2023-01-12 12:55:04 +01:00 committed by V8 LUCI CQ
parent c020a31092
commit 90c972bb52
10 changed files with 677 additions and 18 deletions

View File

@ -2977,6 +2977,8 @@ filegroup(
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-load-elimination.cc",
"src/compiler/wasm-load-elimination.h",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-gc-lowering.cc",

View File

@ -3719,6 +3719,7 @@ v8_header_set("v8_internal_headers") {
"src/compiler/wasm-gc-operator-reducer.h",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-load-elimination.h",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-typer.h",
"src/debug/debug-wasm-objects-inl.h",
@ -4381,6 +4382,7 @@ if (v8_enable_webassembly) {
"src/compiler/wasm-gc-operator-reducer.cc",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-load-elimination.cc",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-typer.cc",
]

View File

@ -127,6 +127,7 @@
#include "src/compiler/wasm-gc-lowering.h"
#include "src/compiler/wasm-gc-operator-reducer.h"
#include "src/compiler/wasm-inlining.h"
#include "src/compiler/wasm-load-elimination.h"
#include "src/compiler/wasm-loop-peeling.h"
#include "src/compiler/wasm-typer.h"
#include "src/wasm/function-body-decoder.h"
@ -2202,8 +2203,11 @@ struct WasmGCOptimizationPhase {
GraphReducer graph_reducer(
temp_zone, data->graph(), &data->info()->tick_counter(), data->broker(),
data->jsgraph()->Dead(), data->observe_node_manager());
WasmLoadElimination load_elimination(&graph_reducer, data->jsgraph(),
temp_zone);
WasmGCOperatorReducer wasm_gc(&graph_reducer, temp_zone, data->mcgraph(),
module);
AddReducer(data, &graph_reducer, &load_elimination);
AddReducer(data, &graph_reducer, &wasm_gc);
graph_reducer.ReduceGraph();
}
@ -3597,7 +3601,7 @@ void Pipeline::GenerateCodeForWasmFunction(
// Int64Lowering must happen after inlining (otherwise inlining would have
// to invoke it separately for the inlined function body).
// It must also happen after WasmGCLowering, otherwise it would have to
// add type annotations to nodes it creates.
// add type annotations to nodes it creates, and handle wasm-gc nodes.
LowerInt64(function_body.sig, mcgraph, data.simplified(), pipeline);
if (v8_flags.wasm_opt || is_asm_js) {

View File

@ -5448,7 +5448,8 @@ Node* WasmGraphBuilder::ArrayNewFixed(const wasm::ArrayType* type, Node* rtt,
wasm::ObjectAccess::ToTagged(JSReceiver::kPropertiesOrHashOffset),
LOAD_ROOT(EmptyFixedArray, empty_fixed_array));
gasm_->ArrayInitializeLength(
array, Int32Constant(static_cast<int>(elements.size())));
array, SetType(Int32Constant(static_cast<int>(elements.size())),
wasm::kWasmI32));
for (int i = 0; i < static_cast<int>(elements.size()); i++) {
gasm_->ArraySet(array, gasm_->Int32Constant(i), elements[i], type);
}

View File

@ -227,16 +227,20 @@ Reduction WasmGCOperatorReducer::ReduceCheckNull(Node* node) {
// Optimize the check away if the argument is known to be non-null.
if (object_type.type.is_non_nullable()) {
ReplaceWithValue(
node, gasm_.Int32Constant(node->opcode() == IrOpcode::kIsNull ? 0 : 1));
ReplaceWithValue(node,
SetType(gasm_.Int32Constant(
node->opcode() == IrOpcode::kIsNull ? 0 : 1),
wasm::kWasmI32));
node->Kill();
return Replace(object); // Irrelevant replacement.
}
// Optimize the check away if the argument is known to be null.
if (object->opcode() == IrOpcode::kNull) {
ReplaceWithValue(
node, gasm_.Int32Constant(node->opcode() == IrOpcode::kIsNull ? 1 : 0));
ReplaceWithValue(node,
SetType(gasm_.Int32Constant(
node->opcode() == IrOpcode::kIsNull ? 1 : 0),
wasm::kWasmI32));
node->Kill();
return Replace(object); // Irrelevant replacement.
}

View File

@ -0,0 +1,446 @@
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/wasm-load-elimination.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/wasm/struct-types.h"
#include "src/wasm/wasm-subtyping.h"
namespace v8::internal::compiler {
/**** Helpers ****/
namespace {

// True iff the static wasm types of {lhs} and {rhs} are not related by
// subtyping in either direction.
bool TypesUnrelated(Node* lhs, Node* rhs) {
  wasm::TypeInModule lhs_type = NodeProperties::GetType(lhs).AsWasm();
  wasm::TypeInModule rhs_type = NodeProperties::GetType(rhs).AsWasm();
  return wasm::TypesUnrelated(lhs_type.type, rhs_type.type, lhs_type.module,
                              rhs_type.module);
}

// A node is "fresh" if it denotes an allocation made in this function.
bool IsFresh(Node* node) {
  IrOpcode::Value opcode = node->opcode();
  return opcode == IrOpcode::kAllocate || opcode == IrOpcode::kAllocateRaw;
}

// A node is "constant" if its value flows in from outside the function.
bool IsConstant(Node* node) {
  IrOpcode::Value opcode = node->opcode();
  return opcode == IrOpcode::kParameter || opcode == IrOpcode::kHeapConstant;
}

// Conservative aliasing check: two distinct nodes cannot alias if their
// types are unrelated, or if one is a fresh allocation while the other is
// also fresh or comes from outside the function.
bool MayAlias(Node* lhs, Node* rhs) {
  if (lhs == rhs) return true;
  if (TypesUnrelated(lhs, rhs)) return false;
  if (IsFresh(lhs) && (IsFresh(rhs) || IsConstant(rhs))) return false;
  if (IsConstant(lhs) && IsFresh(rhs)) return false;
  return true;
}

// Skips past type casts, null assertions, and type guards to the underlying
// object node.
Node* ResolveAliases(Node* node) {
  for (;;) {
    switch (node->opcode()) {
      case IrOpcode::kWasmTypeCast:
      case IrOpcode::kAssertNotNull:
      case IrOpcode::kTypeGuard:
        node = NodeProperties::GetValueInput(node, 0);
        break;
      default:
        return node;
    }
  }
}

// We model array length as a field at index kArrayLengthFieldIndex.
constexpr int kArrayLengthFieldIndex = -1;

}  // namespace
Reduction WasmLoadElimination::UpdateState(Node* node,
                                           AbstractState const* state) {
  AbstractState const* previous = node_states_.Get(node);
  // Only signal that the {node} has Changed if {state} carries information
  // different from what was previously recorded for {node}.
  bool unchanged =
      state == previous || (previous != nullptr && state->Equals(previous));
  if (unchanged) return NoChange();
  node_states_.Set(node, state);
  return Changed(node);
}
std::tuple<Node*, Node*> WasmLoadElimination::TruncateAndExtendOrType(
    Node* value, Node* effect, Node* control, wasm::ValueType field_type,
    bool is_signed) {
  // Packed (i8/i16) fields are represented as i32 values; re-narrow the
  // cached value to the field width with the requested signedness.
  if (field_type == wasm::kWasmI8 || field_type == wasm::kWasmI16) {
    int bit_width = 8 * field_type.value_kind_size();
    Node* narrowed = nullptr;
    if (is_signed) {
      // Sign-extend via shift-left then arithmetic shift-right.
      int shift = 32 - bit_width;
      Node* shifted = graph()->NewNode(machine()->Word32Shl(), value,
                                       jsgraph()->Int32Constant(shift));
      narrowed = graph()->NewNode(machine()->Word32Sar(), shifted,
                                  jsgraph()->Int32Constant(shift));
    } else {
      // Zero-extend by masking off the upper bits.
      int mask = (1 << bit_width) - 1;
      narrowed = graph()->NewNode(machine()->Word32And(), value,
                                  jsgraph()->Int32Constant(mask));
    }
    NodeProperties::SetType(narrowed, NodeProperties::GetType(value));
    return {narrowed, effect};
  }
  wasm::TypeInModule value_type = NodeProperties::GetType(value).AsWasm();
  // If the cached value's type is not a subtype of the field type, insert a
  // TypeGuard so downstream users see the field's type.
  // TODO(12166): Adapt this if cross-module inlining is allowed.
  if (!wasm::IsSubtypeOf(value_type.type, field_type, value_type.module)) {
    Type guard_type =
        Type::Wasm({field_type, value_type.module}, graph()->zone());
    Node* guard = graph()->NewNode(common()->TypeGuard(guard_type), value,
                                   effect, control);
    NodeProperties::SetType(guard, guard_type);
    return {guard, guard};
  }
  return {value, effect};
}
/***** Reductions *****/
Reduction WasmLoadElimination::Reduce(Node* node) {
  if (v8_flags.trace_turbo_load_elimination) {
    // TODO(manoskouk): Add some tracing.
  }
  switch (node->opcode()) {
    // Struct operations.
    case IrOpcode::kWasmStructGet:
      return ReduceWasmStructGet(node);
    case IrOpcode::kWasmStructSet:
      return ReduceWasmStructSet(node);
    // Array operations (only the length field is tracked for now).
    case IrOpcode::kWasmArrayLength:
      return ReduceWasmArrayLength(node);
    case IrOpcode::kWasmArrayInitializeLength:
      return ReduceWasmArrayInitializeLength(node);
    // Effect-chain bookkeeping.
    case IrOpcode::kStart:
      return ReduceStart(node);
    case IrOpcode::kEffectPhi:
      return ReduceEffectPhi(node);
    case IrOpcode::kDead:
      return NoChange();
    default:
      return ReduceOtherNode(node);
  }
}
// Tries to replace a struct.get with a previously cached value for the same
// (object, field) pair; otherwise records this load in the state.
Reduction WasmLoadElimination::ReduceWasmStructGet(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmStructGet);
  Node* object = ResolveAliases(NodeProperties::GetValueInput(node, 0));
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);
  // Without a state for the incoming effect we cannot conclude anything yet.
  AbstractState const* state = node_states_.Get(effect);
  if (state == nullptr) return NoChange();
  const WasmFieldInfo& field_info = OpParameter<WasmFieldInfo>(node->op());
  bool is_mutable = field_info.type->mutability(field_info.field_index);
  // - The node can only be typed as bottom in unreachable code.
  // - We can only find the field in the wrong half-state in unreachable code.
  // In either case, replace the load with a dead value behind an Unreachable
  // node.
  if (NodeProperties::GetType(node).AsWasm().type.is_bottom() ||
      !(is_mutable ? &state->immutable_state : &state->mutable_state)
           ->LookupField(field_info.field_index, object)
           .IsEmpty()) {
    Node* unreachable =
        graph()->NewNode(jsgraph()->common()->Unreachable(), effect, control);
    MachineRepresentation rep =
        field_info.type->field(field_info.field_index).machine_representation();
    Node* dead_value =
        graph()->NewNode(jsgraph()->common()->DeadValue(rep), unreachable);
    NodeProperties::SetType(dead_value, NodeProperties::GetType(node));
    ReplaceWithValue(node, dead_value, unreachable, control);
    node->Kill();
    return Replace(dead_value);
  }
  // Look up a cached value for this field of this object in the matching
  // half-state.
  HalfState const* half_state =
      is_mutable ? &state->mutable_state : &state->immutable_state;
  FieldOrElementValue lookup_result =
      half_state->LookupField(field_info.field_index, object);
  if (!lookup_result.IsEmpty() && !lookup_result.value->IsDead()) {
    // Hit: replace the load with the cached value, adapting packed fields
    // and possibly inserting a TypeGuard (see TruncateAndExtendOrType).
    std::tuple<Node*, Node*> replacement = TruncateAndExtendOrType(
        lookup_result.value, effect, control,
        field_info.type->field(field_info.field_index), field_info.is_signed);
    ReplaceWithValue(node, std::get<0>(replacement), std::get<1>(replacement),
                     control);
    node->Kill();
    return Replace(std::get<0>(replacement));
  }
  // Miss: remember this load's value so later loads of the same field on the
  // same object can reuse it.
  half_state = half_state->AddField(field_info.field_index, object, node);
  AbstractState const* new_state =
      is_mutable
          ? zone()->New<AbstractState>(*half_state, state->immutable_state)
          : zone()->New<AbstractState>(state->mutable_state, *half_state);
  return UpdateState(node, new_state);
}
// Updates the abstract state for a struct.set: kills potentially-aliasing
// entries for mutable fields, then records the stored value.
Reduction WasmLoadElimination::ReduceWasmStructSet(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmStructSet);
  Node* object = ResolveAliases(NodeProperties::GetValueInput(node, 0));
  Node* value = NodeProperties::GetValueInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);
  AbstractState const* state = node_states_.Get(effect);
  if (state == nullptr) return NoChange();
  const WasmFieldInfo& field_info = OpParameter<WasmFieldInfo>(node->op());
  bool is_mutable = field_info.type->mutability(field_info.field_index);
  if (is_mutable) {
    // We can find the field in the wrong half-state only in unreachable code.
    if (!(state->immutable_state.LookupField(field_info.field_index, object)
              .IsEmpty())) {
      Node* unreachable =
          graph()->NewNode(jsgraph()->common()->Unreachable(), effect, control);
      return Replace(unreachable);
    }
    // Invalidate entries for this field index on all objects that may alias
    // {object}, then record the newly stored value.
    HalfState const* mutable_state =
        state->mutable_state.KillField(field_info.field_index, object);
    mutable_state =
        mutable_state->AddField(field_info.field_index, object, value);
    AbstractState const* new_state =
        zone()->New<AbstractState>(*mutable_state, state->immutable_state);
    return UpdateState(node, new_state);
  } else {
    // We can find the field in the wrong half-state only in unreachable code.
    if (!(state->mutable_state.LookupField(field_info.field_index, object)
              .IsEmpty())) {
      Node* unreachable =
          graph()->NewNode(jsgraph()->common()->Unreachable(), effect, control);
      return Replace(unreachable);
    }
    // We should not initialize the same immutable field twice.
    DCHECK(state->immutable_state.LookupField(field_info.field_index, object)
               .IsEmpty());
    // Immutable fields are only written at initialization, so no killing is
    // needed; just record the value.
    HalfState const* immutable_state =
        state->immutable_state.AddField(field_info.field_index, object, value);
    AbstractState const* new_state =
        zone()->New<AbstractState>(state->mutable_state, *immutable_state);
    return UpdateState(node, new_state);
  }
}
// Tries to replace an array.len with a previously seen length value for the
// same array object; otherwise records this node in the state.
Reduction WasmLoadElimination::ReduceWasmArrayLength(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmArrayLength);
  Node* object = ResolveAliases(NodeProperties::GetValueInput(node, 0));
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);
  AbstractState const* state = node_states_.Get(effect);
  if (state == nullptr) return NoChange();
  // Array length is tracked in the immutable half-state under the pseudo
  // field index {kArrayLengthFieldIndex}.
  HalfState const* immutable_state = &state->immutable_state;
  FieldOrElementValue lookup_result =
      immutable_state->LookupField(kArrayLengthFieldIndex, object);
  if (!lookup_result.IsEmpty() && !lookup_result.value->IsDead()) {
    // Hit: reuse the cached length value.
    ReplaceWithValue(node, lookup_result.value, effect, control);
    node->Kill();
    return Replace(lookup_result.value);
  }
  // Miss: remember this length node for later array.len operations.
  immutable_state =
      immutable_state->AddField(kArrayLengthFieldIndex, object, node);
  AbstractState const* new_state =
      zone()->New<AbstractState>(state->mutable_state, *immutable_state);
  return UpdateState(node, new_state);
}
// Records the length assigned at array allocation time, so subsequent
// array.len operations on the same object can be eliminated.
Reduction WasmLoadElimination::ReduceWasmArrayInitializeLength(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmArrayInitializeLength);
  Node* object = ResolveAliases(NodeProperties::GetValueInput(node, 0));
  Node* value = NodeProperties::GetValueInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  AbstractState const* state = node_states_.Get(effect);
  if (state == nullptr) return NoChange();
  // We should not initialize the length twice.
  DCHECK(state->immutable_state.LookupField(kArrayLengthFieldIndex, object)
             .IsEmpty());
  HalfState const* immutable_state =
      state->immutable_state.AddField(kArrayLengthFieldIndex, object, value);
  AbstractState const* new_state =
      zone()->New<AbstractState>(state->mutable_state, *immutable_state);
  return UpdateState(node, new_state);
}
// Propagates the abstract state through effectful nodes that this pass does
// not model explicitly.
Reduction WasmLoadElimination::ReduceOtherNode(Node* node) {
  if (node->op()->EffectOutputCount() == 0) return NoChange();
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
  Node* const effect = NodeProperties::GetEffectInput(node);
  AbstractState const* state = node_states_.Get(effect);
  // If we do not know anything about the predecessor, do not propagate just
  // yet because we will have to recompute anyway once we compute the
  // predecessor.
  if (state == nullptr) return NoChange();
  // If this {node} has some uncontrolled side effects (i.e. it is a call
  // without {kNoWrite}), set its state to the immutable half-state of its
  // input state, otherwise to its input state.
  return UpdateState(node, node->opcode() == IrOpcode::kCall &&
                               !node->op()->HasProperty(Operator::kNoWrite)
                         ? zone()->New<AbstractState>(
                               HalfState(zone()), state->immutable_state)
                         : state);
}

// The graph's Start node is seeded with the empty state.
Reduction WasmLoadElimination::ReduceStart(Node* node) {
  return UpdateState(node, empty_state());
}
// Merges the states of all effect inputs at a merge point, or computes a
// conservative loop state at a loop header.
Reduction WasmLoadElimination::ReduceEffectPhi(Node* node) {
  Node* const effect0 = NodeProperties::GetEffectInput(node, 0);
  Node* const control = NodeProperties::GetControlInput(node);
  AbstractState const* state0 = node_states_.Get(effect0);
  if (state0 == nullptr) return NoChange();
  if (control->opcode() == IrOpcode::kLoop) {
    // Here we rely on having only reducible loops:
    // The loop entry edge always dominates the header, so we can just take
    // the state from the first input, and compute the loop state based on it.
    AbstractState const* state = ComputeLoopState(node, state0);
    return UpdateState(node, state);
  }
  DCHECK_EQ(IrOpcode::kMerge, control->opcode());
  // Shortcut for the case when we do not know anything about some input.
  int const input_count = node->op()->EffectInputCount();
  for (int i = 1; i < input_count; ++i) {
    Node* const effect = NodeProperties::GetEffectInput(node, i);
    if (node_states_.Get(effect) == nullptr) return NoChange();
  }
  // Make a copy of the first input's state and intersect it with the state
  // from other inputs.
  // TODO(manoskouk): Consider computing phis for at least a subset of the
  // state.
  AbstractState* state = zone()->New<AbstractState>(*state0);
  for (int i = 1; i < input_count; ++i) {
    Node* const input = NodeProperties::GetEffectInput(node, i);
    state->IntersectWith(node_states_.Get(input));
  }
  return UpdateState(node, state);
}
/***** AbstractState implementation *****/
// Returns the cached value for {field_index} of {object}, or an empty value
// if nothing is known.
WasmLoadElimination::FieldOrElementValue
WasmLoadElimination::HalfState::LookupField(int field_index,
                                            Node* object) const {
  return fields_.Get(field_index).Get(object);
}

// Returns a copy of this half-state extended with the fact that
// {object}.{field_index} holds {value}.
WasmLoadElimination::HalfState const* WasmLoadElimination::HalfState::AddField(
    int field_index, Node* object, Node* value) const {
  HalfState* new_state = zone_->New<HalfState>(*this);
  Update(new_state->fields_, field_index, object, FieldOrElementValue(value));
  return new_state;
}

// Returns a copy of this half-state in which all entries for {field_index}
// on objects that may alias {object} are invalidated.
WasmLoadElimination::HalfState const* WasmLoadElimination::HalfState::KillField(
    int field_index, Node* object) const {
  const InnerMap& same_index_map = fields_.Get(field_index);
  InnerMap new_map(same_index_map);
  for (std::pair<Node*, FieldOrElementValue> pair : same_index_map) {
    if (MayAlias(pair.first, object)) {
      new_map.Set(pair.first, FieldOrElementValue());
    }
  }
  HalfState* result = zone_->New<HalfState>(*this);
  result->fields_.Set(field_index, new_map);
  return result;
}
// Conservatively approximates the state at a loop header: starting from the
// loop-entry state, invalidates everything that any struct.set or
// side-effecting call inside the loop body might change.
WasmLoadElimination::AbstractState const* WasmLoadElimination::ComputeLoopState(
    Node* node, AbstractState const* state) const {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  std::queue<Node*> queue;
  std::unordered_set<Node*> visited;
  visited.insert(node);
  // Seed the walk with the backedge effect inputs (input 0 is the loop
  // entry; the last input is the control edge).
  for (int i = 1; i < node->InputCount() - 1; ++i) {
    queue.push(node->InputAt(i));
  }
  // Walk the effect chains backwards; the walk terminates at this phi
  // (pre-inserted into {visited}) and at nodes without effect inputs.
  while (!queue.empty()) {
    Node* const current = queue.front();
    queue.pop();
    if (visited.insert(current).second) {
      if (current->opcode() == IrOpcode::kWasmStructSet) {
        Node* object = NodeProperties::GetValueInput(current, 0);
        WasmFieldInfo field_info = OpParameter<WasmFieldInfo>(current->op());
        bool is_mutable = field_info.type->mutability(field_info.field_index);
        if (is_mutable) {
          // A store inside the loop: kill all may-aliasing entries for this
          // field index.
          const HalfState* new_mutable_state =
              state->mutable_state.KillField(field_info.field_index, object);
          state = zone()->New<AbstractState>(*new_mutable_state,
                                             state->immutable_state);
        } else {
          // TODO(manoskouk): DCHECK
        }
      } else if (current->opcode() == IrOpcode::kCall &&
                 !current->op()->HasProperty(Operator::kNoWrite)) {
        // A call with unknown side effects invalidates the whole mutable
        // half-state; immutable facts survive.
        return zone()->New<AbstractState>(HalfState(zone()),
                                          state->immutable_state);
      }
      for (int i = 0; i < current->op()->EffectInputCount(); ++i) {
        queue.push(NodeProperties::GetEffectInput(current, i));
      }
    }
  }
  return state;
}
// Drops every entry whose cached value differs from (or is absent in)
// {that}, keeping only the facts that hold on both merged paths.
void WasmLoadElimination::HalfState::IntersectWith(HalfState const* that) {
  FieldOrElementValue empty;
  for (const std::pair<int, InnerMap> to_map : fields_) {
    InnerMap to_map_copy(to_map.second);
    int key = to_map.first;
    const InnerMap& current_map = that->fields_.Get(key);
    for (std::pair<Node*, FieldOrElementValue> value : to_map.second) {
      if (current_map.Get(value.first) != value.second) {
        // Disagreement between the two states: reset this entry.
        to_map_copy.Set(value.first, empty);
      }
    }
    fields_.Set(key, to_map_copy);
  }
}
/***** Constructor/ trivial accessors *****/
WasmLoadElimination::WasmLoadElimination(Editor* editor, JSGraph* jsgraph,
                                         Zone* zone)
    : AdvancedReducer(editor),
      empty_state_(zone),
      // Pre-size the per-node state table to the current graph size.
      node_states_(jsgraph->graph()->NodeCount(), zone),
      jsgraph_(jsgraph),
      zone_(zone) {}

// Trivial accessors, all delegating to the JSGraph.
CommonOperatorBuilder* WasmLoadElimination::common() const {
  return jsgraph()->common();
}

MachineOperatorBuilder* WasmLoadElimination::machine() const {
  return jsgraph()->machine();
}

Graph* WasmLoadElimination::graph() const { return jsgraph()->graph(); }

Isolate* WasmLoadElimination::isolate() const { return jsgraph()->isolate(); }
} // namespace v8::internal::compiler

View File

@ -0,0 +1,155 @@
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_WASM_LOAD_ELIMINATION_H_
#define V8_COMPILER_WASM_LOAD_ELIMINATION_H_
#include "src/base/compiler-specific.h"
#include "src/codegen/machine-type.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/node-aux-data.h"
#include "src/compiler/persistent-map.h"
namespace v8::internal::compiler {
// Forward declarations.
class CommonOperatorBuilder;
class Graph;
class JSGraph;
class MachineOperatorBuilder;
struct ObjectAccess;
// Eliminates redundant wasm struct.get and array.len operations by tracking
// known field values along the effect chain. Modeled after
// CsaLoadElimination, with wasm static types refining the may-alias analysis.
class V8_EXPORT_PRIVATE WasmLoadElimination final
    : public NON_EXPORTED_BASE(AdvancedReducer) {
 public:
  WasmLoadElimination(Editor* editor, JSGraph* jsgraph, Zone* zone);
  ~WasmLoadElimination() final = default;
  WasmLoadElimination(const WasmLoadElimination&) = delete;
  WasmLoadElimination& operator=(const WasmLoadElimination&) = delete;

  const char* reducer_name() const override { return "WasmLoadElimination"; }

  Reduction Reduce(Node* node) final;

 private:
  // The value cached for a (field index, object) pair; empty when unknown.
  struct FieldOrElementValue {
    FieldOrElementValue() = default;
    explicit FieldOrElementValue(Node* value) : value(value) {}

    bool operator==(const FieldOrElementValue& other) const {
      return value == other.value;
    }

    bool operator!=(const FieldOrElementValue& other) const {
      return !(*this == other);
    }

    bool IsEmpty() const { return value == nullptr; }

    Node* value = nullptr;
  };

  // A persistent (copy-on-write) map from (field index, object) to the last
  // known value of that field.
  class HalfState final : public ZoneObject {
   public:
    explicit HalfState(Zone* zone)
        : zone_(zone),
          fields_(zone, InnerMap(zone)),
          elements_(zone, InnerMap(zone)) {}

    bool Equals(HalfState const* that) const {
      return fields_ == that->fields_ && elements_ == that->elements_;
    }
    // Keeps only entries that agree with {that} (used at merge points).
    void IntersectWith(HalfState const* that);
    // Returns a copy with all entries for {field_index} on objects that may
    // alias {object} invalidated.
    HalfState const* KillField(int field_index, Node* object) const;
    HalfState const* AddField(int field_index, Node* object, Node* value) const;
    FieldOrElementValue LookupField(int field_index, Node* object) const;
    void Print() const;

   private:
    using InnerMap = PersistentMap<Node*, FieldOrElementValue>;
    template <typename OuterKey>
    using OuterMap = PersistentMap<OuterKey, InnerMap>;
    // offset -> object -> info
    using FieldInfos = OuterMap<int>;
    // object -> offset -> info
    using ElementInfos = OuterMap<Node*>;

    // Update {map} so that {map.Get(outer_key).Get(inner_key)} returns {info}.
    template <typename OuterKey>
    static void Update(OuterMap<OuterKey>& map, OuterKey outer_key,
                       Node* inner_key, FieldOrElementValue info) {
      InnerMap map_copy(map.Get(outer_key));
      map_copy.Set(inner_key, info);
      map.Set(outer_key, map_copy);
    }

    // NOTE(review): this static overload has no definition in the visible
    // .cc file — confirm it is needed before calling.
    static void KillField(int field_index, Node* object,
                          MachineRepresentation repr, Zone* zone);
    static void Print(const FieldInfos& infos);
    static void Print(const ElementInfos& infos);

    Zone* zone_;
    FieldInfos fields_;
    // NOTE(review): element tracking is not populated by the visible .cc
    // (array operations pending); kept for the planned extension.
    ElementInfos elements_;
  };

  // An {AbstractState} consists of two {HalfState}s, representing the sets of
  // known mutable and immutable struct fields, respectively. The two
  // half-states should not overlap.
  struct AbstractState : public ZoneObject {
    explicit AbstractState(Zone* zone)
        : mutable_state(zone), immutable_state(zone) {}
    explicit AbstractState(HalfState mutable_state, HalfState immutable_state)
        : mutable_state(mutable_state), immutable_state(immutable_state) {}

    bool Equals(AbstractState const* that) const {
      return this->immutable_state.Equals(&that->immutable_state) &&
             this->mutable_state.Equals(&that->mutable_state);
    }
    void IntersectWith(AbstractState const* that) {
      mutable_state.IntersectWith(&that->mutable_state);
      immutable_state.IntersectWith(&that->immutable_state);
    }

    HalfState mutable_state;
    HalfState immutable_state;
  };

  // Per-opcode reductions; see the .cc file for details.
  Reduction ReduceWasmStructGet(Node* node);
  Reduction ReduceWasmStructSet(Node* node);
  Reduction ReduceWasmArrayLength(Node* node);
  Reduction ReduceWasmArrayInitializeLength(Node* node);
  Reduction ReduceEffectPhi(Node* node);
  Reduction ReduceStart(Node* node);
  Reduction ReduceOtherNode(Node* node);

  Reduction UpdateState(Node* node, AbstractState const* state);

  AbstractState const* ComputeLoopState(Node* node,
                                        AbstractState const* state) const;
  // Returns the replacement value and effect for a load given an initial value
  // node, after optional {TypeGuard}ing and i8/i16 adaptation to i32.
  std::tuple<Node*, Node*> TruncateAndExtendOrType(Node* value, Node* effect,
                                                   Node* control,
                                                   wasm::ValueType field_type,
                                                   bool is_signed);
  // NOTE(review): declared but not defined in the visible .cc file — confirm
  // before calling.
  Reduction AssertUnreachable(Node* node);

  CommonOperatorBuilder* common() const;
  MachineOperatorBuilder* machine() const;
  Isolate* isolate() const;
  Graph* graph() const;
  JSGraph* jsgraph() const { return jsgraph_; }
  Zone* zone() const { return zone_; }
  AbstractState const* empty_state() const { return &empty_state_; }

  AbstractState const empty_state_;
  NodeAuxData<AbstractState const*> node_states_;
  JSGraph* const jsgraph_;
  Zone* zone_;
};
} // namespace v8::internal::compiler
#endif // V8_COMPILER_WASM_LOAD_ELIMINATION_H_

View File

@ -80,6 +80,13 @@ V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype,
return IsSubtypeOfImpl(subtype, supertype, module, module);
}
// Returns true iff neither type is a subtype of the other, checking both
// directions of the cross-module subtype relation.
V8_INLINE bool TypesUnrelated(ValueType type1, ValueType type2,
                              const WasmModule* module1,
                              const WasmModule* module2) {
  return !IsSubtypeOf(type1, type2, module1, module2) &&
         !IsSubtypeOf(type2, type1, module2, module1);
}
V8_INLINE bool IsHeapSubtypeOf(HeapType subtype, HeapType supertype,
const WasmModule* sub_module,
const WasmModule* super_module) {

View File

@ -544,8 +544,6 @@ WASM_COMPILED_EXEC_TEST(RefCastNoChecks) {
const byte supertype_index = tester.DefineStruct({F(kWasmI32, true)});
const byte subtype1_index = tester.DefineStruct(
{F(kWasmI32, true), F(kWasmF32, true)}, supertype_index);
const byte subtype2_index = tester.DefineStruct(
{F(kWasmI32, true), F(kWasmI64, false)}, supertype_index);
const byte kTestSuccessful = tester.DefineFunction(
tester.sigs.i_v(), {ValueType::RefNull(supertype_index)},
@ -554,16 +552,8 @@ WASM_COMPILED_EXEC_TEST(RefCastNoChecks) {
WASM_REF_CAST(WASM_LOCAL_GET(0), subtype1_index)),
WASM_END});
const byte kTestFailed = tester.DefineFunction(
tester.sigs.i_v(), {ValueType::RefNull(supertype_index)},
{WASM_LOCAL_SET(0, WASM_STRUCT_NEW_DEFAULT(subtype1_index)),
WASM_STRUCT_GET(subtype2_index, 0,
WASM_REF_CAST(WASM_LOCAL_GET(0), subtype2_index)),
WASM_END});
tester.CompileModule();
tester.CheckResult(kTestSuccessful, 0);
tester.CheckResult(kTestFailed, 0);
}
WASM_COMPILED_EXEC_TEST(BrOnCast) {
@ -576,7 +566,7 @@ WASM_COMPILED_EXEC_TEST(BrOnCast) {
{WASM_BLOCK_R(
ValueType::RefNull(type_index), WASM_LOCAL_SET(0, WASM_I32V(111)),
// Pipe a struct through a local so it's statically typed
// as dataref.
// as structref.
WASM_LOCAL_SET(1, WASM_STRUCT_NEW(other_type_index, WASM_F32(1.0))),
WASM_LOCAL_GET(1),
// The type check fails, so this branch isn't taken.

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --experimental-wasm-gc --no-liftoff
// Flags: --experimental-wasm-gc --no-liftoff --no-wasm-lazy-compilation
// Flags: --no-wasm-inlining --no-wasm-speculative-inlining
// These tests are meant to examine whether Turbofan CsaLoadElimination works
// correctly for wasm. The TurboFan graphs can be examined with --trace-turbo.
@ -314,6 +315,53 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js");
assertEquals(value_0 + value_1, instance.exports.main());
})();
(function WasmLoadEliminationArrayLength() {
  // Builds a function where a cached array.len is interleaved with another
  // array allocation and a call with unknown side effects; array length is
  // immutable, so the second array.len on the same object should still be
  // eliminable.
  print(arguments.callee.name);
  let builder = new WasmModuleBuilder();
  let array = builder.addArray(kWasmI32, true);
  builder.addFunction("producer", makeSig([kWasmI32], [wasmRefType(array)]))
    .addBody([kExprLocalGet, 0, kGCPrefix, kExprArrayNewDefault, array])
    .exportFunc();
  let side_effect = builder.addFunction("side_effect", kSig_v_v).addBody([]);
  builder.addFunction("tester", makeSig([wasmRefType(array)], [kWasmI32]))
    .addBody([kExprLocalGet, 0, kGCPrefix, kExprArrayLen,
              kExprI32Const, 1, kExprI32Add,
              kGCPrefix, kExprArrayNewDefault, array,
              kExprCallFunction, side_effect.index,  // unknown side-effect
              kGCPrefix, kExprArrayLen,
              kExprLocalGet, 0, kGCPrefix, kExprArrayLen,
              kExprI32Mul])
    .exportFunc();
  let instance = builder.instantiate();
  // TODO(manoskouk): Add this when we allow arrays at the boundary.
  // assertEquals(10 * 11,
  //              instance.exports.tester(instance.exports.producer(10)));
})();
(function WasmLoadEliminationUnrelatedTypes() {
  // Checks that a cached struct.get is not invalidated by a store to a field
  // of a struct whose type is unrelated (no subtyping relation) to the
  // loaded object's type.
  print(arguments.callee.name);
  let builder = new WasmModuleBuilder();
  let struct1 = builder.addStruct([makeField(kWasmI32, true)]);
  let struct2 = builder.addStruct([makeField(kWasmI32, true),
                                   makeField(kWasmI64, true)]);

  builder.addFunction("tester",
                      makeSig([wasmRefType(struct1), wasmRefType(struct2)],
                              [kWasmI32]))
    // f(x, y) { y.f = x.f + 10; return y.f * x.f }
    // x.f load in the state should survive y.f store.
    .addBody([kExprLocalGet, 1,
              kExprLocalGet, 0, kGCPrefix, kExprStructGet, struct1, 0,
              kExprI32Const, 10, kExprI32Add,
              kGCPrefix, kExprStructSet, struct2, 0,
              kExprLocalGet, 0, kGCPrefix, kExprStructGet, struct1, 0,
              kExprLocalGet, 1, kGCPrefix, kExprStructGet, struct2, 0,
              kExprI32Mul]);

  // Instantiation compiles the module (lazy compilation is disabled), which
  // is all this test needs; "tester" is intentionally not exported.
  builder.instantiate();
})();
(function EscapeAnalysisWithLoadElimination() {
print(arguments.callee.name);