[wasm-gc][turbofan] Introduce wasm-gc-specific nodes

We introduce wasm-gc specific nodes into the Turbofan IR, corresponding
to the wasm opcodes:
ref.as_non_null, ref.is_null, ref.null, rtt.canon, ref.test, ref.cast.
We define them as simplified operators. These are lowered by a dedicated
phase in the wasm pipeline.
Optimizations based on these nodes will be introduced later.
Note: We rename ObjectReferenceKnowledge to WasmTypeCheckConfig and move
it to a separate file, as it is now used in simplified-operator as well.

Bug: v8:7748
Change-Id: Iceaf04eca089b08bad794f567359196e8ba78d93
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3654102
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#80746}
This commit is contained in:
Manos Koukoutos 2022-05-25 12:02:30 +00:00 committed by V8 LUCI CQ
parent 8e47a2c603
commit 9e7ada8e2b
18 changed files with 567 additions and 129 deletions

View File

@ -2442,12 +2442,6 @@ filegroup(
"src/asmjs/asm-scanner.h",
"src/asmjs/asm-types.cc",
"src/asmjs/asm-types.h",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-loop-peeling.h",
"src/debug/debug-wasm-objects.cc",
"src/debug/debug-wasm-objects.h",
"src/debug/debug-wasm-objects-inl.h",
@ -2859,11 +2853,20 @@ filegroup(
] + select({
":is_v8_enable_webassembly": [
"src/compiler/int64-lowering.cc",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler-definitions.h",
"src/compiler/wasm-compiler.cc",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-gc-lowering.cc",
"src/compiler/wasm-gc-lowering.h",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-inlining.h",
],
"//conditions:default": [],
}),

View File

@ -3547,8 +3547,10 @@ v8_header_set("v8_internal_headers") {
"src/asmjs/asm-scanner.h",
"src/asmjs/asm-types.h",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler-definitions.h",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-gc-lowering.h",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-loop-peeling.h",
@ -4047,6 +4049,7 @@ if (v8_enable_webassembly) {
"src/compiler/int64-lowering.cc",
"src/compiler/wasm-compiler.cc",
"src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-gc-lowering.cc",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-loop-peeling.cc",

View File

@ -12,6 +12,7 @@ include_rules = [
"-src/compiler",
"+src/compiler/pipeline.h",
"+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler-definitions.h",
"+src/compiler/wasm-compiler.h",
"-src/heap",
"+src/heap/basic-memory-chunk.h",

View File

@ -504,6 +504,14 @@
V(SpeculativeBigIntAsUintN) \
V(SpeculativeBigIntNegate)
#define SIMPLIFIED_WASM_OP_LIST(V) \
V(AssertNotNull) \
V(IsNull) \
V(Null) \
V(RttCanon) \
V(WasmTypeCast) \
V(WasmTypeCheck)
#define SIMPLIFIED_OP_LIST(V) \
SIMPLIFIED_CHANGE_OP_LIST(V) \
SIMPLIFIED_CHECKED_OP_LIST(V) \
@ -516,6 +524,7 @@
SIMPLIFIED_SPECULATIVE_NUMBER_UNOP_LIST(V) \
SIMPLIFIED_SPECULATIVE_BIGINT_UNOP_LIST(V) \
SIMPLIFIED_SPECULATIVE_BIGINT_BINOP_LIST(V) \
IF_WASM(SIMPLIFIED_WASM_OP_LIST, V) \
SIMPLIFIED_OTHER_OP_LIST(V)
// Opcodes for Machine-level operators.

View File

@ -104,6 +104,7 @@
#if V8_ENABLE_WEBASSEMBLY
#include "src/compiler/wasm-compiler.h"
#include "src/compiler/wasm-escape-analysis.h"
#include "src/compiler/wasm-gc-lowering.h"
#include "src/compiler/wasm-inlining.h"
#include "src/compiler/wasm-loop-peeling.h"
#include "src/wasm/function-body-decoder.h"
@ -2044,6 +2045,22 @@ struct TurboshaftRecreateSchedulePhase {
};
#if V8_ENABLE_WEBASSEMBLY
// Pipeline phase that lowers the wasm-gc-specific simplified operators
// (WasmTypeCheck, WasmTypeCast, AssertNotNull, Null, IsNull, RttCanon; see
// WasmGCLowering::Reduce) to machine-level graph nodes. Runs only when
// --experimental-wasm-gc is enabled.
struct WasmGCLoweringPhase {
  DECL_PIPELINE_PHASE_CONSTANTS(WasmGCLowering)

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(
        temp_zone, data->graph(), &data->info()->tick_counter(), data->broker(),
        data->jsgraph()->Dead(), data->observe_node_manager());
    WasmGCLowering lowering(&graph_reducer, data->mcgraph());
    // Dead code elimination cleans up nodes killed by the lowering.
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    AddReducer(data, &graph_reducer, &lowering);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    graph_reducer.ReduceGraph();
  }
};
struct WasmOptimizationPhase {
DECL_PIPELINE_PHASE_CONSTANTS(WasmOptimization)
@ -3239,6 +3256,11 @@ void Pipeline::GenerateCodeForWasmFunction(
}
const bool is_asm_js = is_asmjs_module(module);
if (FLAG_experimental_wasm_gc) {
pipeline.Run<WasmGCLoweringPhase>();
pipeline.RunPrintAndVerify(WasmGCLoweringPhase::phase_name(), true);
}
if (FLAG_wasm_opt || is_asm_js) {
pipeline.Run<WasmOptimizationPhase>(is_asm_js);
pipeline.RunPrintAndVerify(WasmOptimizationPhase::phase_name(), true);

View File

@ -16,6 +16,10 @@
#include "src/objects/name.h"
#include "src/objects/objects-inl.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/compiler/wasm-compiler-definitions.h"
#endif
namespace v8 {
namespace internal {
namespace compiler {
@ -1141,6 +1145,31 @@ struct SimplifiedOperatorGlobalCache final {
};
LoadStackArgumentOperator kLoadStackArgument;
#if V8_ENABLE_WEBASSEMBLY
// Tests whether its single value input is the wasm null reference.
// Pure: 1 value input, 1 value output, no effect/control edges.
struct IsNullOperator final : public Operator {
  IsNullOperator()
      : Operator(IrOpcode::kIsNull, Operator::kPure, "IsNull", 1, 0, 0, 1, 0,
                 0) {}
};
IsNullOperator kIsNull;

// Produces the wasm null reference. Pure: no inputs, 1 value output.
struct NullOperator final : public Operator {
  NullOperator()
      : Operator(IrOpcode::kNull, Operator::kPure, "Null", 0, 0, 0, 1, 0, 0) {
  }
};
NullOperator kNull;
// Traps if its value input is the wasm null reference; otherwise passes the
// input through unchanged (lowered in WasmGCLowering::ReduceAssertNotNull).
// Takes and produces effect and control so the trap stays ordered in the
// effect chain.
// Fix: effect_output_count was 0 even though the operator consumes an effect
// input and its lowering rewires effect uses via ReplaceWithValue — it must
// produce an effect output (cf. WasmTypeCast, which uses 1).
struct AssertNotNullOperator final : public Operator {
  AssertNotNullOperator()
      : Operator(
            IrOpcode::kAssertNotNull,
            Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
            "AssertNotNull", 1, 1, 1, 1, 1, 1) {}
};
AssertNotNullOperator kAssertNotNull;
#endif
#define SPECULATIVE_NUMBER_BINOP(Name) \
template <NumberOperationHint kHint> \
struct Name##Operator final : public Operator1<NumberOperationHint> { \
@ -1303,6 +1332,36 @@ const Operator* SimplifiedOperatorBuilder::VerifyType() {
"VerifyType", 1, 0, 0, 1, 0, 0);
}
#if V8_ENABLE_WEBASSEMBLY
// Creates a WasmTypeCheck operator parameterized by {config}. Inputs are the
// object and the rtt (plus effect/control); the value output is a word32
// boolean (see the kWord32 phi in WasmGCLowering::ReduceWasmTypeCheck).
// Not cached: the operator carries per-site configuration.
const Operator* SimplifiedOperatorBuilder::WasmTypeCheck(
    WasmTypeCheckConfig config) {
  return zone_->New<Operator1<WasmTypeCheckConfig>>(
      IrOpcode::kWasmTypeCheck, Operator::kEliminatable | Operator::kIdempotent,
      "WasmTypeCheck", 2, 1, 1, 1, 1, 1, config);
}
// Creates a WasmTypeCast operator parameterized by {config}. Inputs are the
// object and the rtt (plus effect/control); the value output is the object
// itself. The lowering traps with kTrapIllegalCast on failure, hence
// kNoThrow/kNoWrite rather than kPure.
const Operator* SimplifiedOperatorBuilder::WasmTypeCast(
    WasmTypeCheckConfig config) {
  return zone_->New<Operator1<WasmTypeCheckConfig>>(
      IrOpcode::kWasmTypeCast,
      Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
      "WasmTypeCast", 2, 1, 1, 1, 1, 1, config);
}
// Creates an RttCanon operator producing the canonical rtt (the map) for the
// wasm type at {index}; lowered to a load from the instance's managed object
// maps (see WasmGCLowering::ReduceRttCanon). Pure, no inputs, 1 value output.
const Operator* SimplifiedOperatorBuilder::RttCanon(int index) {
  return zone()->New<Operator1<int>>(IrOpcode::kRttCanon, Operator::kPure,
                                     "RttCanon", 0, 0, 0, 1, 0, 0, index);
}
// Parameterless wasm operators: return the cached singletons.
const Operator* SimplifiedOperatorBuilder::Null() { return &cache_.kNull; }

const Operator* SimplifiedOperatorBuilder::AssertNotNull() {
  return &cache_.kAssertNotNull;
}

const Operator* SimplifiedOperatorBuilder::IsNull() { return &cache_.kIsNull; }
#endif // V8_ENABLE_WEBASSEMBLY
const Operator* SimplifiedOperatorBuilder::CheckIf(
DeoptimizeReason reason, const FeedbackSource& feedback) {
if (!feedback.IsValid()) {

View File

@ -36,9 +36,10 @@ class Zone;
namespace compiler {
// Forward declarations.
class CallDescriptor;
class Operator;
struct SimplifiedOperatorGlobalCache;
class CallDescriptor;
struct WasmTypeCheckConfig;
enum BaseTaggedness : uint8_t { kUntaggedBase, kTaggedBase };
@ -1060,6 +1061,15 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
// SimplifiedLowering.
const Operator* VerifyType();
#if V8_ENABLE_WEBASSEMBLY
const Operator* AssertNotNull();
const Operator* IsNull();
const Operator* Null();
const Operator* RttCanon(int index);
const Operator* WasmTypeCheck(WasmTypeCheckConfig config);
const Operator* WasmTypeCast(WasmTypeCheckConfig config);
#endif
const Operator* DateNow();
// Represents the inputs necessary to construct a fast and a slow API call.

View File

@ -124,6 +124,7 @@ class Typer::Visitor : public Reducer {
DECLARE_IMPOSSIBLE_CASE(End)
SIMPLIFIED_CHANGE_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
SIMPLIFIED_CHECKED_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
IF_WASM(SIMPLIFIED_WASM_OP_LIST, DECLARE_IMPOSSIBLE_CASE)
MACHINE_SIMD_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
MACHINE_OP_LIST(DECLARE_IMPOSSIBLE_CASE)
#undef DECLARE_IMPOSSIBLE_CASE

View File

@ -1639,6 +1639,14 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
CheckTypeIs(node, Type::Any());
CheckValueInputIs(node, 0, Type::Any()); // callee
break;
case IrOpcode::kWasmTypeCheck:
case IrOpcode::kWasmTypeCast:
case IrOpcode::kRttCanon:
case IrOpcode::kNull:
case IrOpcode::kIsNull:
case IrOpcode::kAssertNotNull:
// TODO(manoskouk): What are the constraints here?
break;
#endif // V8_ENABLE_WEBASSEMBLY
// Machine operators

View File

@ -0,0 +1,46 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif // !V8_ENABLE_WEBASSEMBLY
#ifndef V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_
#define V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_
#include <cstdint>
#include <ostream>
#include "src/base/functional.h"
namespace v8 {
namespace internal {
namespace compiler {
// Static information about the object of a wasm type check/cast, carried as
// the parameter of the WasmTypeCheck/WasmTypeCast simplified operators
// (formerly WasmGraphBuilder::ObjectReferenceKnowledge).
struct WasmTypeCheckConfig {
  // Whether the checked object may be the null reference; if so, the
  // lowering emits an explicit null check first.
  bool object_can_be_null;
  // Depth of the target type in its supertype hierarchy; used as the index
  // into the supertypes array during lowering.
  uint8_t rtt_depth;
};
// Printing, hashing, and equality are required so the config can serve as an
// Operator1 parameter.
V8_INLINE std::ostream& operator<<(std::ostream& os,
                                   WasmTypeCheckConfig const& p) {
  // Cast rtt_depth to int so it prints as a number, not as a character.
  return os << (p.object_can_be_null ? "nullable" : "non-nullable")
            << ", depth=" << static_cast<int>(p.rtt_depth);
}

V8_INLINE size_t hash_value(WasmTypeCheckConfig const& p) {
  return base::hash_combine(p.object_can_be_null, p.rtt_depth);
}

V8_INLINE bool operator==(const WasmTypeCheckConfig& p1,
                          const WasmTypeCheckConfig& p2) {
  return p1.object_can_be_null == p2.object_can_be_null &&
         p1.rtt_depth == p2.rtt_depth;
}
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_WASM_COMPILER_DEFINITIONS_H_

View File

@ -37,6 +37,7 @@
#include "src/compiler/node-origin-table.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/compiler/wasm-graph-assembler.h"
#include "src/compiler/zone-stats.h"
#include "src/execution/isolate-inl.h"
@ -294,7 +295,11 @@ Node* WasmGraphBuilder::EffectPhi(unsigned count, Node** effects_and_control) {
effects_and_control);
}
Node* WasmGraphBuilder::RefNull() { return LOAD_ROOT(NullValue, null_value); }
Node* WasmGraphBuilder::RefNull() {
  // With wasm-gc enabled and an instance available, emit the dedicated Null
  // operator (lowered later by WasmGCLowering); otherwise load the null
  // value from the roots table directly.
  return (FLAG_experimental_wasm_gc && parameter_mode_ == kInstanceMode)
             ? gasm_->Null()
             : LOAD_ROOT(NullValue, null_value);
}
Node* WasmGraphBuilder::RefFunc(uint32_t function_index) {
return gasm_->CallRuntimeStub(wasm::WasmCode::kWasmRefFunc,
@ -304,10 +309,7 @@ Node* WasmGraphBuilder::RefFunc(uint32_t function_index) {
Node* WasmGraphBuilder::RefAsNonNull(Node* arg,
wasm::WasmCodePosition position) {
if (!FLAG_experimental_wasm_skip_null_checks) {
TrapIfTrue(wasm::kTrapIllegalCast, IsNull(arg), position);
}
return arg;
return AssertNotNull(arg, position);
}
Node* WasmGraphBuilder::NoContextConstant() {
@ -377,7 +379,7 @@ void WasmGraphBuilder::StackCheck(
constexpr Operator::Properties properties =
Operator::kNoThrow | Operator::kNoWrite;
// If we ever want to mark this call as kNoDeopt, we'll have to make it
// If we ever want to mark this call as kNoDeopt, we'll have to make it
// non-eliminatable some other way.
static_assert((properties & Operator::kEliminatable) !=
Operator::kEliminatable);
@ -1127,6 +1129,14 @@ void WasmGraphBuilder::TrapIfFalse(wasm::TrapReason reason, Node* cond,
SetSourcePosition(node, position);
}
// Returns {object}, guarded by a trap-if-null check attributed to
// {position}. The check is elided entirely when
// --experimental-wasm-skip-null-checks is set.
Node* WasmGraphBuilder::AssertNotNull(Node* object,
                                      wasm::WasmCodePosition position) {
  if (FLAG_experimental_wasm_skip_null_checks) return object;
  Node* result = gasm_->AssertNotNull(object);
  SetSourcePosition(result, position);
  return result;
}
// Add a check that traps if {node} is equal to {val}.
void WasmGraphBuilder::TrapIfEq32(wasm::TrapReason reason, Node* node,
int32_t val,
@ -2589,7 +2599,9 @@ Node* WasmGraphBuilder::BuildDiv64Call(Node* left, Node* right,
}
Node* WasmGraphBuilder::IsNull(Node* object) {
return gasm_->TaggedEqual(object, RefNull());
return (FLAG_experimental_wasm_gc && parameter_mode_ == kInstanceMode)
? gasm_->IsNull(object)
: gasm_->TaggedEqual(object, RefNull());
}
template <typename... Args>
@ -2914,7 +2926,7 @@ Node* WasmGraphBuilder::BuildCallRef(const wasm::FunctionSig* real_sig,
IsReturnCall continuation,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(args[0]), position);
args[0] = AssertNotNull(args[0], position);
}
Node* function = args[0];
@ -5266,11 +5278,7 @@ Node* WasmGraphBuilder::ArrayInitFromData(const wasm::ArrayType* type,
}
Node* WasmGraphBuilder::RttCanon(uint32_t type_index) {
Node* maps_list =
LOAD_INSTANCE_FIELD(ManagedObjectMaps, MachineType::TaggedPointer());
return gasm_->LoadImmutable(
MachineType::TaggedPointer(), maps_list,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(type_index));
return graph()->NewNode(gasm_->simplified()->RttCanon(type_index));
}
WasmGraphBuilder::Callbacks WasmGraphBuilder::TestCallbacks(
@ -5338,40 +5346,6 @@ WasmGraphBuilder::Callbacks WasmGraphBuilder::BranchCallbacks(
}};
}
void WasmGraphBuilder::TypeCheck(
Node* object, Node* rtt, WasmGraphBuilder::ObjectReferenceKnowledge config,
bool null_succeeds, Callbacks callbacks) {
if (config.object_can_be_null) {
(null_succeeds ? callbacks.succeed_if : callbacks.fail_if)(
IsNull(object), BranchHint::kFalse);
}
Node* map = gasm_->LoadMap(object);
// First, check if types happen to be equal. This has been shown to give large
// speedups.
callbacks.succeed_if(gasm_->TaggedEqual(map, rtt), BranchHint::kTrue);
Node* type_info = gasm_->LoadWasmTypeInfo(map);
Node* supertypes = gasm_->LoadSupertypes(type_info);
Node* rtt_depth = gasm_->UintPtrConstant(config.rtt_depth);
// If the depth of the rtt is known to be less that the minimum supertype
// array length, we can access the supertype without bounds-checking the
// supertype array.
if (config.rtt_depth >= wasm::kMinimumSupertypeArraySize) {
Node* supertypes_length = gasm_->BuildChangeSmiToIntPtr(
gasm_->LoadFixedArrayLengthAsSmi(supertypes));
callbacks.fail_if_not(gasm_->UintLessThan(rtt_depth, supertypes_length),
BranchHint::kTrue);
}
Node* maybe_match = gasm_->LoadImmutableFixedArrayElement(
supertypes, rtt_depth, MachineType::TaggedPointer());
callbacks.fail_if_not(gasm_->TaggedEqual(maybe_match, rtt),
BranchHint::kTrue);
}
void WasmGraphBuilder::DataCheck(Node* object, bool object_can_be_null,
Callbacks callbacks) {
if (object_can_be_null) {
@ -5407,51 +5381,58 @@ void WasmGraphBuilder::BrOnCastAbs(
match_effects.emplace_back(effect());
// Wire up the control/effect nodes.
unsigned count = static_cast<unsigned>(match_controls.size());
DCHECK_EQ(match_controls.size(), match_effects.size());
*match_control = Merge(count, match_controls.data());
// EffectPhis need their control dependency as an additional input.
match_effects.emplace_back(*match_control);
*match_effect = EffectPhi(count, match_effects.data());
unsigned match_count = static_cast<unsigned>(match_controls.size());
if (match_count == 1) {
*match_control = match_controls[0];
*match_effect = match_effects[0];
} else {
*match_control = Merge(match_count, match_controls.data());
// EffectPhis need their control dependency as an additional input.
match_effects.emplace_back(*match_control);
*match_effect = EffectPhi(match_count, match_effects.data());
}
DCHECK_EQ(no_match_controls.size(), no_match_effects.size());
// Range is 2..4, so casting to unsigned is safe.
count = static_cast<unsigned>(no_match_controls.size());
*no_match_control = Merge(count, no_match_controls.data());
// EffectPhis need their control dependency as an additional input.
no_match_effects.emplace_back(*no_match_control);
*no_match_effect = EffectPhi(count, no_match_effects.data());
unsigned no_match_count = static_cast<unsigned>(no_match_controls.size());
if (no_match_count == 1) {
*no_match_control = no_match_controls[0];
*no_match_effect = no_match_effects[0];
} else {
// Range is 2..4, so casting to unsigned is safe.
*no_match_control = Merge(no_match_count, no_match_controls.data());
// EffectPhis need their control dependency as an additional input.
no_match_effects.emplace_back(*no_match_control);
*no_match_effect = EffectPhi(no_match_count, no_match_effects.data());
}
}
Node* WasmGraphBuilder::RefTest(Node* object, Node* rtt,
ObjectReferenceKnowledge config) {
auto done = gasm_->MakeLabel(MachineRepresentation::kWord32);
TypeCheck(object, rtt, config, false, TestCallbacks(&done));
gasm_->Goto(&done, Int32Constant(1));
gasm_->Bind(&done);
return done.PhiAt(0);
WasmTypeCheckConfig config) {
return gasm_->WasmTypeCheck(object, rtt, config);
}
Node* WasmGraphBuilder::RefCast(Node* object, Node* rtt,
ObjectReferenceKnowledge config,
WasmTypeCheckConfig config,
wasm::WasmCodePosition position) {
if (!FLAG_experimental_wasm_assume_ref_cast_succeeds) {
auto done = gasm_->MakeLabel();
TypeCheck(object, rtt, config, true, CastCallbacks(&done, position));
gasm_->Goto(&done);
gasm_->Bind(&done);
}
return object;
return FLAG_experimental_wasm_assume_ref_cast_succeeds
? object
: gasm_->WasmTypeCast(object, rtt, config);
}
void WasmGraphBuilder::BrOnCast(Node* object, Node* rtt,
ObjectReferenceKnowledge config,
WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control,
Node** no_match_effect) {
BrOnCastAbs(match_control, match_effect, no_match_control, no_match_effect,
[=](Callbacks callbacks) -> void {
return TypeCheck(object, rtt, config, false, callbacks);
});
Node* true_node;
Node* false_node;
BranchNoHint(gasm_->WasmTypeCheck(object, rtt, config), &true_node,
&false_node);
*match_effect = *no_match_effect = effect();
*match_control = true_node;
*no_match_control = false_node;
}
Node* WasmGraphBuilder::RefIsData(Node* object, bool object_can_be_null) {
@ -5472,7 +5453,7 @@ Node* WasmGraphBuilder::RefAsData(Node* object, bool object_can_be_null,
}
void WasmGraphBuilder::BrOnData(Node* object, Node* /*rtt*/,
ObjectReferenceKnowledge config,
WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control,
Node** no_match_effect) {
@ -5503,7 +5484,7 @@ Node* WasmGraphBuilder::RefAsFunc(Node* object, bool object_can_be_null,
}
void WasmGraphBuilder::BrOnFunc(Node* object, Node* /*rtt*/,
ObjectReferenceKnowledge config,
WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control,
Node** no_match_effect) {
@ -5535,7 +5516,7 @@ Node* WasmGraphBuilder::RefAsArray(Node* object, bool object_can_be_null,
}
void WasmGraphBuilder::BrOnArray(Node* object, Node* /*rtt*/,
ObjectReferenceKnowledge config,
WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control,
Node** no_match_effect) {
@ -5556,13 +5537,12 @@ Node* WasmGraphBuilder::RefAsI31(Node* object,
}
void WasmGraphBuilder::BrOnI31(Node* object, Node* /* rtt */,
ObjectReferenceKnowledge /* config */,
WasmTypeCheckConfig /* config */,
Node** match_control, Node** match_effect,
Node** no_match_control,
Node** no_match_effect) {
gasm_->Branch(gasm_->IsI31(object), match_control, no_match_control,
BranchHint::kTrue);
SetControl(*no_match_control);
*match_effect = effect();
*no_match_effect = effect();
@ -5574,7 +5554,7 @@ Node* WasmGraphBuilder::StructGet(Node* struct_object,
bool is_signed,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(struct_object), position);
struct_object = AssertNotNull(struct_object, position);
}
// It is not enough to invoke ValueType::machine_type(), because the
// signedness has to be determined by {is_signed}.
@ -5593,7 +5573,7 @@ void WasmGraphBuilder::StructSet(Node* struct_object,
CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(struct_object), position);
struct_object = AssertNotNull(struct_object, position);
}
gasm_->StoreStructField(struct_object, struct_type, field_index, field_value);
}
@ -5623,7 +5603,7 @@ Node* WasmGraphBuilder::ArrayGet(Node* array_object,
CheckForNull null_check, bool is_signed,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(array_object), position);
array_object = AssertNotNull(array_object, position);
}
BoundsCheckArray(array_object, index, position);
MachineType machine_type = MachineType::TypeForRepresentation(
@ -5640,7 +5620,7 @@ void WasmGraphBuilder::ArraySet(Node* array_object, const wasm::ArrayType* type,
CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(array_object), position);
array_object = AssertNotNull(array_object, position);
}
BoundsCheckArray(array_object, index, position);
Node* offset = gasm_->WasmArrayElementOffset(index, type->element_type());
@ -5651,7 +5631,7 @@ void WasmGraphBuilder::ArraySet(Node* array_object, const wasm::ArrayType* type,
Node* WasmGraphBuilder::ArrayLen(Node* array_object, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(array_object), position);
array_object = AssertNotNull(array_object, position);
}
return gasm_->LoadWasmArrayLength(array_object);
}
@ -5664,10 +5644,10 @@ void WasmGraphBuilder::ArrayCopy(Node* dst_array, Node* dst_index,
Node* length,
wasm::WasmCodePosition position) {
if (dst_null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(dst_array), position);
dst_array = AssertNotNull(dst_array, position);
}
if (src_null_check == kWithNullCheck) {
TrapIfTrue(wasm::kTrapNullDereference, IsNull(src_array), position);
src_array = AssertNotNull(src_array, position);
}
BoundsCheckArrayCopy(dst_array, dst_index, length, position);
BoundsCheckArrayCopy(src_array, src_index, length, position);

View File

@ -45,14 +45,12 @@ enum class TrapId : uint32_t;
struct Int64LoweringSpecialCase;
template <size_t VarCount>
class GraphAssemblerLabel;
struct WasmTypeCheckConfig;
} // namespace compiler
namespace wasm {
class AssemblerBufferCache;
struct DecodeStruct;
// Expose {Node} and {Graph} opaquely as {wasm::TFNode} and {wasm::TFGraph}.
using TFNode = compiler::Node;
using TFGraph = compiler::MachineGraph;
class WasmCode;
class WasmFeatures;
class WireBytesStorage;
@ -224,10 +222,6 @@ class WasmGraphBuilder {
kWasmApiFunctionRefMode,
kNoSpecialParameterMode
};
struct ObjectReferenceKnowledge {
bool object_can_be_null;
uint8_t rtt_depth;
};
enum EnforceBoundsCheck : bool { // --
kNeedsBoundsCheck = true,
kCanOmitBoundsCheck = false
@ -513,33 +507,33 @@ class WasmGraphBuilder {
Node* I31GetU(Node* input);
Node* RttCanon(uint32_t type_index);
Node* RefTest(Node* object, Node* rtt, ObjectReferenceKnowledge config);
Node* RefCast(Node* object, Node* rtt, ObjectReferenceKnowledge config,
Node* RefTest(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* RefCast(Node* object, Node* rtt, WasmTypeCheckConfig config,
wasm::WasmCodePosition position);
void BrOnCast(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnCast(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsData(Node* object, bool object_can_be_null);
Node* RefAsData(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnData(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnData(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsFunc(Node* object, bool object_can_be_null);
Node* RefAsFunc(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnFunc(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnFunc(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsArray(Node* object, bool object_can_be_null);
Node* RefAsArray(Node* object, bool object_can_be_null,
wasm::WasmCodePosition position);
void BrOnArray(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnArray(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
Node* RefIsI31(Node* object);
Node* RefAsI31(Node* object, wasm::WasmCodePosition position);
void BrOnI31(Node* object, Node* rtt, ObjectReferenceKnowledge config,
void BrOnI31(Node* object, Node* rtt, WasmTypeCheckConfig config,
Node** match_control, Node** match_effect,
Node** no_match_control, Node** no_match_effect);
@ -700,6 +694,8 @@ class WasmGraphBuilder {
Node* IsNull(Node* object);
Node* AssertNotNull(Node* object, wasm::WasmCodePosition position);
void GetGlobalBaseAndOffset(const wasm::WasmGlobal&, Node** base_node,
Node** offset_node);
@ -711,9 +707,9 @@ class WasmGraphBuilder {
};
// This type is used to collect control/effect nodes we need to merge at the
// end of BrOn* functions. Nodes are collected in {TypeCheck} etc. by calling
// the passed callbacks succeed_if, fail_if and fail_if_not. We have up to 5
// control nodes to merge; the EffectPhi needs an additional input.
// end of BrOn* functions. Nodes are collected by calling the passed callbacks
// succeed_if, fail_if and fail_if_not. We have up to 5 control nodes to
// merge; the EffectPhi needs an additional input.
using SmallNodeVector = base::SmallVector<Node*, 6>;
Callbacks TestCallbacks(GraphAssemblerLabel<1>* label);
@ -724,8 +720,6 @@ class WasmGraphBuilder {
SmallNodeVector& match_controls,
SmallNodeVector& match_effects);
void TypeCheck(Node* object, Node* rtt, ObjectReferenceKnowledge config,
bool null_succeeds, Callbacks callbacks);
void DataCheck(Node* object, bool object_can_be_null, Callbacks callbacks);
void ManagedObjectInstanceCheck(Node* object, bool object_can_be_null,
InstanceType instance_type,

View File

@ -0,0 +1,215 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/wasm-gc-lowering.h"
#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/opcodes.h"
#include "src/compiler/operator.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/compiler/wasm-graph-assembler.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"
namespace v8 {
namespace internal {
namespace compiler {
WasmGCLowering::WasmGCLowering(Editor* editor, MachineGraph* mcgraph)
    : AdvancedReducer(editor),
      gasm_(mcgraph, mcgraph->zone()),
      dead_(mcgraph->Dead()),
      instance_node_(nullptr) {
  // Find and store the instance node. The wasm-gc nodes are only emitted in
  // instance mode (see WasmGraphBuilder::RefNull/IsNull), where the instance
  // is parameter 0 of the Start node.
  for (Node* start_use : mcgraph->graph()->start()->uses()) {
    if (start_use->opcode() == IrOpcode::kParameter &&
        ParameterIndexOf(start_use->op()) == 0) {
      instance_node_ = start_use;
      break;
    }
  }
  DCHECK_NOT_NULL(instance_node_);
}
// Dispatches each wasm-gc operator to its dedicated lowering routine; every
// other node is left untouched.
Reduction WasmGCLowering::Reduce(Node* node) {
  const IrOpcode::Value opcode = node->opcode();
  if (opcode == IrOpcode::kWasmTypeCheck) return ReduceWasmTypeCheck(node);
  if (opcode == IrOpcode::kWasmTypeCast) return ReduceWasmTypeCast(node);
  if (opcode == IrOpcode::kAssertNotNull) return ReduceAssertNotNull(node);
  if (opcode == IrOpcode::kNull) return ReduceNull(node);
  if (opcode == IrOpcode::kIsNull) return ReduceIsNull(node);
  if (opcode == IrOpcode::kRttCanon) return ReduceRttCanon(node);
  return NoChange();
}
// Materializes the null reference: loads the isolate root pointer from the
// instance, then loads the NullValue root slot from the isolate root.
Node* WasmGCLowering::Null() {
  Node* isolate_root = gasm_.LoadImmutable(
      MachineType::Pointer(), instance_node_,
      WasmInstanceObject::kIsolateRootOffset - kHeapObjectTag);
  return gasm_.LoadImmutable(
      MachineType::Pointer(), isolate_root,
      IsolateData::root_slot_offset(RootIndex::kNullValue));
}
// TODO(manoskouk): Use the Callbacks infrastructure from wasm-compiler.h to
// unify all check/cast implementations.
// TODO(manoskouk): Find a way to optimize branches on typechecks.

// Lowers WasmTypeCheck to an explicit walk of the supertypes array,
// producing a word32 boolean (1 iff the check succeeds).
Reduction WasmGCLowering::ReduceWasmTypeCheck(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmTypeCheck);
  Node* object = node->InputAt(0);
  Node* rtt = node->InputAt(1);
  Node* effect_input = NodeProperties::GetEffectInput(node);
  Node* control_input = NodeProperties::GetControlInput(node);
  auto config = OpParameter<WasmTypeCheckConfig>(node->op());
  int rtt_depth = config.rtt_depth;
  bool object_can_be_null = config.object_can_be_null;
  gasm_.InitializeEffectControl(effect_input, control_input);
  auto end_label = gasm_.MakeLabel(MachineRepresentation::kWord32);
  // A null object fails the check.
  if (object_can_be_null) {
    gasm_.GotoIf(gasm_.TaggedEqual(object, Null()), &end_label,
                 BranchHint::kFalse, gasm_.Int32Constant(0));
  }
  Node* map = gasm_.LoadMap(object);
  // First, check if types happen to be equal. This has been shown to give large
  // speedups.
  gasm_.GotoIf(gasm_.TaggedEqual(map, rtt), &end_label, BranchHint::kTrue,
               gasm_.Int32Constant(1));
  Node* type_info = gasm_.LoadWasmTypeInfo(map);
  Node* supertypes = gasm_.LoadSupertypes(type_info);
  DCHECK_GE(rtt_depth, 0);
  Node* rtt_depth_node = gasm_.IntPtrConstant(rtt_depth);
  // If the depth of the rtt is known to be less than the minimum supertype
  // array length, we can access the supertype without bounds-checking the
  // supertype array.
  if (static_cast<uint32_t>(rtt_depth) >= wasm::kMinimumSupertypeArraySize) {
    Node* supertypes_length = gasm_.BuildChangeSmiToIntPtr(
        gasm_.LoadFixedArrayLengthAsSmi(supertypes));
    gasm_.GotoIfNot(gasm_.UintLessThan(rtt_depth_node, supertypes_length),
                    &end_label, BranchHint::kTrue, gasm_.Int32Constant(0));
  }
  // The check succeeds iff {rtt} is found at index {rtt_depth} in the
  // supertypes array.
  Node* maybe_match = gasm_.LoadImmutableFixedArrayElement(
      supertypes, rtt_depth_node, MachineType::TaggedPointer());
  gasm_.Goto(&end_label, gasm_.TaggedEqual(maybe_match, rtt));
  gasm_.Bind(&end_label);
  ReplaceWithValue(node, end_label.PhiAt(0), gasm_.effect(), gasm_.control());
  node->Kill();
  return Replace(end_label.PhiAt(0));  // Meaningless argument.
}
// Lowers WasmTypeCast to an explicit walk of the supertypes array that traps
// with kTrapIllegalCast on failure; the value output is the object itself.
// Note: unlike WasmTypeCheck, a null object passes the cast here.
Reduction WasmGCLowering::ReduceWasmTypeCast(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kWasmTypeCast);
  Node* object = node->InputAt(0);
  Node* rtt = node->InputAt(1);
  Node* effect_input = NodeProperties::GetEffectInput(node);
  Node* control_input = NodeProperties::GetControlInput(node);
  auto config = OpParameter<WasmTypeCheckConfig>(node->op());
  int rtt_depth = config.rtt_depth;
  bool object_can_be_null = config.object_can_be_null;
  gasm_.InitializeEffectControl(effect_input, control_input);
  auto end_label = gasm_.MakeLabel();
  // A null object skips the check (null succeeds the cast).
  if (object_can_be_null) {
    gasm_.GotoIf(gasm_.TaggedEqual(object, Null()), &end_label,
                 BranchHint::kFalse);
  }
  Node* map = gasm_.LoadMap(object);
  // First, check if types happen to be equal. This has been shown to give large
  // speedups.
  gasm_.GotoIf(gasm_.TaggedEqual(map, rtt), &end_label, BranchHint::kTrue);
  Node* type_info = gasm_.LoadWasmTypeInfo(map);
  Node* supertypes = gasm_.LoadSupertypes(type_info);
  DCHECK_GE(rtt_depth, 0);
  Node* rtt_depth_node = gasm_.IntPtrConstant(rtt_depth);
  // If the depth of the rtt is known to be less than the minimum supertype
  // array length, we can access the supertype without bounds-checking the
  // supertype array.
  if (static_cast<uint32_t>(rtt_depth) >= wasm::kMinimumSupertypeArraySize) {
    Node* supertypes_length = gasm_.BuildChangeSmiToIntPtr(
        gasm_.LoadFixedArrayLengthAsSmi(supertypes));
    gasm_.TrapUnless(gasm_.UintLessThan(rtt_depth_node, supertypes_length),
                     TrapId::kTrapIllegalCast);
  }
  // Trap unless {rtt} is found at index {rtt_depth} in the supertypes array.
  Node* maybe_match = gasm_.LoadImmutableFixedArrayElement(
      supertypes, rtt_depth_node, MachineType::TaggedPointer());
  gasm_.TrapUnless(gasm_.TaggedEqual(maybe_match, rtt),
                   TrapId::kTrapIllegalCast);
  gasm_.Goto(&end_label);
  gasm_.Bind(&end_label);
  ReplaceWithValue(node, object, gasm_.effect(), gasm_.control());
  node->Kill();
  return Replace(object);
}
// Lowers AssertNotNull (wasm ref.as_non_null): traps with NullDereference
// when the input is the null reference, otherwise forwards it unchanged.
Reduction WasmGCLowering::ReduceAssertNotNull(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAssertNotNull);
  Node* value = NodeProperties::GetValueInput(node, 0);
  gasm_.InitializeEffectControl(NodeProperties::GetEffectInput(node),
                                NodeProperties::GetControlInput(node));
  // Emit the null check as an explicit trap.
  Node* is_null = gasm_.TaggedEqual(value, Null());
  gasm_.TrapIf(is_null, TrapId::kTrapNullDereference);
  ReplaceWithValue(node, value, gasm_.effect(), gasm_.control());
  node->Kill();
  return Replace(value);
}
// Lowers a Null node (wasm ref.null) to the canonical null constant.
Reduction WasmGCLowering::ReduceNull(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kNull);
  Node* null_value = Null();
  return Replace(null_value);
}
// Lowers IsNull (wasm ref.is_null) to a tagged equality check against the
// null constant.
Reduction WasmGCLowering::ReduceIsNull(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kIsNull);
  Node* value = NodeProperties::GetValueInput(node, 0);
  Node* check = gasm_.TaggedEqual(value, Null());
  return Replace(check);
}
// Lowers RttCanon (wasm rtt.canon): loads the canonical map for the given
// static type index out of the instance's managed-object-maps list.
Reduction WasmGCLowering::ReduceRttCanon(Node* node) {
  int type_index = OpParameter<int>(node->op());
  // Fetch the FixedArray of canonical maps from the instance object.
  Node* managed_object_maps = gasm_.LoadImmutable(
      MachineType::TaggedPointer(), instance_node_,
      WasmInstanceObject::kManagedObjectMapsOffset - kHeapObjectTag);
  // Index into that array with the static type index.
  Node* canonical_map = gasm_.LoadImmutable(
      MachineType::TaggedPointer(), managed_object_maps,
      wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(type_index));
  return Replace(canonical_map);
}
} // namespace compiler
} // namespace internal
} // namespace v8

View File

@ -0,0 +1,47 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif // !V8_ENABLE_WEBASSEMBLY
#ifndef V8_COMPILER_WASM_GC_LOWERING_H_
#define V8_COMPILER_WASM_GC_LOWERING_H_
#include "src/compiler/graph-reducer.h"
#include "src/compiler/wasm-graph-assembler.h"
namespace v8 {
namespace internal {
namespace compiler {
class MachineGraph;
class WasmGraphAssembler;
// Reducer that lowers wasm-gc-specific simplified operators (WasmTypeCheck,
// WasmTypeCast, AssertNotNull, Null, IsNull, RttCanon) to lower-level graph
// constructs, as a dedicated phase in the wasm compilation pipeline.
class WasmGCLowering final : public AdvancedReducer {
 public:
  WasmGCLowering(Editor* editor, MachineGraph* mcgraph);

  const char* reducer_name() const override { return "WasmGCLowering"; }

  // Reducer entry point.
  Reduction Reduce(Node* node) final;

 private:
  // One lowering routine per wasm-gc opcode.
  Reduction ReduceWasmTypeCheck(Node* node);
  Reduction ReduceWasmTypeCast(Node* node);
  Reduction ReduceAssertNotNull(Node* node);
  Reduction ReduceNull(Node* node);
  Reduction ReduceIsNull(Node* node);
  Reduction ReduceRttCanon(Node* node);
  // Returns the node representing the wasm null reference.
  Node* Null();
  WasmGraphAssembler gasm_;
  // NOTE(review): presumably the graph's Dead node, used when replacing
  // killed nodes — confirm against the .cc file.
  Node* dead_;
  // Node holding the current WasmInstanceObject; base for instance-field
  // loads (see ReduceRttCanon).
  Node* instance_node_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_WASM_GC_LOWERING_H_

View File

@ -6,6 +6,7 @@
#include "src/compiler/diamond.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-objects.h"
@ -347,6 +348,31 @@ Node* WasmGraphAssembler::IsDataRefMap(Node* map) {
Int32Constant(LAST_WASM_OBJECT_TYPE - FIRST_WASM_OBJECT_TYPE));
}
// Emits a WasmTypeCheck simplified node (wasm ref.test) checking {object}
// against {rtt}, threading the current effect and control.
Node* WasmGraphAssembler::WasmTypeCheck(Node* object, Node* rtt,
                                        WasmTypeCheckConfig config) {
  const Operator* op = simplified_.WasmTypeCheck(config);
  Node* check = graph()->NewNode(op, object, rtt, effect(), control());
  return AddNode(check);
}
// Emits a WasmTypeCast simplified node (wasm ref.cast) casting {object} to
// {rtt}, threading the current effect and control.
Node* WasmGraphAssembler::WasmTypeCast(Node* object, Node* rtt,
                                       WasmTypeCheckConfig config) {
  const Operator* op = simplified_.WasmTypeCast(config);
  Node* cast = graph()->NewNode(op, object, rtt, effect(), control());
  return AddNode(cast);
}
// Emits a Null simplified node (wasm ref.null).
Node* WasmGraphAssembler::Null() {
  const Operator* op = simplified_.Null();
  return AddNode(graph()->NewNode(op));
}
// Emits an IsNull simplified node (wasm ref.is_null) testing {object}.
Node* WasmGraphAssembler::IsNull(Node* object) {
  const Operator* op = simplified_.IsNull();
  return AddNode(graph()->NewNode(op, object));
}
// Emits an AssertNotNull simplified node (wasm ref.as_non_null) for {object},
// threading the current effect and control.
Node* WasmGraphAssembler::AssertNotNull(Node* object) {
  Node* assert_node = graph()->NewNode(simplified_.AssertNotNull(), object,
                                       effect(), control());
  return AddNode(assert_node);
}
// Generic HeapObject helpers.
Node* WasmGraphAssembler::HasInstanceType(Node* heap_object,

View File

@ -240,7 +240,17 @@ class WasmGraphAssembler : public GraphAssembler {
Node* IsDataRefMap(Node* map);
// Generic HeapObject helpers.
Node* WasmTypeCheck(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* WasmTypeCast(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* Null();
Node* IsNull(Node* object);
Node* AssertNotNull(Node* object);
// Generic helpers.
Node* HasInstanceType(Node* heap_object, InstanceType type);

View File

@ -378,6 +378,7 @@ class RuntimeCallTimer final {
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmGCLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmInlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopPeeling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopUnrolling) \

View File

@ -4,6 +4,7 @@
#include "src/wasm/graph-builder-interface.h"
#include "src/compiler/wasm-compiler-definitions.h"
#include "src/compiler/wasm-compiler.h"
#include "src/flags/flags.h"
#include "src/handles/handles.h"
@ -25,6 +26,8 @@ namespace wasm {
namespace {
using TFNode = compiler::Node;
// An SsaEnv environment carries the current local variable renaming
// as well as the current effect and control dependency in the TF graph.
// It maintains a control state that tracks whether the environment
@ -1179,12 +1182,12 @@ class WasmGraphBuildingInterface {
result->node = builder_->RttCanon(type_index);
}
using StaticKnowledge = compiler::WasmGraphBuilder::ObjectReferenceKnowledge;
using WasmTypeCheckConfig = v8::internal::compiler::WasmTypeCheckConfig;
StaticKnowledge ComputeStaticKnowledge(ValueType object_type,
ValueType rtt_type,
const WasmModule* module) {
StaticKnowledge result;
WasmTypeCheckConfig ComputeWasmTypeCheckConfig(ValueType object_type,
ValueType rtt_type,
const WasmModule* module) {
WasmTypeCheckConfig result;
result.object_can_be_null = object_type.is_nullable();
DCHECK(object_type.is_object_reference()); // Checked by validation.
// In the bottom case, the result is irrelevant.
@ -1197,27 +1200,27 @@ class WasmGraphBuildingInterface {
void RefTest(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* result) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
result->node = builder_->RefTest(object.node, rtt.node, config);
}
void RefCast(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* result) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
result->node =
builder_->RefCast(object.node, rtt.node, config, decoder->position());
}
template <void (compiler::WasmGraphBuilder::*branch_function)(
TFNode*, TFNode*, StaticKnowledge, TFNode**, TFNode**, TFNode**,
TFNode*, TFNode*, WasmTypeCheckConfig, TFNode**, TFNode**, TFNode**,
TFNode**)>
void BrOnCastAbs(FullDecoder* decoder, const Value& object, const Value& rtt,
Value* forwarding_value, uint32_t br_depth,
bool branch_on_match) {
StaticKnowledge config =
ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
WasmTypeCheckConfig config =
ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_);
SsaEnv* branch_env = Split(decoder->zone(), ssa_env_);
SsaEnv* no_branch_env = Steal(decoder->zone(), ssa_env_);
no_branch_env->SetNotMerged();