[*] Another merge. Is GC still unstable?

Last aurora commit: 87131415
Reece Wilson 2023-02-07 18:09:22 +00:00
commit 11385c7b85
68 changed files with 1791 additions and 1063 deletions

View File

@ -557,7 +557,8 @@ if (v8_enable_static_roots == "") {
assert(!v8_enable_static_roots ||
(v8_enable_pointer_compression && v8_enable_shared_ro_heap &&
v8_enable_pointer_compression_shared_cage &&
v8_enable_webassembly && v8_enable_i18n_support),
v8_enable_external_code_space && v8_enable_webassembly &&
v8_enable_i18n_support),
"Trying to enable static roots in a configuration that is not supported")
if (v8_enable_webassembly && !target_is_simulator && v8_current_cpu == "x64") {

DEPS
View File

@ -108,9 +108,9 @@ deps = {
'base/trace_event/common':
Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '05a225a3e0bbd6fb6a9cac02d482ab784194411d',
'build':
Var('chromium_url') + '/chromium/src/build.git' + '@' + 'd0fad164969ab7f41f163f9ee738ea692f43df53',
Var('chromium_url') + '/chromium/src/build.git' + '@' + 'a88dbf7fdc8b48ac7bfe979a5966f05b5a3dad91',
'buildtools':
Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '5408fe0e010a7d36bb2684d5f38df67dcdfe31de',
Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '8b4b823eed3396ac0ee966fa01d8e2817fe1e7c2',
'buildtools/clang_format/script':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + 'f97059df7f8b205064625cdb5f97b56668a125ef',
'buildtools/linux64': {
@ -134,7 +134,7 @@ deps = {
'condition': 'host_os == "mac"',
},
'buildtools/third_party/libc++/trunk':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '549781a48cef7a038cadbe8ae9034c2d63685d9a',
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '31cf778cc264a5baf99a0f1f240d7aceb206065f',
'buildtools/third_party/libc++abi/trunk':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + 'b74d7716111d7eda5c03cb8f5dfc940e1c2c0030',
'buildtools/third_party/libunwind/trunk':
@ -212,7 +212,7 @@ deps = {
'dep_type': 'cipd',
},
'third_party/catapult': {
'url': Var('chromium_url') + '/catapult.git' + '@' + 'd0d703ea303c91f3afe39ebf8d2d4c9342accedc',
'url': Var('chromium_url') + '/catapult.git' + '@' + 'e8a7a25a9b48872864df9744c4195d057d0e2c32',
'condition': 'checkout_android',
},
'third_party/colorama/src': {
@ -220,7 +220,7 @@ deps = {
'condition': 'checkout_android',
},
'third_party/depot_tools':
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'ef2d011ad3041801565aa8c6d1418cc82c0ddb2e',
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '2c9079313953c843617f88ee766e838ee3ed8255',
'third_party/fuchsia-sdk/sdk': {
'packages': [
{
@ -275,7 +275,7 @@ deps = {
'third_party/zlib':
Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + '2d44c51ada6d325b85b53427b02dabf44648bca4',
'tools/clang':
Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '44e5e39a90511e079e4b9bc2f753059f2fe6ac4b',
Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '6e5ae687b5d91f01b57a43a8761f8a39149756f0',
'tools/luci-go': {
'packages': [
{

View File

@ -9,6 +9,7 @@
#include "v8-data.h" // NOLINT(build/include_directory)
#include "v8-local-handle.h" // NOLINT(build/include_directory)
#include "v8-maybe.h" // NOLINT(build/include_directory)
#include "v8-snapshot.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
@ -163,6 +164,13 @@ class V8_EXPORT Context : public Data {
*/
void Exit();
/**
* Attempts to recursively freeze all objects reachable from this context.
* Some objects (generators, iterators, non-const closures) can not be frozen
* and will cause this method to throw an error.
*/
Maybe<void> DeepFreeze();
/** Returns the isolate associated with a current context. */
Isolate* GetIsolate();
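A hedged usage sketch of the new DeepFreeze() API from embedder code follows; the helper name, the isolate/context setup and the TryCatch handling are assumptions for illustration, not part of this commit.

#include <v8.h>

// Hypothetical embedder helper exercising Context::DeepFreeze(). DeepFreeze()
// returns Nothing and schedules a TypeError when something that cannot be
// frozen (generator, iterator, non-const closure or context local) is
// reachable from the context.
void TryDeepFreeze(v8::Isolate* isolate, v8::Local<v8::Context> context) {
  v8::HandleScope handle_scope(isolate);
  v8::TryCatch try_catch(isolate);
  if (context->DeepFreeze().IsNothing()) {
    // Freezing failed; the exception (CannotDeepFreezeObject /
    // CannotDeepFreezeValue, added later in this commit) is available
    // through try_catch.Exception().
  }
}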

View File

@ -544,7 +544,7 @@ static constexpr PlatformSharedMemoryHandle kInvalidSharedMemoryHandle = -1;
// to avoid pulling in large OS header files into this header file. Instead,
// the users of these routines are expected to include the respective OS
// headers in addition to this one.
#if V8_OS_MACOS
#if V8_OS_DARWIN
// Convert between a shared memory handle and a mach_port_t referencing a memory
// entry object.
inline PlatformSharedMemoryHandle SharedMemoryHandleFromMachMemoryEntry(

View File

@ -344,6 +344,11 @@ class V8_EXPORT Value : public Data {
*/
bool IsWasmModuleObject() const;
/**
* Returns true if this value is the WasmNull object.
*/
bool IsWasmNull() const;
/**
* Returns true if the value is a Module Namespace Object.
*/
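A minimal, hypothetical sketch of how the new IsWasmNull() predicate might be used; the helper and its caller are assumptions, not part of the diff.

#include <v8.h>

// Hypothetical helper: true when `value` is the dedicated wasm null sentinel,
// which is distinct from the JavaScript null value. In builds without
// V8_ENABLE_WEBASSEMBLY, IsWasmNull() always returns false, as the api.cc
// change in this commit shows.
bool IsWasmNullSentinel(v8::Local<v8::Value> value) {
  return value->IsWasmNull();
}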

View File

@ -820,7 +820,7 @@
'v8_win_dbg': {
'swarming_dimensions' : {
'cpu': 'x86-64',
'os': 'Windows-7-SP1',
'os': 'Windows-10-19045',
},
'tests': [
{'name': 'mozilla'},
@ -830,7 +830,7 @@
'v8_win_rel': {
'swarming_dimensions' : {
'cpu': 'x86-64',
'os': 'Windows-7-SP1',
'os': 'Windows-10-19045',
},
'tests': [
{'name': 'mozilla'},
@ -1886,7 +1886,7 @@
'V8 Win32': {
'swarming_dimensions': {
'cpu': 'x86-64',
'os': 'Windows-7-SP1',
'os': 'Windows-10-19045',
},
'tests': [
{'name': 'mozilla'},
@ -1897,7 +1897,7 @@
'V8 Win32 - debug': {
'swarming_dimensions': {
'cpu': 'x86-64',
'os': 'Windows-7-SP1',
'os': 'Windows-10-19045',
},
'tests': [
{'name': 'mozilla'},

View File

@ -47,6 +47,7 @@
#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
#include "src/date/date.h"
#include "src/objects/primitive-heap-object.h"
#include "src/utils/identity-map.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/debug/debug-wasm-objects.h"
#endif // V8_ENABLE_WEBASSEMBLY
@ -85,6 +86,7 @@
#include "src/objects/embedder-data-slot-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/instance-type.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
@ -3885,9 +3887,11 @@ VALUE_IS_SPECIFIC_TYPE(Set, JSSet)
#if V8_ENABLE_WEBASSEMBLY
VALUE_IS_SPECIFIC_TYPE(WasmMemoryObject, WasmMemoryObject)
VALUE_IS_SPECIFIC_TYPE(WasmModuleObject, WasmModuleObject)
VALUE_IS_SPECIFIC_TYPE(WasmNull, WasmNull)
#else
bool Value::IsWasmMemoryObject() const { return false; }
bool Value::IsWasmModuleObject() const { return false; }
bool Value::IsWasmNull() const { return false; }
#endif // V8_ENABLE_WEBASSEMBLY
VALUE_IS_SPECIFIC_TYPE(WeakMap, JSWeakMap)
VALUE_IS_SPECIFIC_TYPE(WeakSet, JSWeakSet)
@ -6833,6 +6837,218 @@ Local<Value> v8::Context::GetSecurityToken() {
return Utils::ToLocal(token_handle);
}
namespace {
bool MayContainObjectsToFreeze(i::InstanceType obj_type) {
if (i::InstanceTypeChecker::IsString(obj_type)) return false;
if (i::InstanceTypeChecker::IsSharedFunctionInfo(obj_type)) return false;
return true;
}
bool IsJSReceiverSafeToFreeze(i::InstanceType obj_type) {
DCHECK(i::InstanceTypeChecker::IsJSReceiver(obj_type));
switch (obj_type) {
case i::JS_OBJECT_TYPE:
case i::JS_GLOBAL_OBJECT_TYPE:
case i::JS_GLOBAL_PROXY_TYPE:
case i::JS_PRIMITIVE_WRAPPER_TYPE:
case i::JS_FUNCTION_TYPE:
/* Function types */
case i::BIGINT64_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::BIGUINT64_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::FLOAT32_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::FLOAT64_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::INT16_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::INT32_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::INT8_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::UINT16_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::UINT32_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::UINT8_CLAMPED_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::UINT8_TYPED_ARRAY_CONSTRUCTOR_TYPE:
case i::JS_ARRAY_CONSTRUCTOR_TYPE:
case i::JS_PROMISE_CONSTRUCTOR_TYPE:
case i::JS_REG_EXP_CONSTRUCTOR_TYPE:
case i::JS_CLASS_CONSTRUCTOR_TYPE:
/* Prototype Types */
case i::JS_ARRAY_ITERATOR_PROTOTYPE_TYPE:
case i::JS_ITERATOR_PROTOTYPE_TYPE:
case i::JS_MAP_ITERATOR_PROTOTYPE_TYPE:
case i::JS_OBJECT_PROTOTYPE_TYPE:
case i::JS_PROMISE_PROTOTYPE_TYPE:
case i::JS_REG_EXP_PROTOTYPE_TYPE:
case i::JS_SET_ITERATOR_PROTOTYPE_TYPE:
case i::JS_SET_PROTOTYPE_TYPE:
case i::JS_STRING_ITERATOR_PROTOTYPE_TYPE:
case i::JS_TYPED_ARRAY_PROTOTYPE_TYPE:
/* */
case i::JS_ARRAY_TYPE:
return true;
#if V8_ENABLE_WEBASSEMBLY
case i::WASM_ARRAY_TYPE:
case i::WASM_STRUCT_TYPE:
#endif // V8_ENABLE_WEBASSEMBLY
case i::JS_PROXY_TYPE:
return true;
// These types are known not to freeze.
case i::JS_MAP_KEY_ITERATOR_TYPE:
case i::JS_MAP_KEY_VALUE_ITERATOR_TYPE:
case i::JS_MAP_VALUE_ITERATOR_TYPE:
case i::JS_SET_KEY_VALUE_ITERATOR_TYPE:
case i::JS_SET_VALUE_ITERATOR_TYPE:
case i::JS_GENERATOR_OBJECT_TYPE:
case i::JS_ASYNC_FUNCTION_OBJECT_TYPE:
case i::JS_ASYNC_GENERATOR_OBJECT_TYPE:
case i::JS_ARRAY_ITERATOR_TYPE: {
return false;
}
default:
// TODO(behamilton): Handle any types that fall through here.
return false;
}
}
class ObjectVisitorDeepFreezer : i::ObjectVisitor {
public:
explicit ObjectVisitorDeepFreezer(i::Isolate* isolate) : isolate_(isolate) {}
bool DeepFreeze(i::Handle<i::Context> context) {
bool success = VisitObject(*i::Handle<i::HeapObject>::cast(context));
DCHECK_EQ(success, !error_.has_value());
if (!success) {
THROW_NEW_ERROR_RETURN_VALUE(
isolate_, NewTypeError(error_->msg_id, error_->name), false);
}
for (const auto& obj : objects_to_freeze_) {
MAYBE_RETURN_ON_EXCEPTION_VALUE(
isolate_,
i::JSReceiver::SetIntegrityLevel(isolate_, obj, i::FROZEN,
i::kThrowOnError),
false);
}
return true;
}
void VisitPointers(i::HeapObject host, i::ObjectSlot start,
i::ObjectSlot end) final {
VisitPointersImpl(start, end);
}
void VisitPointers(i::HeapObject host, i::MaybeObjectSlot start,
i::MaybeObjectSlot end) final {
VisitPointersImpl(start, end);
}
void VisitMapPointer(i::HeapObject host) final {
VisitPointer(host, host.map_slot());
}
void VisitCodePointer(i::HeapObject host, i::CodeObjectSlot slot) final {}
void VisitCodeTarget(i::InstructionStream host, i::RelocInfo* rinfo) final {}
void VisitEmbeddedPointer(i::InstructionStream host,
i::RelocInfo* rinfo) final {}
void VisitCustomWeakPointers(i::HeapObject host, i::ObjectSlot start,
i::ObjectSlot end) final {}
private:
struct ErrorInfo {
i::MessageTemplate msg_id;
i::Handle<i::String> name;
};
template <typename TSlot>
void VisitPointersImpl(TSlot start, TSlot end) {
for (TSlot current = start; current < end; ++current) {
typename TSlot::TObject object = current.load(isolate_);
i::HeapObject heap_object;
if (object.GetHeapObjectIfStrong(&heap_object)) {
if (!VisitObject(heap_object)) {
return;
}
}
}
}
bool VisitObject(i::HeapObject obj) {
DCHECK(!error_.has_value());
DCHECK(!obj.is_null());
i::DisallowGarbageCollection no_gc;
i::InstanceType obj_type = obj.map().instance_type();
// Skip common types that can't contain items to freeze.
if (!MayContainObjectsToFreeze(obj_type)) {
return true;
}
if (!done_list_.insert(obj).second) {
// If we couldn't insert (because it is already in the set) then we're
// done.
return true;
}
// For contexts we need to ensure that all accessible locals are const.
// If not they could be replaced to bypass freezing.
if (i::InstanceTypeChecker::IsContext(obj_type)) {
i::ScopeInfo scope_info = i::Context::cast(obj).scope_info();
for (auto it : i::ScopeInfo::IterateLocalNames(&scope_info, no_gc)) {
if (scope_info.ContextLocalMode(it->index()) !=
i::VariableMode::kConst) {
DCHECK(!error_.has_value());
error_ = ErrorInfo{i::MessageTemplate::kCannotDeepFreezeValue,
i::handle(it->name(), isolate_)};
return false;
}
}
} else if (i::InstanceTypeChecker::IsJSReceiver(obj_type)) {
i::Handle<i::JSReceiver> receiver =
i::handle(i::JSReceiver::cast(obj), isolate_);
if (!IsJSReceiverSafeToFreeze(obj_type)) {
DCHECK(!error_.has_value());
error_ = ErrorInfo{i::MessageTemplate::kCannotDeepFreezeObject,
i::handle(receiver->class_name(), isolate_)};
return false;
}
// Save this to freeze after we are done. Freezing triggers garbage
// collection which doesn't work well with this visitor pattern, so we
// delay it until after.
objects_to_freeze_.push_back(receiver);
} else {
DCHECK(!i::InstanceTypeChecker::IsContext(obj_type) &&
!i::InstanceTypeChecker::IsJSReceiver(obj_type));
}
DCHECK(!error_.has_value());
obj.Iterate(isolate_, this);
// Iterate sets error_ on failure. We should propagate errors.
return !error_.has_value();
}
i::Isolate* isolate_;
std::unordered_set<i::Object, i::Object::Hasher> done_list_;
std::vector<i::Handle<i::JSReceiver>> objects_to_freeze_;
base::Optional<ErrorInfo> error_;
};
} // namespace
Maybe<void> Context::DeepFreeze() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
i::Isolate* i_isolate = env->GetIsolate();
// TODO(behamilton): Incorporate compatibility improvements similar to NodeJS:
// https://github.com/nodejs/node/blob/main/lib/internal/freeze_intrinsics.js
// These need to be done before freezing.
Local<Context> context = Utils::ToLocal(env);
ENTER_V8_NO_SCRIPT(i_isolate, context, Context, DeepFreeze, Nothing<void>(),
i::HandleScope);
ObjectVisitorDeepFreezer vfreezer(i_isolate);
has_pending_exception = !vfreezer.DeepFreeze(env);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(void);
return JustVoid();
}
v8::Isolate* Context::GetIsolate() {
i::Handle<i::Context> env = Utils::OpenHandle(this);
return reinterpret_cast<Isolate*>(env->GetIsolate());
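The implementation above records receivers during traversal and only freezes them afterwards, because JSReceiver::SetIntegrityLevel can allocate and trigger GC, which the raw-pointer visitor cannot tolerate. A standalone sketch of that two-phase shape, using hypothetical types rather than V8 internals:

#include <functional>
#include <vector>

struct Node;  // hypothetical graph node, standing in for a heap object

// Phase 1: a side-effect-free traversal that only collects candidates.
// Phase 2: the mutation (here, "freezing") runs after the walk, when
// allocation or object movement can no longer invalidate the traversal.
bool CollectThenApply(Node* root,
                      const std::function<bool(Node*, std::vector<Node*>*)>& collect,
                      const std::function<bool(Node*)>& apply) {
  std::vector<Node*> pending;
  if (!collect(root, &pending)) return false;  // traversal reported an error
  for (Node* node : pending) {
    if (!apply(node)) return false;  // e.g. setting the integrity level threw
  }
  return true;
}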

View File

@ -445,7 +445,7 @@ void OS::Free(void* address, size_t size) {
}
// macOS specific implementation in platform-macos.cc.
#if !defined(V8_OS_MACOS)
#if !defined(V8_OS_DARWIN)
// static
void* OS::AllocateShared(void* hint, size_t size, MemoryPermission access,
PlatformSharedMemoryHandle handle, uint64_t offset) {
@ -456,7 +456,7 @@ void* OS::AllocateShared(void* hint, size_t size, MemoryPermission access,
if (result == MAP_FAILED) return nullptr;
return result;
}
#endif // !defined(V8_OS_MACOS)
#endif // !defined(V8_OS_DARWIN)
// static
void OS::FreeShared(void* address, size_t size) {
@ -624,7 +624,7 @@ void OS::FreeAddressSpaceReservation(AddressSpaceReservation reservation) {
}
// macOS specific implementation in platform-macos.cc.
#if !defined(V8_OS_MACOS)
#if !defined(V8_OS_DARWIN)
// static
// Need to disable CFI_ICALL due to the indirect call to memfd_create.
DISABLE_CFI_ICALL
@ -657,7 +657,7 @@ void OS::DestroySharedMemoryHandle(PlatformSharedMemoryHandle handle) {
int fd = FileDescriptorFromSharedMemoryHandle(handle);
CHECK_EQ(0, close(fd));
}
#endif // !defined(V8_OS_MACOS)
#endif // !defined(V8_OS_DARWIN)
// static
bool OS::HasLazyCommits() {
@ -1016,7 +1016,7 @@ bool AddressSpaceReservation::Free(void* address, size_t size) {
}
// macOS specific implementation in platform-macos.cc.
#if !defined(V8_OS_MACOS)
#if !defined(V8_OS_DARWIN)
bool AddressSpaceReservation::AllocateShared(void* address, size_t size,
OS::MemoryPermission access,
PlatformSharedMemoryHandle handle,
@ -1027,7 +1027,7 @@ bool AddressSpaceReservation::AllocateShared(void* address, size_t size,
return mmap(address, size, prot, MAP_SHARED | MAP_FIXED, fd, offset) !=
MAP_FAILED;
}
#endif // !defined(V8_OS_MACOS)
#endif // !defined(V8_OS_DARWIN)
bool AddressSpaceReservation::FreeShared(void* address, size_t size) {
DCHECK(Contains(address, size));

View File

@ -97,10 +97,10 @@ builtin WasmInt32ToHeapNumber(val: int32): HeapNumber {
return AllocateHeapNumberWithValue(Convert<float64>(val));
}
builtin WasmFuncRefToJS(val: WasmInternalFunction|Null): JSFunction|Null|
builtin WasmFuncRefToJS(val: WasmInternalFunction|WasmNull): JSFunction|Null|
Undefined {
typeswitch (val) {
case (Null): {
case (WasmNull): {
return Null;
}
case (func: WasmInternalFunction): {

View File

@ -186,6 +186,7 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
V(NoClosuresCellMap, no_closures_cell_map, NoClosuresCellMap) \
V(null_to_string, null_to_string, NullToString) \
V(NullValue, null_value, Null) \
IF_WASM(V, WasmNull, wasm_null, WasmNull) \
V(number_string, number_string, NumberString) \
V(number_to_string, number_to_string, NumberToString) \
V(Object_string, Object_string, ObjectString) \

View File

@ -1839,13 +1839,11 @@ inline std::ostream& operator<<(std::ostream& os, CollectionKind kind) {
UNREACHABLE();
}
// Flags for the runtime function kDefineKeyedOwnPropertyInLiteral. A property
// can be enumerable or not, and, in case of functions, the function name can be
// set or not.
// Flags for the runtime function kDefineKeyedOwnPropertyInLiteral.
// - Whether the function name should be set or not.
enum class DefineKeyedOwnPropertyInLiteralFlag {
kNoFlags = 0,
kDontEnum = 1 << 0,
kSetFunctionName = 1 << 1
kSetFunctionName = 1 << 0
};
using DefineKeyedOwnPropertyInLiteralFlags =
base::Flags<DefineKeyedOwnPropertyInLiteralFlag>;

View File

@ -711,7 +711,10 @@ namespace internal {
T(OptionalChainingNoSuper, "Invalid optional chain from super property") \
T(OptionalChainingNoTemplate, "Invalid tagged template on optional chain") \
/* AggregateError */ \
T(AllPromisesRejected, "All promises were rejected")
T(AllPromisesRejected, "All promises were rejected") \
T(CannotDeepFreezeObject, "Cannot DeepFreeze object of type %") \
T(CannotDeepFreezeValue, "Cannot DeepFreeze non-const value %")
enum class MessageTemplate {
#define TEMPLATE(NAME, STRING) k##NAME,
MESSAGE_TEMPLATES(TEMPLATE)

View File

@ -55,15 +55,8 @@ std::ostream& operator<<(std::ostream& os, TrapId trap_id) {
}
TrapId TrapIdOf(const Operator* const op) {
#if V8_ENABLE_WEBASSEMBLY
// Combining this with the #else into a single DCHECK() does not work with MSVC.
DCHECK(op->opcode() == IrOpcode::kTrapIf ||
op->opcode() == IrOpcode::kTrapUnless ||
op->opcode() == IrOpcode::kAssertNotNull);
#else
DCHECK(op->opcode() == IrOpcode::kTrapIf ||
op->opcode() == IrOpcode::kTrapUnless);
#endif
return OpParameter<TrapId>(op);
}

View File

@ -3069,7 +3069,6 @@ JSNativeContextSpecialization::ReduceJSDefineKeyedOwnPropertyInLiteral(
NumberMatcher mflags(n.flags());
CHECK(mflags.HasResolvedValue());
DefineKeyedOwnPropertyInLiteralFlags cflags(mflags.ResolvedValue());
DCHECK(!(cflags & DefineKeyedOwnPropertyInLiteralFlag::kDontEnum));
if (cflags & DefineKeyedOwnPropertyInLiteralFlag::kSetFunctionName)
return NoChange();

View File

@ -740,6 +740,22 @@ bool operator==(CheckMinusZeroParameters const& lhs,
return lhs.mode() == rhs.mode() && lhs.feedback() == rhs.feedback();
}
#if V8_ENABLE_WEBASSEMBLY
V8_EXPORT_PRIVATE std::ostream& operator<<(
std::ostream& os, AssertNotNullParameters const& params) {
return os << params.type << ", " << params.trap_id;
}
size_t hash_value(AssertNotNullParameters const& params) {
return base::hash_combine(params.type, params.trap_id);
}
bool operator==(AssertNotNullParameters const& lhs,
AssertNotNullParameters const& rhs) {
return lhs.type == rhs.type && lhs.trap_id == rhs.trap_id;
}
#endif
#define PURE_OP_LIST(V) \
V(BooleanNot, Operator::kNoProperties, 1, 0) \
V(NumberEqual, Operator::kCommutative, 2, 0) \
@ -1238,40 +1254,6 @@ struct SimplifiedOperatorGlobalCache final {
LoadStackArgumentOperator kLoadStackArgument;
#if V8_ENABLE_WEBASSEMBLY
// Note: The following two operators have a control input solely to find the
// typing context from the control path in wasm-gc-operator-reducer.
struct IsNullOperator final : public Operator {
IsNullOperator()
: Operator(IrOpcode::kIsNull, Operator::kPure, "IsNull", 1, 0, 1, 1, 0,
0) {}
};
IsNullOperator kIsNull;
struct IsNotNullOperator final : public Operator {
IsNotNullOperator()
: Operator(IrOpcode::kIsNotNull, Operator::kPure, "IsNotNull", 1, 0, 1,
1, 0, 0) {}
};
IsNotNullOperator kIsNotNull;
struct NullOperator final : public Operator {
NullOperator()
: Operator(IrOpcode::kNull, Operator::kPure, "Null", 0, 0, 0, 1, 0, 0) {
}
};
NullOperator kNull;
struct AssertNotNullOperator final : public Operator1<TrapId> {
explicit AssertNotNullOperator(TrapId trap_id)
: Operator1(
IrOpcode::kAssertNotNull,
Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
"AssertNotNull", 1, 1, 1, 1, 1, 1, trap_id) {}
};
AssertNotNullOperator kAssertNotNullIllegalCast{TrapId::kTrapIllegalCast};
AssertNotNullOperator kAssertNotNullNullDereference{
TrapId::kTrapNullDereference};
struct WasmArrayLengthOperator final : public Operator {
WasmArrayLengthOperator()
: Operator(IrOpcode::kWasmArrayLength, Operator::kEliminatable,
@ -1517,22 +1499,48 @@ const Operator* SimplifiedOperatorBuilder::RttCanon(int index) {
"RttCanon", 0, 0, 0, 1, 0, 0, index);
}
const Operator* SimplifiedOperatorBuilder::Null() { return &cache_.kNull; }
// Note: The following two operators have a control input solely to find the
// typing context from the control path in wasm-gc-operator-reducer.
struct IsNullOperator final : public Operator1<wasm::ValueType> {
explicit IsNullOperator(wasm::ValueType type)
: Operator1(IrOpcode::kIsNull, Operator::kPure, "IsNull", 1, 0, 1, 1, 0,
0, type) {}
};
const Operator* SimplifiedOperatorBuilder::AssertNotNull(TrapId trap_id) {
switch (trap_id) {
case TrapId::kTrapNullDereference:
return &cache_.kAssertNotNullNullDereference;
case TrapId::kTrapIllegalCast:
return &cache_.kAssertNotNullIllegalCast;
default:
UNREACHABLE();
}
struct IsNotNullOperator final : public Operator1<wasm::ValueType> {
explicit IsNotNullOperator(wasm::ValueType type)
: Operator1(IrOpcode::kIsNotNull, Operator::kPure, "IsNotNull", 1, 0, 1,
1, 0, 0, type) {}
};
struct NullOperator final : public Operator1<wasm::ValueType> {
explicit NullOperator(wasm::ValueType type)
: Operator1(IrOpcode::kNull, Operator::kPure, "Null", 0, 0, 0, 1, 0, 0,
type) {}
};
struct AssertNotNullOperator final : public Operator1<AssertNotNullParameters> {
explicit AssertNotNullOperator(wasm::ValueType type, TrapId trap_id)
: Operator1(
IrOpcode::kAssertNotNull,
Operator::kNoWrite | Operator::kNoThrow | Operator::kIdempotent,
"AssertNotNull", 1, 1, 1, 1, 1, 1, {type, trap_id}) {}
};
const Operator* SimplifiedOperatorBuilder::Null(wasm::ValueType type) {
return zone()->New<NullOperator>(type);
}
const Operator* SimplifiedOperatorBuilder::IsNull() { return &cache_.kIsNull; }
const Operator* SimplifiedOperatorBuilder::IsNotNull() {
return &cache_.kIsNotNull;
const Operator* SimplifiedOperatorBuilder::AssertNotNull(wasm::ValueType type,
TrapId trap_id) {
return zone()->New<AssertNotNullOperator>(type, trap_id);
}
const Operator* SimplifiedOperatorBuilder::IsNull(wasm::ValueType type) {
return zone()->New<IsNullOperator>(type);
}
const Operator* SimplifiedOperatorBuilder::IsNotNull(wasm::ValueType type) {
return zone()->New<IsNotNullOperator>(type);
}
const Operator* SimplifiedOperatorBuilder::StringAsWtf16() {

View File

@ -743,6 +743,21 @@ size_t hash_value(FastApiCallParameters const&);
bool operator==(FastApiCallParameters const&, FastApiCallParameters const&);
#if V8_ENABLE_WEBASSEMBLY
struct AssertNotNullParameters {
wasm::ValueType type;
TrapId trap_id;
};
V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
AssertNotNullParameters const&);
size_t hash_value(AssertNotNullParameters const&);
bool operator==(AssertNotNullParameters const&, AssertNotNullParameters const&);
#endif
// Interface for building simplified operators, which represent the
// medium-level operations of V8, including adding numbers, allocating objects,
// indexing into objects and arrays, etc.
@ -1146,10 +1161,10 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final
const Operator* CheckTurboshaftTypeOf();
#if V8_ENABLE_WEBASSEMBLY
const Operator* AssertNotNull(TrapId trap_id);
const Operator* IsNull();
const Operator* IsNotNull();
const Operator* Null();
const Operator* AssertNotNull(wasm::ValueType type, TrapId trap_id);
const Operator* IsNull(wasm::ValueType type);
const Operator* IsNotNull(wasm::ValueType type);
const Operator* Null(wasm::ValueType type);
const Operator* RttCanon(int index);
const Operator* WasmTypeCheck(WasmTypeCheckConfig config);
const Operator* WasmTypeCast(WasmTypeCheckConfig config);

View File

@ -278,10 +278,12 @@ Node* WasmGraphBuilder::EffectPhi(unsigned count, Node** effects_and_control) {
effects_and_control);
}
Node* WasmGraphBuilder::RefNull() {
Node* WasmGraphBuilder::RefNull(wasm::ValueType type) {
return (v8_flags.experimental_wasm_gc && parameter_mode_ == kInstanceMode)
? gasm_->Null()
: LOAD_ROOT(NullValue, null_value);
? gasm_->Null(type)
: (type == wasm::kWasmExternRef || type == wasm::kWasmNullExternRef)
? LOAD_ROOT(NullValue, null_value)
: LOAD_ROOT(WasmNull, wasm_null);
}
Node* WasmGraphBuilder::RefFunc(uint32_t function_index) {
@ -752,6 +754,7 @@ Node* WasmGraphBuilder::Binop(wasm::WasmOpcode opcode, Node* left, Node* right,
}
Node* WasmGraphBuilder::Unop(wasm::WasmOpcode opcode, Node* input,
wasm::ValueType type,
wasm::WasmCodePosition position) {
const Operator* op;
MachineOperatorBuilder* m = mcgraph()->machine();
@ -1013,11 +1016,11 @@ Node* WasmGraphBuilder::Unop(wasm::WasmOpcode opcode, Node* input,
? BuildCcallConvertFloat(input, position, opcode)
: BuildIntConvertFloat(input, position, opcode);
case wasm::kExprRefIsNull:
return IsNull(input);
return IsNull(input, type);
// We abuse ref.as_non_null, which isn't otherwise used in this switch, as
// a sentinel for the negation of ref.is_null.
case wasm::kExprRefAsNonNull:
return gasm_->Int32Sub(gasm_->Int32Constant(1), IsNull(input));
return gasm_->Word32Equal(gasm_->Int32Constant(0), IsNull(input, type));
case wasm::kExprI32AsmjsLoadMem8S:
return BuildAsmjsLoadMem(MachineType::Int8(), input);
case wasm::kExprI32AsmjsLoadMem8U:
@ -1144,11 +1147,11 @@ void WasmGraphBuilder::TrapIfFalse(wasm::TrapReason reason, Node* cond,
SetSourcePosition(control(), position);
}
Node* WasmGraphBuilder::AssertNotNull(Node* object,
Node* WasmGraphBuilder::AssertNotNull(Node* object, wasm::ValueType type,
wasm::WasmCodePosition position,
wasm::TrapReason reason) {
TrapId trap_id = GetTrapIdForTrap(reason);
Node* result = gasm_->AssertNotNull(object, trap_id);
Node* result = gasm_->AssertNotNull(object, type, trap_id);
SetSourcePosition(result, position);
return result;
}
@ -2608,10 +2611,10 @@ Node* WasmGraphBuilder::BuildDiv64Call(Node* left, Node* right,
return gasm_->Load(result_type, stack_slot, 0);
}
Node* WasmGraphBuilder::IsNull(Node* object) {
Node* WasmGraphBuilder::IsNull(Node* object, wasm::ValueType type) {
return (v8_flags.experimental_wasm_gc && parameter_mode_ == kInstanceMode)
? gasm_->IsNull(object)
: gasm_->TaggedEqual(object, RefNull());
? gasm_->IsNull(object, type)
: gasm_->TaggedEqual(object, RefNull(type));
}
template <typename... Args>
@ -2985,7 +2988,8 @@ Node* WasmGraphBuilder::BuildCallRef(const wasm::FunctionSig* sig,
IsReturnCall continuation,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
args[0] = AssertNotNull(args[0], position);
args[0] =
AssertNotNull(args[0], wasm::kWasmFuncRef /* good enough */, position);
}
Node* function = args[0];
@ -3091,9 +3095,9 @@ Node* WasmGraphBuilder::ReturnCallIndirect(uint32_t table_index,
kReturnCall);
}
void WasmGraphBuilder::BrOnNull(Node* ref_object, Node** null_node,
Node** non_null_node) {
BranchExpectFalse(IsNull(ref_object), null_node, non_null_node);
void WasmGraphBuilder::BrOnNull(Node* ref_object, wasm::ValueType type,
Node** null_node, Node** non_null_node) {
BranchExpectFalse(IsNull(ref_object, type), null_node, non_null_node);
}
Node* WasmGraphBuilder::BuildI32Rol(Node* left, Node* right) {
@ -5259,7 +5263,7 @@ Node* WasmGraphBuilder::DefaultValue(wasm::ValueType type) {
case wasm::kS128:
return S128Zero();
case wasm::kRefNull:
return RefNull();
return RefNull(type);
case wasm::kRtt:
case wasm::kVoid:
case wasm::kBottom:
@ -5363,8 +5367,9 @@ Node* WasmGraphBuilder::ArrayNew(uint32_t array_index,
mcgraph()->machine()->BitcastFloat64ToInt64(), initial_value);
break;
case wasm::kRefNull:
initial_value_i64 =
initial_value == nullptr ? gasm_->Null() : initial_value;
initial_value_i64 = initial_value == nullptr
? gasm_->Null(type->element_type())
: initial_value;
if (kSystemPointerSize == 4) {
initial_value_i64 = graph()->NewNode(
mcgraph()->machine()->ChangeInt32ToInt64(), initial_value_i64);
@ -5511,9 +5516,10 @@ void WasmGraphBuilder::EqCheck(Node* object, bool object_can_be_null,
// TODO(7748): Is the extra null check actually beneficial for performance?
if (object_can_be_null) {
if (null_succeeds) {
callbacks.succeed_if(IsNull(object), BranchHint::kFalse);
callbacks.succeed_if(IsNull(object, wasm::kWasmAnyRef),
BranchHint::kFalse);
} else {
callbacks.fail_if(IsNull(object), BranchHint::kFalse);
callbacks.fail_if(IsNull(object, wasm::kWasmAnyRef), BranchHint::kFalse);
}
}
callbacks.succeed_if(gasm_->IsI31(object), BranchHint::kFalse);
@ -5528,9 +5534,10 @@ void WasmGraphBuilder::ManagedObjectInstanceCheck(Node* object,
bool null_succeeds) {
if (object_can_be_null) {
if (null_succeeds) {
callbacks.succeed_if(IsNull(object), BranchHint::kFalse);
callbacks.succeed_if(IsNull(object, wasm::kWasmAnyRef),
BranchHint::kFalse);
} else {
callbacks.fail_if(IsNull(object), BranchHint::kFalse);
callbacks.fail_if(IsNull(object, wasm::kWasmAnyRef), BranchHint::kFalse);
}
}
callbacks.fail_if(gasm_->IsI31(object), BranchHint::kFalse);
@ -5599,7 +5606,7 @@ Node* WasmGraphBuilder::RefTestAbstract(Node* object, wasm::HeapType type,
case wasm::HeapType::kNoExtern:
case wasm::HeapType::kNoFunc:
DCHECK(null_succeeds);
return IsNull(object);
return IsNull(object, wasm::ValueType::RefNull(type));
case wasm::HeapType::kAny:
// Any may never need a cast as it is either implicitly convertible or
// never convertible for any given type.
@ -5632,7 +5639,8 @@ Node* WasmGraphBuilder::RefCastAbstract(Node* object, wasm::HeapType type,
case wasm::HeapType::kNoExtern:
case wasm::HeapType::kNoFunc: {
DCHECK(null_succeeds);
TrapIfFalse(wasm::kTrapIllegalCast, IsNull(object), position);
TrapIfFalse(wasm::kTrapIllegalCast,
IsNull(object, wasm::ValueType::RefNull(type)), position);
return object;
}
case wasm::HeapType::kAny:
@ -5686,10 +5694,10 @@ void WasmGraphBuilder::BrOnEq(Node* object, Node* /*rtt*/,
[=](Callbacks callbacks) -> void {
if (config.from.is_nullable()) {
if (config.to.is_nullable()) {
callbacks.succeed_if(gasm_->IsNull(object),
callbacks.succeed_if(gasm_->IsNull(object, config.from),
BranchHint::kFalse);
} else {
callbacks.fail_if(gasm_->IsNull(object),
callbacks.fail_if(gasm_->IsNull(object, config.from),
BranchHint::kFalse);
}
}
@ -5775,7 +5783,7 @@ Node* WasmGraphBuilder::RefIsI31(Node* object, bool null_succeeds) {
auto done = gasm_->MakeLabel(MachineRepresentation::kWord32);
gasm_->GotoIf(gasm_->IsI31(object), &done, BranchHint::kTrue,
Int32Constant(1));
gasm_->Goto(&done, gasm_->IsNull(object));
gasm_->Goto(&done, gasm_->IsNull(object, wasm::kWasmAnyRef));
gasm_->Bind(&done);
return done.PhiAt(0);
}
@ -5786,7 +5794,7 @@ Node* WasmGraphBuilder::RefAsI31(Node* object, wasm::WasmCodePosition position,
bool null_succeeds) {
if (null_succeeds) {
auto done = gasm_->MakeLabel();
gasm_->GotoIf(gasm_->IsNull(object), &done);
gasm_->GotoIf(gasm_->IsNull(object, wasm::kWasmAnyRef), &done);
TrapIfFalse(wasm::kTrapIllegalCast, gasm_->IsI31(object), position);
gasm_->Goto(&done);
gasm_->Bind(&done);
@ -5805,9 +5813,11 @@ void WasmGraphBuilder::BrOnI31(Node* object, Node* /* rtt */,
[=](Callbacks callbacks) -> void {
if (config.from.is_nullable()) {
if (config.to.is_nullable()) {
callbacks.succeed_if(gasm_->IsNull(object), BranchHint::kFalse);
callbacks.succeed_if(gasm_->IsNull(object, config.from),
BranchHint::kFalse);
} else {
callbacks.fail_if(gasm_->IsNull(object), BranchHint::kFalse);
callbacks.fail_if(gasm_->IsNull(object, config.from),
BranchHint::kFalse);
}
}
callbacks.fail_if_not(gasm_->IsI31(object), BranchHint::kTrue);
@ -5827,7 +5837,8 @@ Node* WasmGraphBuilder::StructGet(Node* struct_object,
bool is_signed,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
struct_object = AssertNotNull(struct_object, position);
struct_object =
AssertNotNull(struct_object, wasm::kWasmStructRef, position);
}
return gasm_->StructGet(struct_object, struct_type, field_index, is_signed);
}
@ -5838,7 +5849,8 @@ void WasmGraphBuilder::StructSet(Node* struct_object,
CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
struct_object = AssertNotNull(struct_object, position);
struct_object =
AssertNotNull(struct_object, wasm::kWasmStructRef, position);
}
gasm_->StructSet(struct_object, field_value, struct_type, field_index);
}
@ -5868,7 +5880,7 @@ Node* WasmGraphBuilder::ArrayGet(Node* array_object,
CheckForNull null_check, bool is_signed,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
array_object = AssertNotNull(array_object, position);
array_object = AssertNotNull(array_object, wasm::kWasmArrayRef, position);
}
BoundsCheckArray(array_object, index, position);
return gasm_->ArrayGet(array_object, index, type, is_signed);
@ -5879,7 +5891,7 @@ void WasmGraphBuilder::ArraySet(Node* array_object, const wasm::ArrayType* type,
CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
array_object = AssertNotNull(array_object, position);
array_object = AssertNotNull(array_object, wasm::kWasmArrayRef, position);
}
BoundsCheckArray(array_object, index, position);
gasm_->ArraySet(array_object, index, value, type);
@ -5888,7 +5900,7 @@ void WasmGraphBuilder::ArraySet(Node* array_object, const wasm::ArrayType* type,
Node* WasmGraphBuilder::ArrayLen(Node* array_object, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
array_object = AssertNotNull(array_object, position);
array_object = AssertNotNull(array_object, wasm::kWasmArrayRef, position);
}
return gasm_->ArrayLength(array_object);
}
@ -5901,10 +5913,10 @@ void WasmGraphBuilder::ArrayCopy(Node* dst_array, Node* dst_index,
Node* length,
wasm::WasmCodePosition position) {
if (dst_null_check == kWithNullCheck) {
dst_array = AssertNotNull(dst_array, position);
dst_array = AssertNotNull(dst_array, wasm::kWasmArrayRef, position);
}
if (src_null_check == kWithNullCheck) {
src_array = AssertNotNull(src_array, position);
src_array = AssertNotNull(src_array, wasm::kWasmArrayRef, position);
}
BoundsCheckArrayCopy(dst_array, dst_index, length, position);
BoundsCheckArrayCopy(src_array, src_index, length, position);
@ -5976,7 +5988,7 @@ Node* WasmGraphBuilder::StringConst(uint32_t index) {
Node* WasmGraphBuilder::StringMeasureUtf8(Node* string, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringMeasureUtf8,
Operator::kEliminatable, string);
@ -5985,7 +5997,7 @@ Node* WasmGraphBuilder::StringMeasureUtf8(Node* string, CheckForNull null_check,
Node* WasmGraphBuilder::StringMeasureWtf8(Node* string, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringMeasureWtf8,
Operator::kEliminatable, string);
@ -5995,7 +6007,7 @@ Node* WasmGraphBuilder::StringMeasureWtf16(Node* string,
CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->LoadImmutableFromObject(
MachineType::Int32(), string,
@ -6008,7 +6020,7 @@ Node* WasmGraphBuilder::StringEncodeWtf8(uint32_t memory,
Node* offset,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf8,
Operator::kNoDeopt | Operator::kNoThrow, string,
@ -6021,10 +6033,10 @@ Node* WasmGraphBuilder::StringEncodeWtf8Array(
Node* array, CheckForNull array_null_check, Node* start,
wasm::WasmCodePosition position) {
if (string_null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
if (array_null_check == kWithNullCheck) {
array = AssertNotNull(array, position);
array = AssertNotNull(array, wasm::kWasmArrayRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf8Array,
Operator::kNoDeopt | Operator::kNoThrow, string,
@ -6036,7 +6048,7 @@ Node* WasmGraphBuilder::StringEncodeWtf16(uint32_t memory, Node* string,
CheckForNull null_check, Node* offset,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf16,
Operator::kNoDeopt | Operator::kNoThrow, string,
@ -6046,7 +6058,7 @@ Node* WasmGraphBuilder::StringEncodeWtf16(uint32_t memory, Node* string,
Node* WasmGraphBuilder::StringAsWtf16(Node* string, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->StringAsWtf16(string);
}
@ -6056,10 +6068,10 @@ Node* WasmGraphBuilder::StringEncodeWtf16Array(
CheckForNull array_null_check, Node* start,
wasm::WasmCodePosition position) {
if (string_null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
if (array_null_check == kWithNullCheck) {
array = AssertNotNull(array, position);
array = AssertNotNull(array, wasm::kWasmArrayRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf16Array,
Operator::kNoDeopt | Operator::kNoThrow, string,
@ -6069,8 +6081,12 @@ Node* WasmGraphBuilder::StringEncodeWtf16Array(
Node* WasmGraphBuilder::StringConcat(Node* head, CheckForNull head_null_check,
Node* tail, CheckForNull tail_null_check,
wasm::WasmCodePosition position) {
if (head_null_check == kWithNullCheck) head = AssertNotNull(head, position);
if (tail_null_check == kWithNullCheck) tail = AssertNotNull(tail, position);
if (head_null_check == kWithNullCheck) {
head = AssertNotNull(head, wasm::kWasmStringRef, position);
}
if (tail_null_check == kWithNullCheck) {
tail = AssertNotNull(tail, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(
Builtin::kStringAdd_CheckNone, Operator::kNoDeopt | Operator::kNoThrow,
head, tail,
@ -6084,10 +6100,12 @@ Node* WasmGraphBuilder::StringEqual(Node* a, CheckForNull a_null_check, Node* b,
// Covers "identical string pointer" and "both are null" cases.
gasm_->GotoIf(gasm_->TaggedEqual(a, b), &done, Int32Constant(1));
if (a_null_check == kWithNullCheck) {
gasm_->GotoIf(gasm_->IsNull(a), &done, Int32Constant(0));
gasm_->GotoIf(gasm_->IsNull(a, wasm::kWasmStringRef), &done,
Int32Constant(0));
}
if (b_null_check == kWithNullCheck) {
gasm_->GotoIf(gasm_->IsNull(b), &done, Int32Constant(0));
gasm_->GotoIf(gasm_->IsNull(b, wasm::kWasmStringRef), &done,
Int32Constant(0));
}
gasm_->Goto(&done, gasm_->CallBuiltin(Builtin::kWasmStringEqual,
Operator::kEliminatable, a, b));
@ -6097,7 +6115,9 @@ Node* WasmGraphBuilder::StringEqual(Node* a, CheckForNull a_null_check, Node* b,
Node* WasmGraphBuilder::StringIsUSVSequence(Node* str, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) str = AssertNotNull(str, position);
if (null_check == kWithNullCheck) {
str = AssertNotNull(str, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringIsUSVSequence,
Operator::kEliminatable, str);
@ -6105,7 +6125,9 @@ Node* WasmGraphBuilder::StringIsUSVSequence(Node* str, CheckForNull null_check,
Node* WasmGraphBuilder::StringAsWtf8(Node* str, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) str = AssertNotNull(str, position);
if (null_check == kWithNullCheck) {
str = AssertNotNull(str, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringAsWtf8, Operator::kEliminatable,
str);
@ -6115,7 +6137,9 @@ Node* WasmGraphBuilder::StringViewWtf8Advance(Node* view,
CheckForNull null_check,
Node* pos, Node* bytes,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) view = AssertNotNull(view, position);
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Advance,
Operator::kEliminatable, view, pos, bytes);
@ -6126,7 +6150,7 @@ void WasmGraphBuilder::StringViewWtf8Encode(
CheckForNull null_check, Node* addr, Node* pos, Node* bytes,
Node** next_pos, Node** bytes_written, wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, position);
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
Node* pair =
gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Encode,
@ -6141,7 +6165,7 @@ Node* WasmGraphBuilder::StringViewWtf8Slice(Node* view, CheckForNull null_check,
Node* pos, Node* bytes,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, position);
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Slice,
Operator::kEliminatable, view, pos, bytes);
@ -6151,7 +6175,7 @@ Node* WasmGraphBuilder::StringViewWtf16GetCodeUnit(
Node* string, CheckForNull null_check, Node* offset,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
Node* prepare = gasm_->StringPrepareForGetCodeunit(string);
Node* base = gasm_->Projection(0, prepare);
@ -6211,7 +6235,7 @@ Node* WasmGraphBuilder::StringViewWtf16Encode(uint32_t memory, Node* string,
Node* codeunits,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf16Encode,
Operator::kNoDeopt | Operator::kNoThrow, offset,
@ -6224,7 +6248,7 @@ Node* WasmGraphBuilder::StringViewWtf16Slice(Node* string,
Node* start, Node* end,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf16Slice,
Operator::kEliminatable, string, start, end);
@ -6232,7 +6256,9 @@ Node* WasmGraphBuilder::StringViewWtf16Slice(Node* string,
Node* WasmGraphBuilder::StringAsIter(Node* str, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) str = AssertNotNull(str, position);
if (null_check == kWithNullCheck) {
str = AssertNotNull(str, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringAsIter, Operator::kEliminatable,
str);
@ -6240,7 +6266,9 @@ Node* WasmGraphBuilder::StringAsIter(Node* str, CheckForNull null_check,
Node* WasmGraphBuilder::StringViewIterNext(Node* view, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) view = AssertNotNull(view, position);
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewIterNext,
Operator::kEliminatable, view);
@ -6250,7 +6278,9 @@ Node* WasmGraphBuilder::StringViewIterAdvance(Node* view,
CheckForNull null_check,
Node* codepoints,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) view = AssertNotNull(view, position);
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewIterAdvance,
Operator::kEliminatable, view, codepoints);
@ -6260,7 +6290,9 @@ Node* WasmGraphBuilder::StringViewIterRewind(Node* view,
CheckForNull null_check,
Node* codepoints,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) view = AssertNotNull(view, position);
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewIterRewind,
Operator::kEliminatable, view, codepoints);
@ -6269,7 +6301,9 @@ Node* WasmGraphBuilder::StringViewIterRewind(Node* view,
Node* WasmGraphBuilder::StringViewIterSlice(Node* view, CheckForNull null_check,
Node* codepoints,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) view = AssertNotNull(view, position);
if (null_check == kWithNullCheck) {
view = AssertNotNull(view, wasm::kWasmStringRef, position);
}
return gasm_->CallBuiltin(Builtin::kWasmStringViewIterSlice,
Operator::kEliminatable, view, codepoints);
@ -6278,8 +6312,12 @@ Node* WasmGraphBuilder::StringViewIterSlice(Node* view, CheckForNull null_check,
Node* WasmGraphBuilder::StringCompare(Node* lhs, CheckForNull null_check_lhs,
Node* rhs, CheckForNull null_check_rhs,
wasm::WasmCodePosition position) {
if (null_check_lhs == kWithNullCheck) lhs = AssertNotNull(lhs, position);
if (null_check_rhs == kWithNullCheck) rhs = AssertNotNull(rhs, position);
if (null_check_lhs == kWithNullCheck) {
lhs = AssertNotNull(lhs, wasm::kWasmStringRef, position);
}
if (null_check_rhs == kWithNullCheck) {
rhs = AssertNotNull(rhs, wasm::kWasmStringRef, position);
}
return gasm_->BuildChangeSmiToInt32(gasm_->CallBuiltin(
Builtin::kWasmStringCompare, Operator::kEliminatable, lhs, rhs));
}
@ -6292,7 +6330,7 @@ Node* WasmGraphBuilder::StringFromCodePoint(Node* code_point) {
Node* WasmGraphBuilder::StringHash(Node* string, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) {
string = AssertNotNull(string, position);
string = AssertNotNull(string, wasm::kWasmStringRef, position);
}
auto runtime_label = gasm_->MakeLabel();
@ -6341,7 +6379,9 @@ Node* WasmGraphBuilder::I31New(Node* input) {
Node* WasmGraphBuilder::I31GetS(Node* input, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) input = AssertNotNull(input, position);
if (null_check == kWithNullCheck) {
input = AssertNotNull(input, wasm::kWasmI31Ref, position);
}
if constexpr (SmiValuesAre31Bits()) {
input = gasm_->BuildTruncateIntPtrToInt32(input);
return gasm_->Word32SarShiftOutZeros(input,
@ -6356,7 +6396,9 @@ Node* WasmGraphBuilder::I31GetS(Node* input, CheckForNull null_check,
Node* WasmGraphBuilder::I31GetU(Node* input, CheckForNull null_check,
wasm::WasmCodePosition position) {
if (null_check == kWithNullCheck) input = AssertNotNull(input, position);
if (null_check == kWithNullCheck) {
input = AssertNotNull(input, wasm::kWasmI31Ref, position);
}
if constexpr (SmiValuesAre31Bits()) {
input = gasm_->BuildTruncateIntPtrToInt32(input);
return gasm_->Word32Shr(input, gasm_->BuildSmiShiftBitsConstant32());
@ -6593,63 +6635,80 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
case wasm::kF64:
return BuildChangeFloat64ToNumber(node);
case wasm::kRef:
switch (type.heap_representation()) {
case wasm::HeapType::kEq:
case wasm::HeapType::kI31:
case wasm::HeapType::kStruct:
case wasm::HeapType::kArray:
case wasm::HeapType::kAny:
case wasm::HeapType::kExtern:
case wasm::HeapType::kString:
case wasm::HeapType::kNone:
case wasm::HeapType::kNoFunc:
case wasm::HeapType::kNoExtern:
return node;
case wasm::HeapType::kBottom:
case wasm::HeapType::kStringViewWtf8:
case wasm::HeapType::kStringViewWtf16:
case wasm::HeapType::kStringViewIter:
UNREACHABLE();
case wasm::HeapType::kFunc:
default:
if (type.heap_representation() == wasm::HeapType::kFunc ||
module_->has_signature(type.ref_index())) {
// Typed function. Extract the external function.
return gasm_->LoadFromObject(
MachineType::TaggedPointer(), node,
wasm::ObjectAccess::ToTagged(
WasmInternalFunction::kExternalOffset));
} else {
return node;
}
}
case wasm::kRefNull:
switch (type.heap_representation()) {
case wasm::HeapType::kFunc: {
if (type.kind() == wasm::kRefNull) {
case wasm::HeapType::kExtern:
case wasm::HeapType::kNoExtern:
return node;
case wasm::HeapType::kNone:
case wasm::HeapType::kNoFunc:
return LOAD_ROOT(NullValue, null_value);
case wasm::HeapType::kEq:
case wasm::HeapType::kStruct:
case wasm::HeapType::kArray:
case wasm::HeapType::kString:
case wasm::HeapType::kI31:
case wasm::HeapType::kAny: {
auto done = gasm_->MakeLabel(MachineRepresentation::kTaggedPointer);
gasm_->GotoIfNot(IsNull(node, type), &done, node);
gasm_->Goto(&done, LOAD_ROOT(NullValue, null_value));
gasm_->Bind(&done);
return done.PhiAt(0);
}
case wasm::HeapType::kFunc:
default: {
if (type == wasm::kWasmFuncRef ||
module_->has_signature(type.ref_index())) {
auto done =
gasm_->MakeLabel(MachineRepresentation::kTaggedPointer);
// Null gets passed as-is.
gasm_->GotoIf(IsNull(node), &done, node);
auto null_label = gasm_->MakeLabel();
gasm_->GotoIf(IsNull(node, type), &null_label);
gasm_->Goto(&done,
gasm_->LoadFromObject(
MachineType::TaggedPointer(), node,
wasm::ObjectAccess::ToTagged(
WasmInternalFunction::kExternalOffset)));
gasm_->Bind(&null_label);
gasm_->Goto(&done, LOAD_ROOT(NullValue, null_value));
gasm_->Bind(&done);
return done.PhiAt(0);
} else {
return gasm_->LoadFromObject(
MachineType::TaggedPointer(), node,
wasm::ObjectAccess::ToTagged(
WasmInternalFunction::kExternalOffset));
}
}
case wasm::HeapType::kEq:
case wasm::HeapType::kStruct:
case wasm::HeapType::kArray:
case wasm::HeapType::kString:
case wasm::HeapType::kExtern:
case wasm::HeapType::kAny:
case wasm::HeapType::kNone:
case wasm::HeapType::kNoFunc:
case wasm::HeapType::kNoExtern:
case wasm::HeapType::kI31:
return node;
default: {
DCHECK(type.has_index());
if (module_->has_signature(type.ref_index())) {
// Typed function. Extract the external function.
if (type.kind() == wasm::kRefNull) {
auto done =
gasm_->MakeLabel(MachineRepresentation::kTaggedPointer);
// Null gets passed as-is.
gasm_->GotoIf(IsNull(node), &done, node);
gasm_->Goto(&done,
gasm_->LoadFromObject(
MachineType::TaggedPointer(), node,
wasm::ObjectAccess::ToTagged(
WasmInternalFunction::kExternalOffset)));
gasm_->Bind(&done);
return done.PhiAt(0);
} else {
return gasm_->LoadFromObject(
MachineType::TaggedPointer(), node,
wasm::ObjectAccess::ToTagged(
WasmInternalFunction::kExternalOffset));
}
} else {
return node;
auto done =
gasm_->MakeLabel(MachineRepresentation::kTaggedPointer);
gasm_->GotoIfNot(IsNull(node, type), &done, node);
gasm_->Goto(&done, LOAD_ROOT(NullValue, null_value));
gasm_->Bind(&done);
return done.PhiAt(0);
}
}
}
@ -6689,7 +6748,10 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
auto done = gasm_->MakeLabel(MachineRepresentation::kTagged);
auto type_error = gasm_->MakeLabel();
gasm_->GotoIf(IsSmi(input), &type_error, BranchHint::kFalse);
if (type.is_nullable()) gasm_->GotoIf(IsNull(input), &done, input);
if (type.is_nullable()) {
gasm_->GotoIf(IsNull(input, wasm::kWasmExternRef), &done,
LOAD_ROOT(WasmNull, wasm_null));
}
Node* map = gasm_->LoadMap(input);
Node* instance_type = gasm_->LoadInstanceType(map);
Node* check = gasm_->Uint32LessThan(
@ -6710,15 +6772,14 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
case wasm::kRef:
case wasm::kRefNull: {
switch (type.heap_representation()) {
// Fast paths for extern and string.
// TODO(7748): Add more/all fast paths?
// TODO(7748): Add more fast paths?
case wasm::HeapType::kExtern:
case wasm::HeapType::kNoExtern:
return input;
case wasm::HeapType::kString:
return BuildCheckString(input, js_context, type);
case wasm::HeapType::kNone:
case wasm::HeapType::kNoFunc:
case wasm::HeapType::kNoExtern:
case wasm::HeapType::kI31:
case wasm::HeapType::kAny:
case wasm::HeapType::kFunc:

View File

@ -253,10 +253,10 @@ class WasmGraphBuilder {
Node* tnode, Node* fnode);
Node* CreateOrMergeIntoEffectPhi(Node* merge, Node* tnode, Node* fnode);
Node* EffectPhi(unsigned count, Node** effects_and_control);
Node* RefNull();
Node* RefNull(wasm::ValueType type);
Node* RefFunc(uint32_t function_index);
Node* AssertNotNull(
Node* object, wasm::WasmCodePosition position,
Node* object, wasm::ValueType type, wasm::WasmCodePosition position,
wasm::TrapReason reason = wasm::TrapReason::kTrapNullDereference);
Node* TraceInstruction(uint32_t mark_id);
Node* Int32Constant(int32_t value);
@ -266,7 +266,9 @@ class WasmGraphBuilder {
Node* Simd128Constant(const uint8_t value[16]);
Node* Binop(wasm::WasmOpcode opcode, Node* left, Node* right,
wasm::WasmCodePosition position = wasm::kNoCodePosition);
// The {type} argument is only required for null-checking operations.
Node* Unop(wasm::WasmOpcode opcode, Node* input,
wasm::ValueType type = wasm::kWasmBottom,
wasm::WasmCodePosition position = wasm::kNoCodePosition);
Node* MemoryGrow(Node* input);
Node* Throw(uint32_t tag_index, const wasm::WasmTag* tag,
@ -352,7 +354,8 @@ class WasmGraphBuilder {
Node** failure_control,
bool is_last_case);
void BrOnNull(Node* ref_object, Node** non_null_node, Node** null_node);
void BrOnNull(Node* ref_object, wasm::ValueType type, Node** non_null_node,
Node** null_node);
Node* Invert(Node* node);
@ -600,7 +603,7 @@ class WasmGraphBuilder {
Node* StringFromCodePoint(Node* code_point);
Node* StringHash(Node* string, CheckForNull null_check,
wasm::WasmCodePosition position);
Node* IsNull(Node* object);
Node* IsNull(Node* object, wasm::ValueType type);
Node* TypeGuard(Node* value, wasm::ValueType type);
bool has_simd() const { return has_simd_; }

View File

@ -96,12 +96,19 @@ Node* WasmGCLowering::RootNode(RootIndex index) {
IsolateData::root_slot_offset(index));
}
Node* WasmGCLowering::Null() { return RootNode(RootIndex::kNullValue); }
Node* WasmGCLowering::Null(wasm::ValueType type) {
return wasm::IsSubtypeOf(type, wasm::kWasmExternRef, module_)
? RootNode(RootIndex::kNullValue)
: RootNode(RootIndex::kWasmNull);
}
Node* WasmGCLowering::IsNull(Node* object) {
Tagged_t static_null = wasm::GetWasmEngine()->compressed_null_value_or_zero();
Node* null_value =
static_null != 0 ? gasm_.UintPtrConstant(static_null) : Null();
Node* WasmGCLowering::IsNull(Node* object, wasm::ValueType type) {
Tagged_t static_null =
wasm::GetWasmEngine()->compressed_wasm_null_value_or_zero();
Node* null_value = !wasm::IsSubtypeOf(type, wasm::kWasmExternRef, module_) &&
static_null != 0
? gasm_.UintPtrConstant(static_null)
: Null(type);
return gasm_.TaggedEqual(object, null_value);
}
@ -131,8 +138,8 @@ Reduction WasmGCLowering::ReduceWasmTypeCheck(Node* node) {
// being a wasm object and return 0 (failure).
if (object_can_be_null && (!is_cast_from_any || config.to.is_nullable())) {
const int kResult = config.to.is_nullable() ? 1 : 0;
gasm_.GotoIf(IsNull(object), &end_label, BranchHint::kFalse,
gasm_.Int32Constant(kResult));
gasm_.GotoIf(IsNull(object, wasm::kWasmAnyRef), &end_label,
BranchHint::kFalse, gasm_.Int32Constant(kResult));
}
if (object_can_be_i31) {
@ -209,7 +216,7 @@ Reduction WasmGCLowering::ReduceWasmTypeCast(Node* node) {
// failure. In that case the instance type check will identify null as not
// being a wasm object and trap.
if (object_can_be_null && (!is_cast_from_any || config.to.is_nullable())) {
Node* is_null = IsNull(object);
Node* is_null = IsNull(object, wasm::kWasmAnyRef);
if (config.to.is_nullable()) {
gasm_.GotoIf(is_null, &end_label, BranchHint::kFalse);
} else if (!v8_flags.experimental_wasm_skip_null_checks) {
@ -276,8 +283,9 @@ Reduction WasmGCLowering::ReduceAssertNotNull(Node* node) {
Node* control = NodeProperties::GetControlInput(node);
Node* object = NodeProperties::GetValueInput(node, 0);
gasm_.InitializeEffectControl(effect, control);
auto op_parameter = OpParameter<AssertNotNullParameters>(node->op());
if (!v8_flags.experimental_wasm_skip_null_checks) {
gasm_.TrapIf(IsNull(object), TrapIdOf(node->op()));
gasm_.TrapIf(IsNull(object, op_parameter.type), op_parameter.trap_id);
}
ReplaceWithValue(node, object, gasm_.effect(), gasm_.control());
@ -287,19 +295,23 @@ Reduction WasmGCLowering::ReduceAssertNotNull(Node* node) {
Reduction WasmGCLowering::ReduceNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kNull);
return Replace(Null());
auto type = OpParameter<wasm::ValueType>(node->op());
return Replace(Null(type));
}
Reduction WasmGCLowering::ReduceIsNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kIsNull);
Node* object = NodeProperties::GetValueInput(node, 0);
return Replace(IsNull(object));
auto type = OpParameter<wasm::ValueType>(node->op());
return Replace(IsNull(object, type));
}
Reduction WasmGCLowering::ReduceIsNotNull(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kIsNotNull);
Node* object = NodeProperties::GetValueInput(node, 0);
return Replace(gasm_.Word32Equal(IsNull(object), gasm_.Int32Constant(0)));
auto type = OpParameter<wasm::ValueType>(node->op());
return Replace(
gasm_.Word32Equal(IsNull(object, type), gasm_.Int32Constant(0)));
}
Reduction WasmGCLowering::ReduceRttCanon(Node* node) {
@ -328,13 +340,18 @@ Reduction WasmGCLowering::ReduceWasmExternInternalize(Node* node) {
UNREACHABLE();
}
// TODO(7748): WasmExternExternalize is a no-op. Consider removing it.
Reduction WasmGCLowering::ReduceWasmExternExternalize(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kWasmExternExternalize);
Node* object = NodeProperties::GetValueInput(node, 0);
ReplaceWithValue(node, object);
Node* object = node->InputAt(0);
gasm_.InitializeEffectControl(NodeProperties::GetEffectInput(node),
NodeProperties::GetControlInput(node));
auto label = gasm_.MakeLabel(MachineRepresentation::kTagged);
gasm_.GotoIfNot(IsNull(object, wasm::kWasmAnyRef), &label, object);
gasm_.Goto(&label, Null(wasm::kWasmExternRef));
gasm_.Bind(&label);
ReplaceWithValue(node, label.PhiAt(0), gasm_.effect(), gasm_.control());
node->Kill();
return Replace(object);
return Replace(label.PhiAt(0));
}
Reduction WasmGCLowering::ReduceWasmStructGet(Node* node) {
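For orientation, a minimal standalone sketch of the rule the lowering above now encodes (the enum, constants, and helpers below are invented for illustration and are not V8 API): references in the externref hierarchy keep comparing against the JavaScript null sentinel, while every other wasm reference type compares against the dedicated wasm-null object.

#include <cassert>
#include <cstdint>
#include <iostream>

// Hypothetical stand-ins for the two null sentinels in the isolate's root table.
constexpr uintptr_t kJsNullSentinel = 0x1000;    // models RootIndex::kNullValue
constexpr uintptr_t kWasmNullSentinel = 0x2000;  // models RootIndex::kWasmNull

// Drastically simplified stand-in for wasm::ValueType.
enum class RefType { kExternRef, kNullExternRef, kAnyRef, kFuncRef, kStructRef };

bool IsInExternRefHierarchy(RefType t) {
  return t == RefType::kExternRef || t == RefType::kNullExternRef;
}

// Mirrors the shape of Null(type): the sentinel is chosen by the static type.
uintptr_t NullSentinelFor(RefType t) {
  return IsInExternRefHierarchy(t) ? kJsNullSentinel : kWasmNullSentinel;
}

// Mirrors IsNull(object, type): compare against the sentinel for that type.
bool IsNull(uintptr_t object, RefType t) {
  return object == NullSentinelFor(t);
}

int main() {
  assert(IsNull(kWasmNullSentinel, RefType::kStructRef));
  assert(!IsNull(kWasmNullSentinel, RefType::kExternRef));
  assert(IsNull(kJsNullSentinel, RefType::kExternRef));
  std::cout << "type-dependent null checks behave as expected\n";
}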

View File

@ -49,8 +49,8 @@ class WasmGCLowering final : public AdvancedReducer {
Reduction ReduceStringPrepareForGetCodeunit(Node* node);
Node* IsolateRoot();
Node* RootNode(RootIndex index);
Node* Null();
Node* IsNull(Node* object);
Node* Null(wasm::ValueType type);
Node* IsNull(Node* object, wasm::ValueType type);
Node* BuildLoadExternalPointerFromObject(Node* object, int offset,
ExternalPointerTag tag);
WasmGraphAssembler gasm_;

View File

@ -301,9 +301,9 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCast(Node* node) {
return Changed(node);
} else {
gasm_.InitializeEffectControl(effect, control);
return Replace(
SetType(gasm_.AssertNotNull(object, TrapId::kTrapIllegalCast),
object_type.type.AsNonNull()));
return Replace(SetType(gasm_.AssertNotNull(object, object_type.type,
TrapId::kTrapIllegalCast),
object_type.type.AsNonNull()));
}
}
@ -314,11 +314,12 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCast(Node* node) {
// A cast between unrelated types can only succeed if the argument is null.
// Otherwise, it always fails.
Node* non_trapping_condition = object_type.type.is_nullable() && to_nullable
? gasm_.IsNull(object)
? gasm_.IsNull(object, object_type.type)
: gasm_.Int32Constant(0);
gasm_.TrapUnless(SetType(non_trapping_condition, wasm::kWasmI32),
TrapId::kTrapIllegalCast);
Node* null_node = SetType(gasm_.Null(), wasm::ToNullSentinel(object_type));
Node* null_node = SetType(gasm_.Null(object_type.type),
wasm::ToNullSentinel(object_type));
ReplaceWithValue(node, null_node, gasm_.effect(), gasm_.control());
node->Kill();
return Replace(null_node);
@ -360,7 +361,7 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCheck(Node* node) {
// Type cast will fail only on null.
gasm_.InitializeEffectControl(effect, control);
Node* condition = SetType(object_type.type.is_nullable() && !null_succeeds
? gasm_.IsNotNull(object)
? gasm_.IsNotNull(object, object_type.type)
: gasm_.Int32Constant(1),
wasm::kWasmI32);
ReplaceWithValue(node, condition);
@ -377,7 +378,8 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCheck(Node* node) {
if (null_succeeds && object_type.type.is_nullable()) {
// The cast only succeeds in case of null.
gasm_.InitializeEffectControl(effect, control);
condition = SetType(gasm_.IsNull(object), wasm::kWasmI32);
condition =
SetType(gasm_.IsNull(object, object_type.type), wasm::kWasmI32);
} else {
// The cast never succeeds.
condition = SetType(gasm_.Int32Constant(0), wasm::kWasmI32);

View File

@ -375,21 +375,23 @@ Node* WasmGraphAssembler::WasmTypeCast(Node* object, Node* rtt,
effect(), control()));
}
Node* WasmGraphAssembler::Null() {
return AddNode(graph()->NewNode(simplified_.Null()));
Node* WasmGraphAssembler::Null(wasm::ValueType type) {
return AddNode(graph()->NewNode(simplified_.Null(type)));
}
Node* WasmGraphAssembler::IsNull(Node* object) {
return AddNode(graph()->NewNode(simplified_.IsNull(), object, control()));
Node* WasmGraphAssembler::IsNull(Node* object, wasm::ValueType type) {
return AddNode(graph()->NewNode(simplified_.IsNull(type), object, control()));
}
Node* WasmGraphAssembler::IsNotNull(Node* object) {
return AddNode(graph()->NewNode(simplified_.IsNotNull(), object, control()));
Node* WasmGraphAssembler::IsNotNull(Node* object, wasm::ValueType type) {
return AddNode(
graph()->NewNode(simplified_.IsNotNull(type), object, control()));
}
Node* WasmGraphAssembler::AssertNotNull(Node* object, TrapId trap_id) {
return AddNode(graph()->NewNode(simplified_.AssertNotNull(trap_id), object,
effect(), control()));
Node* WasmGraphAssembler::AssertNotNull(Node* object, wasm::ValueType type,
TrapId trap_id) {
return AddNode(graph()->NewNode(simplified_.AssertNotNull(type, trap_id),
object, effect(), control()));
}
Node* WasmGraphAssembler::WasmExternInternalize(Node* object) {

View File

@ -246,13 +246,13 @@ class WasmGraphAssembler : public GraphAssembler {
Node* WasmTypeCast(Node* object, Node* rtt, WasmTypeCheckConfig config);
Node* Null();
Node* Null(wasm::ValueType type);
Node* IsNull(Node* object);
Node* IsNull(Node* object, wasm::ValueType type);
Node* IsNotNull(Node* object);
Node* IsNotNull(Node* object, wasm::ValueType type);
Node* AssertNotNull(Node* object, TrapId trap_id);
Node* AssertNotNull(Node* object, wasm::ValueType type, TrapId trap_id);
Node* WasmExternInternalize(Node* object);

View File

@ -948,6 +948,10 @@ Handle<WasmValueObject> WasmValueObject::New(
isolate);
}
t = GetRefTypeName(isolate, value.type(), module_object);
} else if (ref->IsWasmNull()) {
// TODO(manoskouk): Is this value correct?
v = isolate->factory()->null_value();
t = GetRefTypeName(isolate, value.type(), module_object);
} else if (ref->IsJSFunction() || ref->IsSmi() || ref->IsNull() ||
ref->IsString() ||
value.type().is_reference_to(wasm::HeapType::kExtern) ||

View File

@ -5,7 +5,6 @@
#include "src/extensions/gc-extension.h"
#include "include/v8-isolate.h"
#include "include/v8-microtask-queue.h"
#include "include/v8-object.h"
#include "include/v8-persistent-handle.h"
#include "include/v8-primitive.h"
@ -122,8 +121,6 @@ class AsyncGC final : public CancelableTask {
InvokeGC(isolate_, ExecutionType::kAsync, type_);
auto resolver = v8::Local<v8::Promise::Resolver>::New(isolate_, resolver_);
auto ctx = Local<v8::Context>::New(isolate_, ctx_);
v8::MicrotasksScope microtasks_scope(
ctx, v8::MicrotasksScope::kDoNotRunMicrotasks);
resolver->Resolve(ctx, v8::Undefined(isolate_)).ToChecked();
}

View File

@ -1022,7 +1022,8 @@ DEFINE_BOOL(turbo_collect_feedback_in_generic_lowering, false,
DEFINE_BOOL(isolate_script_cache_ageing, true,
"enable ageing of the isolate script cache.")
DEFINE_BOOL(turboshaft, false, "enable TurboFan's Turboshaft phases for JS")
DEFINE_EXPERIMENTAL_FEATURE(turboshaft,
"enable TurboFan's Turboshaft phases for JS")
DEFINE_BOOL(turboshaft_trace_reduction, false,
"trace individual Turboshaft reduction steps")
DEFINE_BOOL(turboshaft_wasm, false,

View File

@ -2575,15 +2575,13 @@ void Heap::MinorMarkCompact() {
DCHECK(new_space());
DCHECK(!incremental_marking()->IsMajorMarking());
PauseAllocationObserversScope pause_observers(this);
SetGCState(MINOR_MARK_COMPACT);
TRACE_GC(tracer(), GCTracer::Scope::MINOR_MC);
PauseAllocationObserversScope pause_observers(this);
AlwaysAllocateScope always_allocate(this);
minor_mark_compact_collector_->Prepare();
SetGCState(MINOR_MARK_COMPACT);
minor_mark_compact_collector_->CollectGarbage();
SetGCState(NOT_IN_GC);
}

View File

@ -5860,41 +5860,7 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
MinorMarkCompactCollector* const collector_;
};
void MinorMarkCompactCollector::Prepare() {
DCHECK(!sweeper()->AreSweeperTasksRunning() &&
sweeper()->IsSweepingDoneForSpace(NEW_SPACE));
// Probably requires more.
if (!heap()->incremental_marking()->IsMarking()) {
if (heap()->cpp_heap()) {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_EMBEDDER_PROLOGUE);
// InitializeTracing should be called before visitor initialization in
// StartMarking.
CppHeap::From(heap()->cpp_heap())
->InitializeTracing(CppHeap::CollectionType::kMinor);
}
StartMarking();
if (heap()->cpp_heap()) {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_EMBEDDER_PROLOGUE);
// StartTracing immediately starts marking which requires V8 worklists to
// be set up.
CppHeap::From(heap()->cpp_heap())->StartTracing();
}
}
heap()->new_space()->FreeLinearAllocationArea();
}
void MinorMarkCompactCollector::StartMarking() {
auto* cpp_heap = CppHeap::From(heap_->cpp_heap());
local_marking_worklists_ = std::make_unique<MarkingWorklists::Local>(
marking_worklists(),
cpp_heap ? cpp_heap->CreateCppMarkingStateForMutatorThread()
: MarkingWorklists::Local::kNoCppMarkingState);
main_marking_visitor_ = std::make_unique<YoungGenerationMainMarkingVisitor>(
heap()->isolate(), marking_state(), local_marking_worklists());
#ifdef VERIFY_HEAP
if (v8_flags.verify_heap) {
for (Page* page : *heap()->new_space()) {
@ -5902,6 +5868,28 @@ void MinorMarkCompactCollector::StartMarking() {
}
}
#endif // VERIFY_HEAP
auto* cpp_heap = CppHeap::From(heap_->cpp_heap());
if (cpp_heap && cpp_heap->generational_gc_supported()) {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_EMBEDDER_PROLOGUE);
// InitializeTracing should be called before visitor initialization in
// StartMarking.
cpp_heap->InitializeTracing(CppHeap::CollectionType::kMinor);
}
local_marking_worklists_ = std::make_unique<MarkingWorklists::Local>(
marking_worklists(),
cpp_heap ? cpp_heap->CreateCppMarkingStateForMutatorThread()
: MarkingWorklists::Local::kNoCppMarkingState);
main_marking_visitor_ = std::make_unique<YoungGenerationMainMarkingVisitor>(
heap()->isolate(), marking_state(), local_marking_worklists());
if (cpp_heap && cpp_heap->generational_gc_supported()) {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_EMBEDDER_PROLOGUE);
// StartTracing immediately starts marking which requires V8 worklists to
// be set up.
cpp_heap->StartTracing();
}
}
void MinorMarkCompactCollector::Finish() {
@ -5956,6 +5944,10 @@ void MinorMarkCompactCollector::CollectGarbage() {
// Minor MC does not support processing the ephemeron remembered set.
DCHECK(heap()->ephemeron_remembered_set_.empty());
DCHECK(!heap()->array_buffer_sweeper()->sweeping_in_progress());
DCHECK(!sweeper()->AreSweeperTasksRunning());
DCHECK(sweeper()->IsSweepingDoneForSpace(NEW_SPACE));
heap()->new_space()->FreeLinearAllocationArea();
MarkLiveObjects();
ClearNonLiveReferences();
@ -6339,12 +6331,11 @@ void MinorMarkCompactCollector::MarkLiveObjectsInParallel(
void MinorMarkCompactCollector::MarkLiveObjects() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK);
DCHECK_NOT_NULL(local_marking_worklists_);
DCHECK_NOT_NULL(main_marking_visitor_);
const bool was_marked_incrementally =
!heap_->incremental_marking()->IsStopped();
if (was_marked_incrementally) {
if (!was_marked_incrementally) {
StartMarking();
} else {
TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_FINISH_INCREMENTAL);
auto* incremental_marking = heap_->incremental_marking();
@ -6357,6 +6348,9 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
FinishConcurrentMarking();
}
DCHECK_NOT_NULL(local_marking_worklists_);
DCHECK_NOT_NULL(main_marking_visitor_);
RootMarkingVisitor root_visitor(this);
MarkLiveObjectsInParallel(&root_visitor, was_marked_incrementally);

View File

@ -699,7 +699,7 @@ class MinorMarkCompactCollector final : public CollectorBase {
void SetUp() final;
void TearDown() final;
void CollectGarbage() final;
void Prepare() final;
void Prepare() final {}
void StartMarking() final;
void MakeIterable(Page* page, FreeSpaceTreatmentMode free_space_mode);

View File

@ -70,7 +70,8 @@ namespace internal {
IF_WASM(V, WasmSuspenderObject) \
IF_WASM(V, WasmResumeData) \
IF_WASM(V, WasmTypeInfo) \
IF_WASM(V, WasmContinuationObject)
IF_WASM(V, WasmContinuationObject) \
IF_WASM(V, WasmNull)
#define FORWARD_DECLARE(TypeName) class TypeName;
TYPED_VISITOR_ID_LIST(FORWARD_DECLARE)

View File

@ -274,6 +274,8 @@ class ReadOnlySpace : public BaseSpace {
size_t capacity_;
const size_t area_size_;
friend class Heap;
};
class SharedReadOnlySpace : public ReadOnlySpace {

View File

@ -588,6 +588,7 @@ bool Heap::CreateInitialReadOnlyMaps() {
wasm_type_info)
IF_WASM(ALLOCATE_MAP, WASM_CONTINUATION_OBJECT_TYPE,
WasmContinuationObject::kSize, wasm_continuation_object)
IF_WASM(ALLOCATE_MAP, WASM_NULL_TYPE, kVariableSizeSentinel, wasm_null);
ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
}
@ -984,6 +985,56 @@ void Heap::CreateInitialReadOnlyObjects() {
Handle<ScopeInfo> shadow_realm_scope_info =
ScopeInfo::CreateForShadowRealmNativeContext(isolate());
set_shadow_realm_scope_info(*shadow_realm_scope_info);
// Initialize the wasm null_value.
#ifdef V8_ENABLE_WEBASSEMBLY
// Allocate the wasm-null object. It is a regular V8 heap object contained in
// a V8 page. It is large enough so that its payload (other than its map word)
// can be mprotected on OS page granularity.
// We adjust the layout such that we have a filler object in the current OS
// page, and the wasm-null map word at the end of the current OS page. The
// payload then is contained on a separate OS page which can be protected.
// Ensure all of the following lands on the same V8 page.
constexpr int kOffsetAfterMapWord = HeapObject::kMapOffset + kTaggedSize;
constexpr size_t kLargestPossibleOSPageSize = 64 * KB;
static_assert(kLargestPossibleOSPageSize >= kMinimumOSPageSize);
read_only_space_->EnsureSpaceForAllocation(
kLargestPossibleOSPageSize + WasmNull::kSize - kOffsetAfterMapWord);
Address next_page =
RoundUp(read_only_space_->top(), kLargestPossibleOSPageSize);
CHECK_EQ(kOffsetAfterMapWord % kObjectAlignment, 0);
// Add some filler to end up right before an OS page boundary.
{
int filler_size = static_cast<int>(next_page - read_only_space_->top() -
kOffsetAfterMapWord);
HeapObject filler =
allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
filler_size, AllocationType::kReadOnly, AllocationOrigin::kRuntime,
AllocationAlignment::kTaggedAligned);
CreateFillerObjectAt(filler.address(), filler_size,
ClearFreedMemoryMode::kClearFreedMemory);
CHECK_EQ(read_only_space_->top() + kOffsetAfterMapWord, next_page);
}
// Finally, allocate the wasm-null object.
{
HeapObject obj;
CHECK(AllocateRaw(WasmNull::kSize, AllocationType::kReadOnly).To(&obj));
obj.set_map_after_allocation(roots.wasm_null_map(), SKIP_WRITE_BARRIER);
MemsetUint32(
reinterpret_cast<uint32_t*>(obj.ptr() - kHeapObjectTag + kTaggedSize),
0, (WasmNull::kSize - kTaggedSize) / sizeof(uint32_t));
set_wasm_null(WasmNull::cast(obj));
CHECK_EQ(read_only_space_->top() % kLargestPossibleOSPageSize, 0);
}
#endif
// We prefer to fit all of read-only space in one page.
CHECK_EQ(read_only_space_->pages().size(), 1);
}
void Heap::CreateInitialMutableObjects() {
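To make the alignment reasoning in the comment above concrete, here is a small self-contained check of the arithmetic, using illustrative constants rather than V8's real globals (kTaggedSize, WasmNull::kSize, page sizes): the filler ends exactly one map word before an OS page boundary, so the zeroed payload occupies whole, protectable OS pages.

#include <cstdint>
#include <cstdio>

// Illustrative constants matching the shape of the comment above.
constexpr uintptr_t kTaggedSize = 8;
constexpr uintptr_t kOffsetAfterMapWord = kTaggedSize;  // map word only
constexpr uintptr_t kLargestPossibleOSPageSize = 64 * 1024;
constexpr uintptr_t kWasmNullSize = 64 * 1024 + kTaggedSize;

int main() {
  // Allocation top somewhere inside the current OS page.
  uintptr_t top = 0x10000000 + 0x123 * kTaggedSize;
  uintptr_t next_page = (top + kLargestPossibleOSPageSize - 1) &
                        ~(kLargestPossibleOSPageSize - 1);
  // Filler is sized so that top + filler + map word == next_page, i.e. the
  // wasm-null map word is the last tagged slot of the current OS page.
  uintptr_t filler_size = next_page - top - kOffsetAfterMapWord;
  uintptr_t wasm_null_start = top + filler_size;
  std::printf("map word at %#lx, payload starts at %#lx\n",
              (unsigned long)wasm_null_start,
              (unsigned long)(wasm_null_start + kTaggedSize));
  // The payload then starts on an OS page boundary and covers whole pages.
  bool payload_page_aligned =
      (wasm_null_start + kTaggedSize) % kLargestPossibleOSPageSize == 0;
  bool payload_whole_pages =
      (kWasmNullSize - kTaggedSize) % kLargestPossibleOSPageSize == 0;
  return payload_page_aligned && payload_whole_pages ? 0 : 1;
}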

View File

@ -1150,8 +1150,12 @@ void KeyedStoreGenericAssembler::KeyedStoreGeneric(
value);
} else {
DCHECK(IsDefineKeyedOwnInLiteral());
TailCallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral_Simple, context,
receiver, key, value);
TNode<Smi> flags =
SmiConstant(DefineKeyedOwnPropertyInLiteralFlag::kNoFlags);
// TODO(v8:10047): Use TaggedIndexConstant here once TurboFan supports it.
TNode<Smi> slot = SmiConstant(FeedbackSlot::Invalid().ToInt());
TailCallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral, context,
receiver, key, value, flags, UndefinedConstant(), slot);
}
}
}
@ -1238,8 +1242,12 @@ void KeyedStoreGenericAssembler::StoreProperty(TNode<Context> context,
BIND(&slow);
{
if (IsDefineKeyedOwnInLiteral()) {
CallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral_Simple, context,
receiver, unique_name, value);
TNode<Smi> flags =
SmiConstant(DefineKeyedOwnPropertyInLiteralFlag::kNoFlags);
// TODO(v8:10047): Use TaggedIndexConstant here once TurboFan supports it.
TNode<Smi> slot = SmiConstant(FeedbackSlot::Invalid().ToInt());
CallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral, context, receiver,
unique_name, value, flags, p.vector(), slot);
} else {
CallRuntime(Runtime::kSetKeyedProperty, context, receiver, unique_name,
value);

View File

@ -144,6 +144,7 @@ class RuntimeCallTimer final {
V(BigUint64Array_New) \
V(BooleanObject_BooleanValue) \
V(BooleanObject_New) \
V(Context_DeepFreeze) \
V(Context_New) \
V(Context_NewRemoteContext) \
V(DataView_New) \

View File

@ -142,6 +142,9 @@ bool RootToBoolean(RootIndex index) {
case RootIndex::kHoleNanValue:
case RootIndex::kMinusZeroValue:
case RootIndex::kempty_string:
#ifdef V8_ENABLE_WEBASSEMBLY
case RootIndex::kWasmNull:
#endif
return false;
default:
return true;

View File

@ -52,16 +52,13 @@ UNIQUE_INSTANCE_TYPE_MAP_LIST_GENERATOR(INSTANCE_TYPE_MAP, _)
inline constexpr base::Optional<RootIndex> UniqueMapOfInstanceType(
InstanceType type) {
switch (type) {
#define INSTANCE_TYPE_CHECK(it, forinstancetype) \
case forinstancetype: \
if (type == forinstancetype) { \
return InstanceTypeChecker::UniqueMapOfInstanceType< \
InstanceTypeChecker::InstanceTypeTraits::it>(); \
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECK);
#undef INSTANCE_TYPE_CHECK
default: {
}
}
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECK);
#undef INSTANCE_TYPE_CHECK
return {};
}

View File

@ -411,6 +411,8 @@ VisitorId Map::GetVisitorId(Map map) {
return kVisitWasmCapiFunctionData;
case WASM_SUSPENDER_OBJECT_TYPE:
return kVisitWasmSuspenderObject;
case WASM_NULL_TYPE:
return kVisitWasmNull;
#endif // V8_ENABLE_WEBASSEMBLY
#define MAKE_TQ_CASE(TYPE, Name) \

View File

@ -30,7 +30,8 @@ enum InstanceType : uint16_t;
V(CoverageInfo) \
V(DataObject) \
V(FeedbackMetadata) \
V(FixedDoubleArray)
V(FixedDoubleArray) \
IF_WASM(V, WasmNull)
#define POINTER_VISITOR_ID_LIST(V) \
V(AccessorInfo) \

View File

@ -280,6 +280,7 @@ class ZoneForwardList;
IF_WASM(V, WasmValueObject) \
IF_WASM(V, WasmSuspenderObject) \
IF_WASM(V, WasmContinuationObject) \
IF_WASM(V, WasmNull) \
V(WeakFixedArray) \
V(WeakArrayList) \
V(WeakCell) \

View File

@ -881,6 +881,19 @@ class WasmStruct::BodyDescriptor final : public BodyDescriptorBase {
}
};
class WasmNull::BodyDescriptor final : public BodyDescriptorBase {
public:
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
UNREACHABLE();
}
template <typename ObjectVisitor>
static inline void IterateBody(Map map, HeapObject obj, int object_size,
ObjectVisitor* v) {}
static inline int SizeOf(Map map, HeapObject obj) { return WasmNull::kSize; }
};
#endif // V8_ENABLE_WEBASSEMBLY
class ExternalOneByteString::BodyDescriptor final : public BodyDescriptorBase {
@ -1306,6 +1319,8 @@ auto BodyDescriptorApply(InstanceType type, Args&&... args) {
#if V8_ENABLE_WEBASSEMBLY
case WASM_INSTANCE_OBJECT_TYPE:
return CALL_APPLY(WasmInstanceObject);
case WASM_NULL_TYPE:
return CALL_APPLY(WasmNull);
#endif // V8_ENABLE_WEBASSEMBLY
case JS_WEAK_MAP_TYPE:
case JS_WEAK_SET_TYPE:

View File

@ -651,6 +651,9 @@ bool Object::BooleanValue(IsolateT* isolate) {
DCHECK(IsHeapObject());
if (IsBoolean()) return IsTrue(isolate);
if (IsNullOrUndefined(isolate)) return false;
#ifdef V8_ENABLE_WEBASSEMBLY
if (IsWasmNull()) return false;
#endif
if (IsUndetectable()) return false; // Undetectable object is false.
if (IsString()) return String::cast(*this).length() != 0;
if (IsHeapNumber()) return DoubleToBoolean(HeapNumber::cast(*this).value());
@ -2292,6 +2295,9 @@ int HeapObject::SizeFromMap(Map map) const {
if (instance_type == WASM_ARRAY_TYPE) {
return WasmArray::SizeFor(map, WasmArray::unchecked_cast(*this).length());
}
if (instance_type == WASM_NULL_TYPE) {
return WasmNull::kSize;
}
#endif // V8_ENABLE_WEBASSEMBLY
DCHECK_EQ(instance_type, EMBEDDER_DATA_ARRAY_TYPE);
return EmbedderDataArray::SizeFor(

View File

@ -27,6 +27,10 @@
#include "src/roots/roots.h"
#include "src/roots/static-roots.h"
#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects.h"
#endif
namespace v8 {
namespace internal {

View File

@ -139,6 +139,7 @@ class Symbol;
IF_WASM(V, Map, wasm_resume_data_map, WasmResumeDataMap) \
IF_WASM(V, Map, wasm_type_info_map, WasmTypeInfoMap) \
IF_WASM(V, Map, wasm_continuation_object_map, WasmContinuationObjectMap) \
IF_WASM(V, Map, wasm_null_map, WasmNullMap) \
V(Map, weak_fixed_array_map, WeakFixedArrayMap) \
V(Map, weak_array_list_map, WeakArrayListMap) \
V(Map, ephemeron_hash_table_map, EphemeronHashTableMap) \
@ -225,7 +226,8 @@ class Symbol;
V(ScopeInfo, shadow_realm_scope_info, ShadowRealmScopeInfo) \
V(RegisteredSymbolTable, empty_symbol_table, EmptySymbolTable) \
/* Hash seed */ \
V(ByteArray, hash_seed, HashSeed)
V(ByteArray, hash_seed, HashSeed) \
IF_WASM(V, WasmNull, wasm_null, WasmNull)
// Mutable roots that are known to be immortal immovable, for which we can
// safely skip write barriers.

File diff suppressed because it is too large

View File

@ -911,32 +911,6 @@ RUNTIME_FUNCTION(Runtime_SetNamedProperty) {
StoreOrigin::kNamed));
}
// Similar to DefineKeyedOwnPropertyInLiteral, but does not update feedback, and
// and does not have a flags parameter for performing SetFunctionName().
//
// Currently, this is used for ObjectLiteral spread properties in CloneObjectIC
// and for array literal creations in StoreInArrayLiteralIC.
// TODO(v8:12548): merge this into DefineKeyedOwnPropertyInLiteral.
RUNTIME_FUNCTION(Runtime_DefineKeyedOwnPropertyInLiteral_Simple) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
Handle<JSReceiver> object = args.at<JSReceiver>(0);
Handle<Object> key = args.at(1);
Handle<Object> value = args.at(2);
PropertyKey lookup_key(isolate, key);
LookupIterator it(isolate, object, lookup_key, LookupIterator::OWN);
Maybe<bool> result = JSObject::DefineOwnPropertyIgnoreAttributes(
&it, value, NONE, Just(kDontThrow));
RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
DCHECK(result.IsJust());
USE(result);
return *value;
}
namespace {
// ES6 section 12.5.4.
@ -1137,20 +1111,22 @@ RUNTIME_FUNCTION(Runtime_SetFunctionName) {
RUNTIME_FUNCTION(Runtime_DefineKeyedOwnPropertyInLiteral) {
HandleScope scope(isolate);
DCHECK_EQ(6, args.length());
Handle<JSObject> object = args.at<JSObject>(0);
Handle<Name> name = args.at<Name>(1);
Handle<JSReceiver> object = args.at<JSReceiver>(0);
Handle<Object> name = args.at(1);
Handle<Object> value = args.at(2);
int flag = args.smi_value_at(3);
Handle<HeapObject> maybe_vector = args.at<HeapObject>(4);
int index = args.tagged_index_value_at(5);
if (!maybe_vector->IsUndefined()) {
int index = args.tagged_index_value_at(5);
DCHECK(name->IsName());
DCHECK(maybe_vector->IsFeedbackVector());
Handle<FeedbackVector> vector = Handle<FeedbackVector>::cast(maybe_vector);
FeedbackNexus nexus(vector, FeedbackVector::ToSlot(index));
if (nexus.ic_state() == InlineCacheState::UNINITIALIZED) {
if (name->IsUniqueName()) {
nexus.ConfigureMonomorphic(name, handle(object->map(), isolate),
nexus.ConfigureMonomorphic(Handle<Name>::cast(name),
handle(object->map(), isolate),
MaybeObjectHandle());
} else {
nexus.ConfigureMegamorphic(IcCheckType::kProperty);
@ -1163,17 +1139,14 @@ RUNTIME_FUNCTION(Runtime_DefineKeyedOwnPropertyInLiteral) {
}
DefineKeyedOwnPropertyInLiteralFlags flags(flag);
PropertyAttributes attrs =
(flags & DefineKeyedOwnPropertyInLiteralFlag::kDontEnum)
? PropertyAttributes::DONT_ENUM
: PropertyAttributes::NONE;
if (flags & DefineKeyedOwnPropertyInLiteralFlag::kSetFunctionName) {
DCHECK(name->IsName());
DCHECK(value->IsJSFunction());
Handle<JSFunction> function = Handle<JSFunction>::cast(value);
DCHECK(!function->shared().HasSharedName());
Handle<Map> function_map(function->map(), isolate);
if (!JSFunction::SetName(function, name,
if (!JSFunction::SetName(function, Handle<Name>::cast(name),
isolate->factory()->empty_string())) {
return ReadOnlyRoots(isolate).exception();
}
@ -1186,7 +1159,7 @@ RUNTIME_FUNCTION(Runtime_DefineKeyedOwnPropertyInLiteral) {
LookupIterator it(isolate, object, key, object, LookupIterator::OWN);
Maybe<bool> result = JSObject::DefineOwnPropertyIgnoreAttributes(
&it, value, attrs, Just(kDontThrow));
&it, value, PropertyAttributes::NONE, Just(kDontThrow));
// Cannot fail since this should only be called when
// creating an object literal.
RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);

View File

@ -951,7 +951,7 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8) {
if (utf8_variant == unibrow::Utf8Variant::kUtf8NoTrap) {
DCHECK(!isolate->has_pending_exception());
if (result_string.is_null()) {
return *isolate->factory()->null_value();
return *isolate->factory()->wasm_null();
}
return *result_string.ToHandleChecked();
}
@ -976,7 +976,7 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8Array) {
if (utf8_variant == unibrow::Utf8Variant::kUtf8NoTrap) {
DCHECK(!isolate->has_pending_exception());
if (result_string.is_null()) {
return *isolate->factory()->null_value();
return *isolate->factory()->wasm_null();
}
return *result_string.ToHandleChecked();
}

View File

@ -351,7 +351,6 @@ namespace internal {
F(DefineObjectOwnProperty, 3, 1) \
F(SetNamedProperty, 3, 1) \
F(SetOwnPropertyIgnoreAttributes, 4, 1) \
F(DefineKeyedOwnPropertyInLiteral_Simple, 3, 1) \
F(ShrinkNameDictionary, 1, 1) \
F(ShrinkSwissNameDictionary, 1, 1) \
F(ToFastProperties, 1, 1) \

View File

@ -620,7 +620,7 @@ class CallStatsScopedTracer {
Data* p_data_;
Data data_;
};
#endif // defined(V8_RUNTIME_CALL_STATS)
#endif // V8_RUNTIME_CALL_STATS
} // namespace tracing
} // namespace internal
@ -660,8 +660,9 @@ class CallStatsScopedTracer {
} PERFETTO_UID(scoped_event) { \
{ isolate, 0 } \
}
#endif // defined(V8_RUNTIME_CALL_STATS)
#else // V8_RUNTIME_CALL_STATS
#define TRACE_EVENT_CALL_STATS_SCOPED(isolate, category, name)
#endif // V8_RUNTIME_CALL_STATS
#endif // defined(V8_USE_PERFETTO)
#endif // V8_TRACING_TRACE_EVENT_H_

View File

@ -923,14 +923,22 @@ class LiftoffCompiler {
// Initialize all reference type locals with ref.null.
if (has_refs) {
Register null_ref_reg = __ GetUnusedRegister(kGpReg, {}).gp();
LoadNullValue(null_ref_reg, {});
LiftoffRegList pinned;
Register null_ref_reg =
pinned.set(__ GetUnusedRegister(kGpReg, pinned).gp());
Register wasm_null_ref_reg =
pinned.set(__ GetUnusedRegister(kGpReg, pinned).gp());
LoadNullValue(null_ref_reg, pinned, kWasmExternRef);
LoadNullValue(wasm_null_ref_reg, pinned, kWasmAnyRef);
for (uint32_t local_index = num_params; local_index < __ num_locals();
++local_index) {
ValueKind kind = __ local_kind(local_index);
if (is_reference(kind)) {
ValueType type = decoder->local_types_[local_index];
if (type.is_reference()) {
__ Spill(__ cache_state()->stack_state[local_index].offset(),
LiftoffRegister(null_ref_reg), kind);
IsSubtypeOf(type, kWasmExternRef, decoder->module_)
? LiftoffRegister(null_ref_reg)
: LiftoffRegister(wasm_null_ref_reg),
type.kind());
}
}
}
@ -1712,11 +1720,11 @@ class LiftoffCompiler {
__ PushRegister(dst_kind, dst);
}
void EmitIsNull(WasmOpcode opcode) {
void EmitIsNull(WasmOpcode opcode, ValueType type) {
LiftoffRegList pinned;
LiftoffRegister ref = pinned.set(__ PopToRegister());
LiftoffRegister null = __ GetUnusedRegister(kGpReg, pinned);
LoadNullValueForCompare(null.gp(), pinned);
LoadNullValueForCompare(null.gp(), pinned, type);
// Prefer to overwrite one of the input registers with the result
// of the comparison.
LiftoffRegister dst = __ GetUnusedRegister(kGpReg, {ref, null}, {});
@ -1861,7 +1869,7 @@ class LiftoffCompiler {
// We abuse ref.as_non_null, which isn't otherwise used in this switch, as
// a sentinel for the negation of ref.is_null.
case kExprRefAsNonNull:
return EmitIsNull(opcode);
return EmitIsNull(opcode, value.type);
case kExprExternInternalize: {
LiftoffAssembler::VarState input_state =
__ cache_state()->stack_state.back();
@ -1872,9 +1880,22 @@ class LiftoffCompiler {
__ PushRegister(kRef, LiftoffRegister(kReturnRegister0));
return;
}
case kExprExternExternalize:
// This is a no-op.
case kExprExternExternalize: {
LiftoffRegList pinned;
LiftoffRegister ref = pinned.set(__ PopToRegister(pinned));
LiftoffRegister null = __ GetUnusedRegister(kGpReg, pinned);
LoadNullValueForCompare(null.gp(), pinned, kWasmAnyRef);
Label label;
{
FREEZE_STATE(frozen);
__ emit_cond_jump(kNotEqual, &label, kRefNull, ref.gp(), null.gp(),
frozen);
LoadNullValue(ref.gp(), pinned, kWasmExternRef);
__ bind(&label);
}
__ PushRegister(kRefNull, ref);
return;
}
default:
UNREACHABLE();
}
@ -2301,7 +2322,7 @@ class LiftoffCompiler {
void RefNull(FullDecoder* decoder, ValueType type, Value*) {
LiftoffRegister null = __ GetUnusedRegister(kGpReg, {});
LoadNullValue(null.gp(), {});
LoadNullValue(null.gp(), {}, type);
__ PushRegister(type.kind(), null);
}
@ -2670,7 +2691,7 @@ class LiftoffCompiler {
Label* trap_label =
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapIllegalCast);
LiftoffRegister null = __ GetUnusedRegister(kGpReg, pinned);
LoadNullValueForCompare(null.gp(), pinned);
LoadNullValueForCompare(null.gp(), pinned, arg.type);
{
FREEZE_STATE(trapping);
__ emit_cond_jump(cond, trap_label, kRefNull, obj.gp(), null.gp(),
@ -3585,7 +3606,7 @@ class LiftoffCompiler {
Register tmp = NeedsTierupCheck(decoder, depth)
? pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp()
: no_reg;
LoadNullValueForCompare(null, pinned);
LoadNullValueForCompare(null, pinned, ref_object.type);
{
FREEZE_STATE(frozen);
__ emit_cond_jump(kNotEqual, &cont_false, ref_object.type.kind(),
@ -3615,7 +3636,7 @@ class LiftoffCompiler {
Register tmp = NeedsTierupCheck(decoder, depth)
? pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp()
: no_reg;
LoadNullValueForCompare(null, pinned);
LoadNullValueForCompare(null, pinned, ref_object.type);
{
FREEZE_STATE(frozen);
__ emit_cond_jump(kEqual, &cont_false, ref_object.type.kind(), ref.gp(),
@ -5542,16 +5563,19 @@ class LiftoffCompiler {
for (uint32_t i = imm.struct_type->field_count(); i > 0;) {
i--;
int offset = StructFieldOffset(imm.struct_type, i);
ValueKind field_kind = imm.struct_type->field(i).kind();
ValueType field_type = imm.struct_type->field(i);
LiftoffRegister value = pinned.set(
initial_values_on_stack
? __ PopToRegister(pinned)
: __ GetUnusedRegister(reg_class_for(field_kind), pinned));
: __ GetUnusedRegister(reg_class_for(field_type.kind()), pinned));
if (!initial_values_on_stack) {
if (!CheckSupportedType(decoder, field_kind, "default value")) return;
SetDefaultValue(value, field_kind, pinned);
if (!CheckSupportedType(decoder, field_type.kind(), "default value")) {
return;
}
SetDefaultValue(value, field_type, pinned);
}
StoreObjectField(obj.gp(), no_reg, offset, value, pinned, field_kind);
StoreObjectField(obj.gp(), no_reg, offset, value, pinned,
field_type.kind());
pinned.clear(value);
}
// If this assert fails then initialization of padding field might be
@ -5612,7 +5636,8 @@ class LiftoffCompiler {
__ emit_i32_cond_jumpi(kUnsignedGreaterThan, trap_label, length.gp(),
WasmArray::MaxLength(imm.array_type), trapping);
}
ValueKind elem_kind = imm.array_type->element_type().kind();
ValueType elem_type = imm.array_type->element_type();
ValueKind elem_kind = elem_type.kind();
int elem_size = value_kind_size(elem_kind);
// Allocate the array.
{
@ -5640,7 +5665,7 @@ class LiftoffCompiler {
__ PopToFixedRegister(value);
} else {
if (!CheckSupportedType(decoder, elem_kind, "default value")) return;
SetDefaultValue(value, elem_kind, pinned);
SetDefaultValue(value, elem_type, pinned);
}
// Initialize the array's elements.
LiftoffRegister offset = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
@ -5998,7 +6023,9 @@ class LiftoffCompiler {
Register scratch_null =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
LiftoffRegister result = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
if (obj.type.is_nullable()) LoadNullValueForCompare(scratch_null, pinned);
if (obj.type.is_nullable()) {
LoadNullValueForCompare(scratch_null, pinned, obj.type);
}
{
FREEZE_STATE(frozen);
@ -6032,7 +6059,7 @@ class LiftoffCompiler {
case HeapType::kNoExtern:
case HeapType::kNoFunc:
DCHECK(null_succeeds);
return EmitIsNull(kExprRefIsNull);
return EmitIsNull(kExprRefIsNull, obj.type);
case HeapType::kAny:
// Any may never need a cast as it is either implicitly convertible or
// never convertible for any given type.
@ -6056,7 +6083,9 @@ class LiftoffCompiler {
Register scratch_null =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
Register scratch2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
if (obj.type.is_nullable()) LoadNullValueForCompare(scratch_null, pinned);
if (obj.type.is_nullable()) {
LoadNullValueForCompare(scratch_null, pinned, obj.type);
}
{
FREEZE_STATE(frozen);
@ -6107,7 +6136,9 @@ class LiftoffCompiler {
Register scratch_null =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
Register scratch2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
if (obj.type.is_nullable()) LoadNullValue(scratch_null, pinned);
if (obj.type.is_nullable()) {
LoadNullValue(scratch_null, pinned, kWasmAnyRef);
}
FREEZE_STATE(frozen);
NullSucceeds null_handling = null_succeeds ? kNullSucceeds : kNullFails;
@ -6135,7 +6166,9 @@ class LiftoffCompiler {
Register scratch_null =
pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
Register scratch2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
if (obj.type.is_nullable()) LoadNullValue(scratch_null, pinned);
if (obj.type.is_nullable()) {
LoadNullValue(scratch_null, pinned, kWasmAnyRef);
}
FREEZE_STATE(frozen);
NullSucceeds null_handling = null_succeeds ? kNullSucceeds : kNullFails;
@ -6223,7 +6256,7 @@ class LiftoffCompiler {
enum PopOrPeek { kPop, kPeek };
void Initialize(TypeCheck& check, PopOrPeek pop_or_peek) {
void Initialize(TypeCheck& check, PopOrPeek pop_or_peek, ValueType type) {
LiftoffRegList pinned;
if (pop_or_peek == kPop) {
check.obj_reg = pinned.set(__ PopToRegister(pinned)).gp();
@ -6233,7 +6266,7 @@ class LiftoffCompiler {
check.tmp1 = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
check.tmp2 = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
if (check.obj_type.is_nullable()) {
LoadNullValue(check.null_reg(), pinned);
LoadNullValue(check.null_reg(), pinned, type);
}
}
void LoadInstanceType(TypeCheck& check, const FreezeCacheState& frozen,
@ -6293,7 +6326,7 @@ class LiftoffCompiler {
void AbstractTypeCheck(const Value& object, bool null_succeeds) {
Label match, no_match, done;
TypeCheck check(object.type, &no_match, null_succeeds);
Initialize(check, kPop);
Initialize(check, kPop, object.type);
LiftoffRegister result(check.tmp1);
{
FREEZE_STATE(frozen);
@ -6344,7 +6377,7 @@ class LiftoffCompiler {
Label* trap_label =
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapIllegalCast);
TypeCheck check(object.type, trap_label, null_succeeds);
Initialize(check, kPeek);
Initialize(check, kPeek, object.type);
FREEZE_STATE(frozen);
if (null_succeeds && check.obj_type.is_nullable()) {
@ -6389,7 +6422,7 @@ class LiftoffCompiler {
Label no_match, match;
TypeCheck check(object.type, &no_match, null_succeeds);
Initialize(check, kPeek);
Initialize(check, kPeek, object.type);
FREEZE_STATE(frozen);
if (null_succeeds && check.obj_type.is_nullable()) {
@ -6414,7 +6447,7 @@ class LiftoffCompiler {
Label no_match, end;
TypeCheck check(object.type, &no_match, null_succeeds);
Initialize(check, kPeek);
Initialize(check, kPeek, object.type);
FREEZE_STATE(frozen);
if (null_succeeds && check.obj_type.is_nullable()) {
@ -6848,7 +6881,7 @@ class LiftoffCompiler {
LiftoffRegister null = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
bool check_for_null = a.type.is_nullable() || b.type.is_nullable();
if (check_for_null) {
LoadNullValueForCompare(null.gp(), pinned);
LoadNullValueForCompare(null.gp(), pinned, kWasmStringRef);
}
FREEZE_STATE(frozen);
@ -7762,25 +7795,30 @@ class LiftoffCompiler {
}
}
void LoadNullValue(Register null, LiftoffRegList pinned) {
__ LoadFullPointer(null, kRootRegister,
IsolateData::root_slot_offset(RootIndex::kNullValue));
void LoadNullValue(Register null, LiftoffRegList pinned, ValueType type) {
__ LoadFullPointer(
null, kRootRegister,
type == kWasmExternRef || type == kWasmNullExternRef
? IsolateData::root_slot_offset(RootIndex::kNullValue)
: IsolateData::root_slot_offset(RootIndex::kWasmNull));
}
// Stores the null value representation in the passed register.
// If pointer compression is active, only the compressed tagged pointer
// will be stored. Any operations with this register therefore must
// not compare this against 64 bits using quadword instructions.
void LoadNullValueForCompare(Register null, LiftoffRegList pinned) {
void LoadNullValueForCompare(Register null, LiftoffRegList pinned,
ValueType type) {
Tagged_t static_null =
wasm::GetWasmEngine()->compressed_null_value_or_zero();
if (static_null != 0) {
wasm::GetWasmEngine()->compressed_wasm_null_value_or_zero();
if (type != kWasmExternRef && type != kWasmNullExternRef &&
static_null != 0) {
// static_null is only set for builds with pointer compression.
DCHECK_LE(static_null, std::numeric_limits<uint32_t>::max());
__ LoadConstant(LiftoffRegister(null),
WasmValue(static_cast<uint32_t>(static_null)));
} else {
LoadNullValue(null, pinned);
LoadNullValue(null, pinned, type);
}
}
@ -7798,7 +7836,7 @@ class LiftoffCompiler {
Label* trap_label =
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapNullDereference);
LiftoffRegister null = __ GetUnusedRegister(kGpReg, pinned);
LoadNullValueForCompare(null.gp(), pinned);
LoadNullValueForCompare(null.gp(), pinned, type);
FREEZE_STATE(trapping);
__ emit_cond_jump(kEqual, trap_label, kRefNull, object, null.gp(),
trapping);
@ -7847,10 +7885,10 @@ class LiftoffCompiler {
}
}
void SetDefaultValue(LiftoffRegister reg, ValueKind kind,
void SetDefaultValue(LiftoffRegister reg, ValueType type,
LiftoffRegList pinned) {
DCHECK(is_defaultable(kind));
switch (kind) {
DCHECK(is_defaultable(type.kind()));
switch (type.kind()) {
case kI8:
case kI16:
case kI32:
@ -7865,7 +7903,7 @@ class LiftoffCompiler {
DCHECK(CpuFeatures::SupportsWasmSimd128());
return __ emit_s128_xor(reg, reg, reg);
case kRefNull:
return LoadNullValue(reg.gp(), pinned);
return LoadNullValue(reg.gp(), pinned, type);
case kRtt:
case kVoid:
case kBottom:

View File

@ -1884,6 +1884,9 @@ auto Global::get() const -> Val {
handle(i::Handle<i::WasmInternalFunction>::cast(result)->external(),
v8_global->GetIsolate());
}
if (result->IsWasmNull()) {
result = v8_global->GetIsolate()->factory()->null_value();
}
return Val(V8RefValueToWasm(store, result));
}
case i::wasm::kS128:
@ -2023,6 +2026,9 @@ auto Table::get(size_t index) const -> own<Ref> {
result = handle(
i::Handle<i::WasmInternalFunction>::cast(result)->external(), isolate);
}
if (result->IsWasmNull()) {
result = isolate->factory()->null_value();
}
DCHECK(result->IsNull(isolate) || result->IsJSReceiver());
return V8RefValueToWasm(impl(this)->store(), result);
}

View File

@ -106,7 +106,11 @@ void ConstantExpressionInterface::BinOp(FullDecoder* decoder, WasmOpcode opcode,
void ConstantExpressionInterface::RefNull(FullDecoder* decoder, ValueType type,
Value* result) {
if (!generate_value()) return;
result->runtime_value = WasmValue(isolate_->factory()->null_value(), type);
result->runtime_value =
WasmValue(type == kWasmExternRef || type == kWasmNullExternRef
? Handle<Object>::cast(isolate_->factory()->null_value())
: Handle<Object>::cast(isolate_->factory()->wasm_null()),
type);
}
void ConstantExpressionInterface::RefFunc(FullDecoder* decoder,

View File

@ -35,8 +35,11 @@ ValueOrError EvaluateConstantExpression(Zone* zone, ConstantExpression expr,
case ConstantExpression::kI32Const:
return WasmValue(expr.i32_value());
case ConstantExpression::kRefNull:
return WasmValue(isolate->factory()->null_value(),
ValueType::RefNull(expr.repr()));
return WasmValue(
expected == kWasmExternRef || expected == kWasmNullExternRef
? Handle<Object>::cast(isolate->factory()->null_value())
: Handle<Object>::cast(isolate->factory()->wasm_null()),
ValueType::RefNull(expr.repr()));
case ConstantExpression::kRefFunc: {
uint32_t index = expr.index();
Handle<Object> value =

View File

@ -233,7 +233,7 @@ class WasmGraphBuildingInterface {
DCHECK(type.is_reference());
// TODO(jkummerow): Consider using "the hole" instead, to make any
// illegal uses more obvious.
node = builder_->SetType(builder_->RefNull(), type);
node = builder_->SetType(builder_->RefNull(type), type);
} else {
node = builder_->SetType(builder_->DefaultValue(type), type);
}
@ -444,8 +444,8 @@ class WasmGraphBuildingInterface {
void UnOp(FullDecoder* decoder, WasmOpcode opcode, const Value& value,
Value* result) {
SetAndTypeNode(result,
builder_->Unop(opcode, value.node, decoder->position()));
SetAndTypeNode(result, builder_->Unop(opcode, value.node, value.type,
decoder->position()));
}
void BinOp(FullDecoder* decoder, WasmOpcode opcode, const Value& lhs,
@ -481,7 +481,7 @@ class WasmGraphBuildingInterface {
}
void RefNull(FullDecoder* decoder, ValueType type, Value* result) {
SetAndTypeNode(result, builder_->RefNull());
SetAndTypeNode(result, builder_->RefNull(type));
}
void RefFunc(FullDecoder* decoder, uint32_t function_index, Value* result) {
@ -489,7 +489,8 @@ class WasmGraphBuildingInterface {
}
void RefAsNonNull(FullDecoder* decoder, const Value& arg, Value* result) {
TFNode* cast_node = builder_->AssertNotNull(arg.node, decoder->position());
TFNode* cast_node =
builder_->AssertNotNull(arg.node, arg.type, decoder->position());
SetAndTypeNode(result, cast_node);
}
@ -539,15 +540,16 @@ class WasmGraphBuildingInterface {
void AssertNullTypecheck(FullDecoder* decoder, const Value& obj,
Value* result) {
builder_->TrapIfFalse(wasm::TrapReason::kTrapIllegalCast,
builder_->IsNull(obj.node), decoder->position());
builder_->IsNull(obj.node, obj.type),
decoder->position());
Forward(decoder, obj, result);
}
void AssertNotNullTypecheck(FullDecoder* decoder, const Value& obj,
Value* result) {
SetAndTypeNode(result,
builder_->AssertNotNull(obj.node, decoder->position(),
TrapReason::kTrapIllegalCast));
SetAndTypeNode(
result, builder_->AssertNotNull(obj.node, obj.type, decoder->position(),
TrapReason::kTrapIllegalCast));
}
void NopForTestingUnsupportedInLiftoff(FullDecoder* decoder) {}
@ -915,7 +917,7 @@ class WasmGraphBuildingInterface {
SsaEnv* false_env = ssa_env_;
SsaEnv* true_env = Split(decoder->zone(), false_env);
false_env->SetNotMerged();
builder_->BrOnNull(ref_object.node, &true_env->control,
builder_->BrOnNull(ref_object.node, ref_object.type, &true_env->control,
&false_env->control);
builder_->SetControl(false_env->control);
{
@ -934,7 +936,7 @@ class WasmGraphBuildingInterface {
SsaEnv* false_env = ssa_env_;
SsaEnv* true_env = Split(decoder->zone(), false_env);
false_env->SetNotMerged();
builder_->BrOnNull(ref_object.node, &false_env->control,
builder_->BrOnNull(ref_object.node, ref_object.type, &false_env->control,
&true_env->control);
builder_->SetControl(false_env->control);
ScopedSsaEnv scoped_env(this, true_env);

View File

@ -680,7 +680,9 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
Handle<WasmTableObject> table_obj = WasmTableObject::New(
isolate_, instance, table.type, table.initial_size,
table.has_maximum_size, table.maximum_size, nullptr,
isolate_->factory()->null_value());
IsSubtypeOf(table.type, kWasmExternRef, module_)
? Handle<Object>::cast(isolate_->factory()->null_value())
: Handle<Object>::cast(isolate_->factory()->wasm_null()));
tables->set(i, *table_obj);
}
instance->set_tables(*tables);
@ -1967,7 +1969,7 @@ V8_INLINE void SetFunctionTablePlaceholder(Isolate* isolate,
V8_INLINE void SetFunctionTableNullEntry(Isolate* isolate,
Handle<WasmTableObject> table_object,
uint32_t entry_index) {
table_object->entries().set(entry_index, *isolate->factory()->null_value());
table_object->entries().set(entry_index, *isolate->factory()->wasm_null());
WasmTableObject::ClearDispatchTables(isolate, table_object, entry_index);
}
} // namespace

View File

@ -1041,8 +1041,8 @@ void WasmEngine::AddIsolate(Isolate* isolate) {
#if defined(V8_COMPRESS_POINTERS)
// The null value is not accessible on mksnapshot runs.
if (isolate->snapshot_available()) {
null_tagged_compressed_ = V8HeapCompressionScheme::CompressTagged(
isolate->factory()->null_value()->ptr());
wasm_null_tagged_compressed_ = V8HeapCompressionScheme::CompressTagged(
isolate->factory()->wasm_null()->ptr());
}
#endif

View File

@ -371,8 +371,8 @@ class V8_EXPORT_PRIVATE WasmEngine {
// Returns either the compressed tagged pointer representing a null value or
// 0 if pointer compression is not available.
Tagged_t compressed_null_value_or_zero() const {
return null_tagged_compressed_;
Tagged_t compressed_wasm_null_value_or_zero() const {
return wasm_null_tagged_compressed_;
}
// Call on process start and exit.
@ -411,7 +411,7 @@ class V8_EXPORT_PRIVATE WasmEngine {
std::atomic<int> next_compilation_id_{0};
// Compressed tagged pointer to null value.
std::atomic<Tagged_t> null_tagged_compressed_{0};
std::atomic<Tagged_t> wasm_null_tagged_compressed_{0};
TypeCanonicalizer type_canonicalizer_;

View File

@ -1135,15 +1135,13 @@ bool GetInitialOrMinimumProperty(v8::Isolate* isolate, ErrorThrower* thrower,
namespace {
i::Handle<i::Object> DefaultReferenceValue(i::Isolate* isolate,
i::wasm::ValueType type) {
if (type.is_reference()) {
// Use undefined for JS type (externref) but null for wasm types as wasm
// does not know undefined.
if (type.heap_representation() == i::wasm::HeapType::kExtern) {
return isolate->factory()->undefined_value();
}
return isolate->factory()->null_value();
DCHECK(type.is_object_reference());
// Use undefined for JS type (externref) but null for wasm types as wasm does
// not know undefined.
if (type.heap_representation() == i::wasm::HeapType::kExtern) {
return isolate->factory()->undefined_value();
}
UNREACHABLE();
return isolate->factory()->wasm_null();
}
} // namespace
@ -2335,6 +2333,13 @@ void WebAssemblyTableSet(const v8::FunctionCallbackInfo<v8::Value>& args) {
i::Handle<i::Object> element;
if (args.Length() >= 2) {
element = Utils::OpenHandle(*args[1]);
const char* error_message;
if (!i::WasmTableObject::JSToWasmElement(i_isolate, table_object, element,
&error_message)
.ToHandle(&element)) {
thrower.TypeError("Argument 1 is invalid for table: %s", error_message);
return;
}
} else if (table_object->type().is_defaultable()) {
element = DefaultReferenceValue(i_isolate, table_object->type());
} else {
@ -2343,14 +2348,6 @@ void WebAssemblyTableSet(const v8::FunctionCallbackInfo<v8::Value>& args) {
return;
}
const char* error_message;
if (!i::WasmTableObject::JSToWasmElement(i_isolate, table_object, element,
&error_message)
.ToHandle(&element)) {
thrower.TypeError("Argument 1 is invalid for table: %s", error_message);
return;
}
i::WasmTableObject::Set(i_isolate, table_object, index, element);
}

View File

@ -56,6 +56,7 @@ TQ_OBJECT_CONSTRUCTORS_IMPL(WasmArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmContinuationObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmSuspenderObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmResumeData)
TQ_OBJECT_CONSTRUCTORS_IMPL(WasmNull)
CAST_ACCESSOR(WasmInstanceObject)

View File

@ -280,9 +280,9 @@ void WasmTableObject::SetFunctionTableEntry(Isolate* isolate,
Handle<FixedArray> entries,
int entry_index,
Handle<Object> entry) {
if (entry->IsNull(isolate)) {
if (entry->IsWasmNull(isolate)) {
ClearDispatchTables(isolate, table, entry_index); // Degenerate case.
entries->set(entry_index, ReadOnlyRoots(isolate).null_value());
entries->set(entry_index, ReadOnlyRoots(isolate).wasm_null());
return;
}
Handle<Object> external =
@ -362,7 +362,7 @@ Handle<Object> WasmTableObject::Get(Isolate* isolate,
Handle<Object> entry(entries->get(entry_index), isolate);
if (entry->IsNull(isolate)) {
if (entry->IsWasmNull(isolate)) {
return entry;
}
@ -598,7 +598,7 @@ void WasmTableObject::GetFunctionTableEntry(
*is_valid = true;
Handle<Object> element(table->entries().get(entry_index), isolate);
*is_null = element->IsNull(isolate);
*is_null = element->IsWasmNull(isolate);
if (*is_null) return;
if (element->IsWasmInternalFunction()) {
@ -2249,7 +2249,10 @@ MaybeHandle<Object> JSToWasmObject(Isolate* isolate, Handle<Object> value,
*error_message = "stringview_iter has no JS representation";
return {};
default:
return value;
bool is_extern_subtype =
expected_canonical.heap_representation() == HeapType::kExtern ||
expected_canonical.heap_representation() == HeapType::kNoExtern;
return is_extern_subtype ? value : isolate->factory()->wasm_null();
}
}
@ -2425,15 +2428,15 @@ MaybeHandle<Object> WasmToJSObject(Isolate* isolate, Handle<Object> value,
case i::wasm::HeapType::kArray:
case i::wasm::HeapType::kEq:
case i::wasm::HeapType::kAny:
return value;
return value->IsWasmNull() ? isolate->factory()->null_value() : value;
case i::wasm::HeapType::kFunc: {
if (!value->IsNull()) {
if (value->IsWasmNull()) {
return isolate->factory()->null_value();
} else {
DCHECK(value->IsWasmInternalFunction());
return handle(
i::Handle<i::WasmInternalFunction>::cast(value)->external(),
isolate);
} else {
return value;
}
}
case i::wasm::HeapType::kStringViewWtf8:
@ -2448,7 +2451,9 @@ MaybeHandle<Object> WasmToJSObject(Isolate* isolate, Handle<Object> value,
case i::wasm::HeapType::kBottom:
UNREACHABLE();
default:
if (value->IsWasmInternalFunction()) {
if (value->IsWasmNull()) {
return isolate->factory()->null_value();
} else if (value->IsWasmInternalFunction()) {
return handle(
i::Handle<i::WasmInternalFunction>::cast(value)->external(),
isolate);
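An illustrative model of the boundary conversions changed above (the Value enum and helpers are invented for this sketch): a JavaScript null flowing into a non-externref wasm slot becomes wasm-null, and wasm-null flowing back out is turned into JavaScript null, so the wasm-only sentinel never escapes to JS.

#include <cassert>

// Invented stand-ins for the values and heap types involved.
enum class Value { kJsNull, kWasmNull, kSomeObject };
enum class WasmHeapType { kExtern, kNoExtern, kAny, kStruct, kFunc };

bool IsExternSubtype(WasmHeapType t) {
  return t == WasmHeapType::kExtern || t == WasmHeapType::kNoExtern;
}

// JS -> wasm: only the extern hierarchy keeps the JS null representation.
Value JsToWasm(Value v, WasmHeapType expected) {
  if (v == Value::kJsNull && !IsExternSubtype(expected)) return Value::kWasmNull;
  return v;
}

// wasm -> JS: wasm-null is never exposed to JavaScript.
Value WasmToJs(Value v) {
  return v == Value::kWasmNull ? Value::kJsNull : v;
}

int main() {
  assert(JsToWasm(Value::kJsNull, WasmHeapType::kStruct) == Value::kWasmNull);
  assert(JsToWasm(Value::kJsNull, WasmHeapType::kExtern) == Value::kJsNull);
  assert(WasmToJs(Value::kWasmNull) == Value::kJsNull);
  assert(WasmToJs(Value::kSomeObject) == Value::kSomeObject);
  return 0;
}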

View File

@ -1063,6 +1063,20 @@ class WasmSuspenderObject
TQ_OBJECT_CONSTRUCTORS(WasmSuspenderObject)
};
class WasmNull : public TorqueGeneratedWasmNull<WasmNull, HeapObject> {
public:
// TODO(manoskouk): Make it smaller if able and needed.
static constexpr int kSize = 64 * KB + kTaggedSize;
// Payload should be a multiple of page size.
static_assert((kSize - kTaggedSize) % kMinimumOSPageSize == 0);
// Any wasm struct offset should fit in the object.
static_assert(kSize >= WasmStruct::kHeaderSize +
wasm::kV8MaxWasmStructFields * kSimd128Size);
class BodyDescriptor;
TQ_OBJECT_CONSTRUCTORS(WasmNull)
};
#undef DECL_OPTIONAL_ACCESSORS
namespace wasm {
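A standalone restatement of the two size invariants asserted in the WasmNull class above, with illustrative values for the V8 constants involved (the real kV8MaxWasmStructFields, header size, etc. differ): the payload after the map word spans whole OS pages so it can be protected, and any in-object struct field offset lands inside that protected payload.

#include <cstddef>

// Illustrative constants; the real values live in V8's globals.
constexpr size_t KB = 1024;
constexpr size_t kTaggedSize = 8;
constexpr size_t kMinimumOSPageSize = 4 * KB;
constexpr size_t kSimd128Size = 16;
constexpr size_t kWasmStructHeaderSize = 16;     // illustrative
constexpr size_t kV8MaxWasmStructFields = 2000;  // illustrative bound

constexpr size_t kWasmNullSize = 64 * KB + kTaggedSize;

// Payload (everything after the map word) is a whole number of OS pages, so it
// can be mprotected and any access through a wasm-null reference traps.
static_assert((kWasmNullSize - kTaggedSize) % kMinimumOSPageSize == 0, "");

// Every possible wasm struct field offset fits inside the object, so an
// unchecked struct.get on wasm-null faults instead of reading valid memory.
static_assert(kWasmNullSize >=
                  kWasmStructHeaderSize + kV8MaxWasmStructFields * kSimd128Size,
              "");

int main() { return 0; }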

View File

@ -220,3 +220,8 @@ class WasmStringViewIter extends HeapObject {
string: String;
offset: uint32; // Index into string.
}
extern class WasmNull extends HeapObject {}
extern macro WasmNullConstant(): WasmNull;
const kWasmNull: WasmNull = WasmNullConstant();

View File

@ -29553,3 +29553,265 @@ TEST(WasmAbortStreamingAfterContextDisposal) {
wasm_streaming.reset();
}
#endif // V8_ENABLE_WEBASSEMBLY
TEST(DeepFreezeIncompatibleTypes) {
const int numCases = 7;
struct {
const char* script;
const char* exception;
} test_cases[numCases] = {
{
R"(
"use strict"
let foo = 1;
)",
"TypeError: Cannot DeepFreeze non-const value foo"},
{
R"(
"use strict"
const foo = 1;
const generator = function*() {
yield 1;
yield 2;
}
const gen = generator();
)",
"TypeError: Cannot DeepFreeze object of type Generator"},
{
R"(
"use strict"
const incrementer = (function() {
let a = 1;
return function() { a += 1; return a; };
})();
)",
"TypeError: Cannot DeepFreeze non-const value a"},
{
R"(
let a = new Number();
)",
"TypeError: Cannot DeepFreeze non-const value a"},
{
R"(
const a = [0, 1, 2, 3, 4, 5];
var it = a[Symbol.iterator]();
function foo() {
return it.next().value;
}
foo();
)",
"TypeError: Cannot DeepFreeze object of type Array Iterator"},
{
R"(
const a = "0123456789";
var it = a[Symbol.iterator]();
function foo() {
return it.next().value;
}
foo();
)",
"TypeError: Cannot DeepFreeze object of type Object"},
{R"(
const a = "0123456789";
var it = a.matchAll(/\d/g);
function foo() {
return it.next().value;
}
foo();
)",
"TypeError: Cannot DeepFreeze object of type Object"},
};
for (int idx = 0; idx < numCases; idx++) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = env.local();
v8::Maybe<void> maybe_success = v8::Nothing<void>();
CompileRun(context, test_cases[idx].script);
v8::TryCatch tc(isolate);
maybe_success = context->DeepFreeze();
CHECK(maybe_success.IsNothing());
CHECK(tc.HasCaught());
v8::String::Utf8Value uS(isolate, tc.Exception());
std::string exception(*uS, uS.length());
CHECK_EQ(std::string(test_cases[idx].exception), exception);
}
}
TEST(DeepFreezeIsFrozen) {
const int numCases = 10;
struct {
const char* script;
const char* exception;
int32_t expected;
} test_cases[numCases] = {
{// Closure
R"(
const incrementer = (function() {
const a = {b: 1};
return function() { a.b += 1; return a.b; };
})();
const foo = function() { return incrementer(); }
foo();
)",
nullptr, 2},
{
R"(
const incrementer = (function() {
const a = {b: 1};
return function() { a.b += 1; return a.b; };
})();
const foo = function() { return incrementer(); }
foo();
)",
nullptr, 2},
{// Array
R"(
const a = [0, -1, -2];
const foo = function() { a[0] += 1; return a[0]; }
)",
nullptr, 0},
{
R"(
const a = [0, -1, -2];
const foo = function() { a[0] += 1; return a[0]; }
)",
nullptr, 0},
{// Wrapper Objects
R"(
const a = {b: new Number()};
const foo = function() {
a.b = new Number(a.b + 1);
return a.b.valueOf();
}
)",
nullptr, 0},
{// Functions
// Assignment to constant doesn't work.
R"(
const foo = function() {
foo = function() { return 2;}
return 1;
}
)",
"TypeError: Assignment to constant variable.", 0},
{
R"(
const a = {b: {c: {d: {e: {f: 1}}}}};
const foo = function() {
a.b.c.d.e.f += 1;
return a.b.c.d.e.f;
}
)",
nullptr, 1},
{
R"(
const foo = function() {
if (!('count' in globalThis))
globalThis.count = 1;
++count;
return count;
}
)",
"ReferenceError: count is not defined", 0},
{
R"(
const countPrototype = {
get() {
return 1;
},
};
const count = Object.create(countPrototype);
function foo() {
const curr_count = count.get();
count.prototype = { get() { return curr_count + 1; }};
return count.get();
}
)",
nullptr, 1},
{
R"(
const a = (function(){
function A(){};
A.o = 1;
return new A();
})();
function foo() {
a.constructor.o++;
return a.constructor.o;
}
)",
nullptr, 1},
};
for (int idx = 0; idx < numCases; idx++) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = env.local();
v8::Maybe<void> maybe_success = v8::Nothing<void>();
v8::TryCatch tc(isolate);
v8::MaybeLocal<v8::Value> status =
CompileRun(context, test_cases[idx].script);
CHECK(!status.IsEmpty());
CHECK(!tc.HasCaught());
maybe_success = context->DeepFreeze();
CHECK(!tc.HasCaught());
status = CompileRun(context, "foo()");
if (test_cases[idx].exception) {
CHECK(tc.HasCaught());
v8::String::Utf8Value uS(isolate, tc.Exception());
std::string exception(*uS, uS.length());
CHECK_EQ(std::string(test_cases[idx].exception), exception);
} else {
CHECK(!tc.HasCaught());
CHECK(!status.IsEmpty());
ExpectInt32("foo()", test_cases[idx].expected);
}
}
}
TEST(DeepFreezeAllowsSyntax) {
const int numCases = 2;
struct {
const char* script;
int32_t expected;
} test_cases[numCases] = {
{
R"(
const a = 1;
function foo() {
let b = 4;
b += 1;
return a + b;
}
)",
6,
},
{
R"(
var a = 1;
function foo() {
let b = 4;
b += 1;
return a + b;
}
)",
6,
}}; // TODO(behamilton): Add more cases that should be supported.
for (int idx = 0; idx < numCases; idx++) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
v8::HandleScope scope(isolate);
v8::Local<v8::Context> context = env.local();
v8::Maybe<void> maybe_success = v8::Nothing<void>();
v8::MaybeLocal<v8::Value> status =
CompileRun(context, test_cases[idx].script);
CHECK(!status.IsEmpty());
maybe_success = context->DeepFreeze();
CHECK(!maybe_success.IsNothing());
ExpectInt32("foo()", test_cases[idx].expected);
}
}

View File

@ -386,7 +386,7 @@ WASM_COMPILED_EXEC_TEST(WasmRefAsNonNullSkipCheck) {
tester.CompileModule();
Handle<Object> result = tester.GetResultObject(kFunc).ToHandleChecked();
// Without null checks, ref.as_non_null can actually return null.
CHECK(result->IsNull());
CHECK(result->IsWasmNull());
}
WASM_COMPILED_EXEC_TEST(WasmBrOnNull) {
@ -1145,7 +1145,7 @@ WASM_COMPILED_EXEC_TEST(WasmArrayCopy) {
{
Handle<Object> result5 =
tester.GetResultObject(kCopyRef, 5).ToHandleChecked();
CHECK(result5->IsNull());
CHECK(result5->IsWasmNull());
for (int i = 6; i <= 9; i++) {
Handle<Object> res =
tester.GetResultObject(kCopyRef, i).ToHandleChecked();
@ -1156,7 +1156,7 @@ WASM_COMPILED_EXEC_TEST(WasmArrayCopy) {
}
CHECK(tester.GetResultObject(kCopyRefOverlapping, 6)
.ToHandleChecked()
->IsNull());
->IsWasmNull());
Handle<Object> res0 =
tester.GetResultObject(kCopyRefOverlapping, 0).ToHandleChecked();
CHECK(res0->IsWasmArray());

View File

@ -18,6 +18,7 @@
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-opcodes.h"
#include "src/wasm/wasm-subtyping.h"
namespace v8 {
namespace internal {
@ -241,10 +242,12 @@ void TestingModuleBuilder::AddIndirectFunctionTable(
WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
instance_object(), table_index, table_size);
Handle<WasmTableObject> table_obj =
WasmTableObject::New(isolate_, instance, table.type, table.initial_size,
table.has_maximum_size, table.maximum_size, nullptr,
isolate_->factory()->null_value());
Handle<WasmTableObject> table_obj = WasmTableObject::New(
isolate_, instance, table.type, table.initial_size,
table.has_maximum_size, table.maximum_size, nullptr,
IsSubtypeOf(table.type, kWasmExternRef, test_module_.get())
? Handle<Object>::cast(isolate_->factory()->null_value())
: Handle<Object>::cast(isolate_->factory()->wasm_null()));
WasmTableObject::AddDispatchTable(isolate_, table_obj, instance_object_,
table_index);
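
The builder change above encodes the new default-value rule for wasm tables: tables whose element type is a subtype of externref keep the JS null_value as their initial entry, while funcref and other internal reference tables are now filled with the dedicated wasm_null sentinel. A standalone restatement of that rule as a hypothetical helper (names are illustrative; the patch inlines this logic directly in TestingModuleBuilder and this sketch assumes the same file's includes and namespaces):

  // Hypothetical helper mirroring the selection inlined above.
  Handle<Object> DefaultTableElement(Isolate* isolate, wasm::ValueType type,
                                     const WasmModule* module) {
    return IsSubtypeOf(type, kWasmExternRef, module)
               ? Handle<Object>::cast(isolate->factory()->null_value())
               : Handle<Object>::cast(isolate->factory()->wasm_null());
  }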

View File

@ -25,7 +25,7 @@ function testGrowInternalExternRefTable(table_index) {
const initial_size = 5;
// Add 10 tables; we only test one of them.
for (let i = 0; i < 10; ++i) {
builder.addTable(kWasmExternRef, initial_size).index;
builder.addTable(kWasmExternRef, initial_size);
}
builder.addFunction('grow', kSig_i_ri)
.addBody([kExprLocalGet, 0,

View File

@ -30,7 +30,6 @@ class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
self['json_output']['monitoring_state'] = 'preparation'
# Update v8 remote tracking branches.
self.GitFetchOrigin()
self.Git("fetch origin +refs/tags/*:refs/tags/*")
@ -40,7 +39,6 @@ class DetectLastRoll(Step):
MESSAGE = "Detect commit ID of the last Chromium roll."
def RunStep(self):
self['json_output']['monitoring_state'] = 'detect_last_roll'
self["last_roll"] = self._options.last_roll
if not self["last_roll"]:
# Get last-rolled v8 revision from Chromium's DEPS file.
@ -55,7 +53,6 @@ class DetectRevisionToRoll(Step):
MESSAGE = "Detect commit ID of the V8 revision to roll."
def RunStep(self):
self['json_output']['monitoring_state'] = 'detect_revision'
self["roll"] = self._options.revision
if self["roll"]:
# If the revision was passed on the cmd line, continue script execution
@ -83,7 +80,6 @@ class DetectRevisionToRoll(Step):
else:
print("There is no newer v8 revision than the one in Chromium (%s)."
% self["last_roll"])
self['json_output']['monitoring_state'] = 'up_to_date'
return True
@ -91,7 +87,6 @@ class PrepareRollCandidate(Step):
MESSAGE = "Robustness checks of the roll candidate."
def RunStep(self):
self['json_output']['monitoring_state'] = 'prepare_candidate'
self["roll_title"] = self.GitLog(n=1, format="%s",
git_hash=self["roll"])
@ -106,7 +101,6 @@ class SwitchChromium(Step):
MESSAGE = "Switch to Chromium checkout."
def RunStep(self):
self['json_output']['monitoring_state'] = 'switch_chromium'
cwd = self._options.chromium
self.InitialEnvironmentChecks(cwd)
# Check for a clean workdir.
@ -121,7 +115,6 @@ class UpdateChromiumCheckout(Step):
MESSAGE = "Update the checkout and create a new branch."
def RunStep(self):
self['json_output']['monitoring_state'] = 'update_chromium'
cwd = self._options.chromium
self.GitCheckout("main", cwd=cwd)
self.DeleteBranch("work-branch", cwd=cwd)
@ -137,7 +130,6 @@ class UploadCL(Step):
MESSAGE = "Create and upload CL."
def RunStep(self):
self['json_output']['monitoring_state'] = 'upload'
cwd = self._options.chromium
# Patch DEPS file.
if self.Command("gclient", "setdep -r src/v8@%s" %
@ -172,7 +164,6 @@ class CleanUp(Step):
MESSAGE = "Done!"
def RunStep(self):
self['json_output']['monitoring_state'] = 'success'
print("Congratulations, you have successfully rolled %s into "
"Chromium."
% self["roll"])

View File

@ -579,8 +579,6 @@ deps = {
"-c", TEST_CONFIG["CHROMIUM"],
"--json-output", json_output_file])
self.assertEquals(0, result)
json_output = json.loads(FileToText(json_output_file))
self.assertEquals("up_to_date", json_output["monitoring_state"])
def testChromiumRoll(self):
@ -638,9 +636,6 @@ deps = {
deps = FileToText(os.path.join(chrome_dir, "DEPS"))
self.assertTrue(re.search("\"v8_revision\": \"22624\"", deps))
json_output = json.loads(FileToText(json_output_file))
self.assertEquals("success", json_output["monitoring_state"])
def testCheckLastPushRecently(self):
self.Expect([
Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),

View File

@ -178,18 +178,19 @@ INSTANCE_TYPES = {
267: "WASM_API_FUNCTION_REF_TYPE",
268: "WASM_CONTINUATION_OBJECT_TYPE",
269: "WASM_INTERNAL_FUNCTION_TYPE",
270: "WASM_RESUME_DATA_TYPE",
271: "WASM_STRING_VIEW_ITER_TYPE",
272: "WASM_TYPE_INFO_TYPE",
273: "WEAK_ARRAY_LIST_TYPE",
274: "WEAK_CELL_TYPE",
275: "WASM_ARRAY_TYPE",
276: "WASM_STRUCT_TYPE",
277: "JS_PROXY_TYPE",
270: "WASM_NULL_TYPE",
271: "WASM_RESUME_DATA_TYPE",
272: "WASM_STRING_VIEW_ITER_TYPE",
273: "WASM_TYPE_INFO_TYPE",
274: "WEAK_ARRAY_LIST_TYPE",
275: "WEAK_CELL_TYPE",
276: "WASM_ARRAY_TYPE",
277: "WASM_STRUCT_TYPE",
278: "JS_PROXY_TYPE",
1057: "JS_OBJECT_TYPE",
278: "JS_GLOBAL_OBJECT_TYPE",
279: "JS_GLOBAL_PROXY_TYPE",
280: "JS_MODULE_NAMESPACE_TYPE",
279: "JS_GLOBAL_OBJECT_TYPE",
280: "JS_GLOBAL_PROXY_TYPE",
281: "JS_MODULE_NAMESPACE_TYPE",
1040: "JS_SPECIAL_API_OBJECT_TYPE",
1041: "JS_PRIMITIVE_WRAPPER_TYPE",
1058: "JS_API_OBJECT_TYPE",
@ -297,7 +298,7 @@ KNOWN_MAPS = {
("read_only_space", 0x02141): (255, "MetaMap"),
("read_only_space", 0x02169): (175, "FixedArrayMap"),
("read_only_space", 0x02191): (240, "WeakFixedArrayMap"),
("read_only_space", 0x021b9): (273, "WeakArrayListMap"),
("read_only_space", 0x021b9): (274, "WeakArrayListMap"),
("read_only_space", 0x021e1): (175, "FixedCOWArrayMap"),
("read_only_space", 0x02209): (236, "DescriptorArrayMap"),
("read_only_space", 0x02231): (131, "UndefinedMap"),
@ -429,7 +430,7 @@ KNOWN_MAPS = {
("read_only_space", 0x0366d): (235, "AbstractInternalClassSubclass2Map"),
("read_only_space", 0x03695): (230, "ExportedSubClass2Map"),
("read_only_space", 0x036bd): (265, "SortStateMap"),
("read_only_space", 0x036e5): (271, "WasmStringViewIterMap"),
("read_only_space", 0x036e5): (272, "WasmStringViewIterMap"),
("read_only_space", 0x0370d): (194, "SloppyArgumentsElementsMap"),
("read_only_space", 0x03735): (237, "StrongDescriptorArrayMap"),
("read_only_space", 0x0375d): (200, "TurboshaftWord32SetTypeMap"),
@ -477,10 +478,11 @@ KNOWN_MAPS = {
("read_only_space", 0x03df5): (226, "WasmExportedFunctionDataMap"),
("read_only_space", 0x03e1d): (269, "WasmInternalFunctionMap"),
("read_only_space", 0x03e45): (227, "WasmJSFunctionDataMap"),
("read_only_space", 0x03e6d): (270, "WasmResumeDataMap"),
("read_only_space", 0x03e95): (272, "WasmTypeInfoMap"),
("read_only_space", 0x03e6d): (271, "WasmResumeDataMap"),
("read_only_space", 0x03e95): (273, "WasmTypeInfoMap"),
("read_only_space", 0x03ebd): (268, "WasmContinuationObjectMap"),
("read_only_space", 0x03ee5): (274, "WeakCellMap"),
("read_only_space", 0x03ee5): (270, "WasmNullMap"),
("read_only_space", 0x03f0d): (275, "WeakCellMap"),
("old_space", 0x043bd): (2118, "ExternalMap"),
("old_space", 0x043e5): (2122, "JSMessageObjectMap"),
}
@ -496,48 +498,49 @@ KNOWN_OBJECTS = {
("read_only_space", 0x02a99): "EmptyEnumCache",
("read_only_space", 0x02aa5): "EmptyDescriptorArray",
("read_only_space", 0x03875): "InvalidPrototypeValidityCell",
("read_only_space", 0x03f0d): "EmptyArrayList",
("read_only_space", 0x03f19): "EmptyScopeInfo",
("read_only_space", 0x03f29): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x03f35): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x03f41): "TrueValue",
("read_only_space", 0x03f5d): "FalseValue",
("read_only_space", 0x03f79): "EmptyByteArray",
("read_only_space", 0x03f81): "EmptyPropertyArray",
("read_only_space", 0x03f89): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x03f91): "NoOpInterceptorInfo",
("read_only_space", 0x03fb9): "MinusZeroValue",
("read_only_space", 0x03fc5): "NanValue",
("read_only_space", 0x03fd1): "HoleNanValue",
("read_only_space", 0x03fdd): "InfinityValue",
("read_only_space", 0x03fe9): "MinusInfinityValue",
("read_only_space", 0x03ff5): "MaxSafeInteger",
("read_only_space", 0x04001): "MaxUInt32",
("read_only_space", 0x0400d): "SmiMinValue",
("read_only_space", 0x04019): "SmiMaxValuePlusOne",
("read_only_space", 0x04025): "HashSeed",
("read_only_space", 0x04035): "SingleCharacterStringTable",
("read_only_space", 0x0543d): "empty_string",
("read_only_space", 0x07b35): "UninitializedValue",
("read_only_space", 0x07b6d): "ArgumentsMarker",
("read_only_space", 0x07ba5): "TerminationException",
("read_only_space", 0x07be5): "Exception",
("read_only_space", 0x07c01): "OptimizedOut",
("read_only_space", 0x07c39): "StaleRegister",
("read_only_space", 0x07c71): "SelfReferenceMarker",
("read_only_space", 0x07cb1): "BasicBlockCountersMarker",
("read_only_space", 0x081c9): "EmptyPropertyDictionary",
("read_only_space", 0x081f1): "EmptySymbolTable",
("read_only_space", 0x0820d): "EmptySlowElementDictionary",
("read_only_space", 0x08231): "EmptyOrderedHashMap",
("read_only_space", 0x08245): "EmptyOrderedHashSet",
("read_only_space", 0x08259): "EmptyOrderedPropertyDictionary",
("read_only_space", 0x0827d): "EmptySwissPropertyDictionary",
("read_only_space", 0x0829d): "EmptyFeedbackMetadata",
("read_only_space", 0x082a9): "GlobalThisBindingScopeInfo",
("read_only_space", 0x082c9): "EmptyFunctionScopeInfo",
("read_only_space", 0x082ed): "NativeScopeInfo",
("read_only_space", 0x08305): "ShadowRealmScopeInfo",
("read_only_space", 0x03f35): "EmptyArrayList",
("read_only_space", 0x03f41): "EmptyScopeInfo",
("read_only_space", 0x03f51): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x03f5d): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x03f69): "TrueValue",
("read_only_space", 0x03f85): "FalseValue",
("read_only_space", 0x03fa1): "EmptyByteArray",
("read_only_space", 0x03fa9): "EmptyPropertyArray",
("read_only_space", 0x03fb1): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x03fb9): "NoOpInterceptorInfo",
("read_only_space", 0x03fe1): "MinusZeroValue",
("read_only_space", 0x03fed): "NanValue",
("read_only_space", 0x03ff9): "HoleNanValue",
("read_only_space", 0x04005): "InfinityValue",
("read_only_space", 0x04011): "MinusInfinityValue",
("read_only_space", 0x0401d): "MaxSafeInteger",
("read_only_space", 0x04029): "MaxUInt32",
("read_only_space", 0x04035): "SmiMinValue",
("read_only_space", 0x04041): "SmiMaxValuePlusOne",
("read_only_space", 0x0404d): "HashSeed",
("read_only_space", 0x0405d): "SingleCharacterStringTable",
("read_only_space", 0x05465): "empty_string",
("read_only_space", 0x07b5d): "UninitializedValue",
("read_only_space", 0x07b95): "ArgumentsMarker",
("read_only_space", 0x07bcd): "TerminationException",
("read_only_space", 0x07c0d): "Exception",
("read_only_space", 0x07c29): "OptimizedOut",
("read_only_space", 0x07c61): "StaleRegister",
("read_only_space", 0x07c99): "SelfReferenceMarker",
("read_only_space", 0x07cd9): "BasicBlockCountersMarker",
("read_only_space", 0x081f1): "EmptyPropertyDictionary",
("read_only_space", 0x08219): "EmptySymbolTable",
("read_only_space", 0x08235): "EmptySlowElementDictionary",
("read_only_space", 0x08259): "EmptyOrderedHashMap",
("read_only_space", 0x0826d): "EmptyOrderedHashSet",
("read_only_space", 0x08281): "EmptyOrderedPropertyDictionary",
("read_only_space", 0x082a5): "EmptySwissPropertyDictionary",
("read_only_space", 0x082c5): "EmptyFeedbackMetadata",
("read_only_space", 0x082d1): "GlobalThisBindingScopeInfo",
("read_only_space", 0x082f1): "EmptyFunctionScopeInfo",
("read_only_space", 0x08315): "NativeScopeInfo",
("read_only_space", 0x0832d): "ShadowRealmScopeInfo",
("read_only_space", 0x0fffd): "WasmNull",
("old_space", 0x0426d): "ArgumentsIteratorAccessor",
("old_space", 0x04285): "ArrayLengthAccessor",
("old_space", 0x0429d): "BoundFunctionLengthAccessor",