[ptr-compr] Prepare for changing kTaggedSize, pt.1
Includes various fixes and cleanups here and there.

Bug: v8:7703, v8:8852
Change-Id: I603eb0212cab3fecabfa15dceb70ee23b81cdb5a
Reviewed-on: https://chromium-review.googlesource.com/c/1491595
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59909}
Parent: 3852a9d199
Commit: f70bb59b57
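For readers unfamiliar with the two constants this change keeps separating: kSystemPointerSize is the size of a raw machine pointer, while kTaggedSize is the size of a tagged value stored on the V8 heap. Today they are equal; with pointer compression (V8_COMPRESS_POINTERS) kTaggedSize is intended to shrink to 4 bytes on 64-bit targets while raw pointers stay 8 bytes, so every use site has to pick the right one. A minimal standalone sketch of the relationship (the constant names mirror V8's, but the snippet is illustrative rather than V8 code):

#include <cstdint>
#include <iostream>

// Illustrative stand-ins for the two V8 constants this commit disentangles.
constexpr int kSystemPointerSize = sizeof(void*);  // raw pointer / external field
#ifdef V8_COMPRESS_POINTERS
constexpr int kTaggedSize = 4;                     // compressed on-heap reference
#else
constexpr int kTaggedSize = kSystemPointerSize;    // status quo: both 8 on x64
#endif

int main() {
  // Iterating tagged fields of an object must step by kTaggedSize, while raw
  // data such as backing-store pointers needs kSystemPointerSize.
  std::cout << "system pointer: " << kSystemPointerSize
            << " bytes, tagged value: " << kTaggedSize << " bytes\n";
}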
@@ -101,7 +101,7 @@ Node* ArgumentsBuiltinsAssembler::ConstructParametersObjectFromArgs(
       [this, elements, &offset](Node* arg) {
         StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             elements, offset.value(), arg);
-        Increment(&offset, kSystemPointerSize);
+        Increment(&offset, kTaggedSize);
       },
       first_arg, nullptr, param_mode);
   return result;
@@ -108,7 +108,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
                                  byte_length,
                                  MachineType::PointerRepresentation());
   StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBackingStoreOffset,
-                                 SmiConstant(0));
+                                 IntPtrConstant(0),
+                                 MachineType::PointerRepresentation());
   for (int offset = JSArrayBuffer::kHeaderSize;
        offset < JSArrayBuffer::kSizeWithEmbedderFields; offset += kTaggedSize) {
     StoreObjectFieldNoWriteBarrier(buffer, offset, SmiConstant(0));
@@ -1236,18 +1236,19 @@ TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
       new_space
           ? ExternalReference::new_space_allocation_top_address(isolate())
           : ExternalReference::old_space_allocation_top_address(isolate()));
-  DCHECK_EQ(kTaggedSize,
+  DCHECK_EQ(kSystemPointerSize,
             ExternalReference::new_space_allocation_limit_address(isolate())
                 .address() -
             ExternalReference::new_space_allocation_top_address(isolate())
                 .address());
-  DCHECK_EQ(kTaggedSize,
+  DCHECK_EQ(kSystemPointerSize,
             ExternalReference::old_space_allocation_limit_address(isolate())
                 .address() -
             ExternalReference::old_space_allocation_top_address(isolate())
                 .address());
-  TNode<IntPtrT> limit_address = IntPtrAdd(
-      ReinterpretCast<IntPtrT>(top_address), IntPtrConstant(kTaggedSize));
+  TNode<IntPtrT> limit_address =
+      IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
+                IntPtrConstant(kSystemPointerSize));

   if (flags & kDoubleAlignment) {
     return AllocateRawDoubleAligned(size_in_bytes, flags,
@@ -76,7 +76,7 @@ namespace internal {
 constexpr int kStackSpaceRequiredForCompilation = 40;

 // Determine whether double field unboxing feature is enabled.
-#if V8_TARGET_ARCH_64_BIT
+#if V8_TARGET_ARCH_64_BIT && !defined(V8_COMPRESS_POINTERS)
 #define V8_DOUBLE_FIELDS_UNBOXING true
 #else
 #define V8_DOUBLE_FIELDS_UNBOXING false
@@ -281,7 +281,7 @@ size_t Isolate::HashIsolateForEmbeddedBlob() {
                   Code::kFlagsOffsetEnd + 1);
     static constexpr int kStartOffset = Code::kSafepointTableOffsetOffset;

-    for (int j = kStartOffset; j < Code::kHeaderPaddingStart; j++) {
+    for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
       hash = base::hash_combine(hash, size_t{code_ptr[j]});
     }
   }
@@ -757,7 +757,7 @@ class EmbedderDataArray::BodyDescriptor final : public BodyDescriptorBase {
  public:
   static bool IsValidSlot(Map map, HeapObject obj, int offset) {
 #ifdef V8_COMPRESS_POINTERS
-    STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kSystemPointerSize);
+    STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kTaggedSize);
     STATIC_ASSERT(base::bits::IsPowerOfTwo(kEmbedderDataSlotSize));
     return (offset < EmbedderDataArray::kHeaderSize) ||
            (((offset - EmbedderDataArray::kHeaderSize) &
@@ -775,7 +775,7 @@ class EmbedderDataArray::BodyDescriptor final : public BodyDescriptorBase {
   static inline void IterateBody(Map map, HeapObject obj, int object_size,
                                  ObjectVisitor* v) {
 #ifdef V8_COMPRESS_POINTERS
-    STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kSystemPointerSize);
+    STATIC_ASSERT(kEmbedderDataSlotSize == 2 * kTaggedSize);
     // Iterate only tagged payload of the embedder slots and skip raw payload.
     for (int offset = EmbedderDataArray::OffsetOfElementAt(0) +
                           EmbedderDataSlot::kTaggedPayloadOffset;
@@ -468,14 +468,21 @@ void BytecodeArray::BytecodeArrayPrint(std::ostream& os) {  // NOLINT


 void FreeSpace::FreeSpacePrint(std::ostream& os) {  // NOLINT
-  os << "free space, size " << Size();
+  os << "free space, size " << Size() << "\n";
 }


 template <class Traits>
 void FixedTypedArray<Traits>::FixedTypedArrayPrint(
     std::ostream& os) {  // NOLINT
-  os << "fixed " << Traits::Designator();
+  PrintHeader(os, Traits::ArrayTypeName());
+  os << "\n - length: " << length() << "\n - base_pointer: ";
+  if (base_pointer().ptr() == kNullAddress) {
+    os << "<nullptr>";
+  } else {
+    os << Brief(base_pointer());
+  }
+  os << "\n - external_pointer: " << external_pointer() << "\n";
 }

 bool JSObject::PrintProperties(std::ostream& os) {  // NOLINT
@@ -1363,6 +1370,10 @@ void JSTypedArray::JSTypedArrayPrint(std::ostream& os) {  // NOLINT
   os << "\n - byte_offset: " << byte_offset();
   os << "\n - byte_length: " << byte_length();
   os << "\n - length: " << Brief(length());
+  if (!buffer()->IsJSArrayBuffer()) {
+    os << "\n <invalid buffer>\n";
+    return;
+  }
   if (WasDetached()) os << "\n - detached";
   JSObjectPrintBody(os, *this, !WasDetached());
 }
@@ -1380,6 +1391,10 @@ void JSDataView::JSDataViewPrint(std::ostream& os) {  // NOLINT
   os << "\n - buffer =" << Brief(buffer());
   os << "\n - byte_offset: " << byte_offset();
   os << "\n - byte_length: " << byte_length();
+  if (!buffer()->IsJSArrayBuffer()) {
+    os << "\n <invalid buffer>";
+    return;
+  }
   if (WasDetached()) os << "\n - detached";
   JSObjectPrintBody(os, *this, !WasDetached());
 }
@@ -1608,7 +1623,8 @@ void CodeDataContainer::CodeDataContainerPrint(std::ostream& os) {  // NOLINT
 }

 void Foreign::ForeignPrint(std::ostream& os) {  // NOLINT
-  os << "foreign address : " << reinterpret_cast<void*>(foreign_address());
+  PrintHeader(os, "Foreign");
+  os << "\n - foreign address : " << reinterpret_cast<void*>(foreign_address());
   os << "\n";
 }

@@ -2658,11 +2674,29 @@ void JSObject::PrintTransitions(std::ostream& os) {  // NOLINT
 }  // namespace internal
 }  // namespace v8

+namespace {
+
+inline i::Object GetObjectFromRaw(void* object) {
+  i::Address object_ptr = reinterpret_cast<i::Address>(object);
+#ifdef V8_COMPRESS_POINTERS
+  if (RoundDown<i::kPtrComprIsolateRootAlignment>(object_ptr) ==
+      i::kNullAddress) {
+    // Try to decompress pointer.
+    i::Isolate* isolate = i::Isolate::Current();
+    object_ptr = i::DecompressTaggedAny(isolate->isolate_root(),
+                                        static_cast<i::Tagged_t>(object_ptr));
+  }
+#endif
+  return i::Object(object_ptr);
+}
+
+}  // namespace
+
 //
 // The following functions are used by our gdb macros.
 //
 V8_EXPORT_PRIVATE extern void _v8_internal_Print_Object(void* object) {
-  i::Object(reinterpret_cast<i::Address>(object))->Print();
+  GetObjectFromRaw(object)->Print();
 }

 V8_EXPORT_PRIVATE extern void _v8_internal_Print_Code(void* object) {
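The GetObjectFromRaw() helper added in the hunk above lets the gdb-macro entry points accept either a full pointer or a compressed on-heap value: if rounding the argument down to the isolate-root alignment yields null (i.e. it has no upper bits), it is treated as a compressed tagged word and rebased onto the isolate root. A hedged sketch of that decompression check and arithmetic, using illustrative names rather than V8's internal API:

#include <cstdint>
#include <iostream>

using Address = uint64_t;   // assuming a 64-bit target
using Tagged_t = uint32_t;  // compressed tagged value (assumed 32-bit)

// Assumed 4 GB alignment of the isolate root, as implied by the RoundDown check.
constexpr Address kIsolateRootAlignment = Address{1} << 32;

// If 'raw' has no bits above the alignment, treat it as a compressed value and
// rebase it onto the isolate root; otherwise it is already a full pointer.
// (V8's DecompressTaggedAny may additionally special-case Smis; omitted here.)
Address DecompressIfNeeded(Address isolate_root, Address raw) {
  if ((raw & ~(kIsolateRootAlignment - 1)) == 0) {
    return isolate_root + static_cast<Tagged_t>(raw);
  }
  return raw;
}

int main() {
  constexpr Address kRoot = Address{7} << 32;  // some 4 GB-aligned isolate root
  std::cout << std::hex << DecompressIfNeeded(kRoot, 0x1234) << "\n";          // rebased
  std::cout << std::hex << DecompressIfNeeded(kRoot, kRoot + 0x1234) << "\n";  // unchanged
}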
@@ -2702,7 +2736,7 @@ V8_EXPORT_PRIVATE extern void _v8_internal_Print_Code(void* object) {

 V8_EXPORT_PRIVATE extern void _v8_internal_Print_LayoutDescriptor(
     void* object) {
-  i::Object o(reinterpret_cast<i::Address>(object));
+  i::Object o(GetObjectFromRaw(object));
   if (!o->IsLayoutDescriptor()) {
     printf("Please provide a layout descriptor\n");
   } else {
@@ -2716,7 +2750,7 @@ V8_EXPORT_PRIVATE extern void _v8_internal_Print_StackTrace() {
 }

 V8_EXPORT_PRIVATE extern void _v8_internal_Print_TransitionTree(void* object) {
-  i::Object o(reinterpret_cast<i::Address>(object));
+  i::Object o(GetObjectFromRaw(object));
   if (!o->IsMap()) {
     printf("Please provide a valid Map\n");
   } else {
@@ -7607,10 +7607,8 @@ template <typename Dictionary>
 struct EnumIndexComparator {
   explicit EnumIndexComparator(Dictionary dict) : dict(dict) {}
   bool operator()(Tagged_t a, Tagged_t b) {
-    // TODO(ishell): revisit the code below
-    STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
-    PropertyDetails da(dict->DetailsAt(Smi(a).value()));
-    PropertyDetails db(dict->DetailsAt(Smi(b).value()));
+    PropertyDetails da(dict->DetailsAt(Smi(static_cast<Address>(a)).value()));
+    PropertyDetails db(dict->DetailsAt(Smi(static_cast<Address>(b)).value()));
     return da.dictionary_index() < db.dictionary_index();
   }
   Dictionary dict;
@@ -223,8 +223,10 @@ void Code::WipeOutHeader() {
 }

 void Code::clear_padding() {
-  memset(reinterpret_cast<void*>(address() + kHeaderPaddingStart), 0,
-         kHeaderSize - kHeaderPaddingStart);
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
+    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
+           FIELD_SIZE(kOptionalPaddingOffset));
+  }
   Address data_end =
       has_unwinding_info() ? unwinding_info_end() : raw_instruction_end();
   memset(reinterpret_cast<void*>(data_end), 0,
@@ -220,7 +220,7 @@ class Code : public HeapObject {
   inline void WipeOutHeader();

   // Clear uninitialized padding space. This ensures that the snapshot content
-  // is deterministic.
+  // is deterministic. Depending on the V8 build mode there could be no padding.
   inline void clear_padding();
   // Initialize the flags field. Similar to clear_padding above this ensure that
   // the snapshot content is deterministic.
@@ -373,26 +373,27 @@ class Code : public HeapObject {
   class OptimizedCodeIterator;

   // Layout description.
-#define CODE_FIELDS(V)                                                    \
-  V(kRelocationInfoOffset, kTaggedSize)                                   \
-  V(kDeoptimizationDataOffset, kTaggedSize)                               \
-  V(kSourcePositionTableOffset, kTaggedSize)                              \
-  V(kCodeDataContainerOffset, kTaggedSize)                                \
-  /* Data or code not directly visited by GC directly starts here. */     \
-  /* The serializer needs to copy bytes starting from here verbatim. */   \
-  /* Objects embedded into code is visited via reloc info. */             \
-  V(kDataStart, 0)                                                        \
-  V(kInstructionSizeOffset, kIntSize)                                     \
-  V(kFlagsOffset, kIntSize)                                               \
-  V(kSafepointTableOffsetOffset, kIntSize)                                \
-  V(kHandlerTableOffsetOffset, kIntSize)                                  \
-  V(kConstantPoolOffsetOffset,                                            \
-    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
-  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
-  V(kBuiltinIndexOffset, kIntSize)                                        \
-  /* Add padding to align the instruction start following right after */  \
-  /* the Code object header. */                                           \
-  V(kHeaderPaddingStart, CODE_POINTER_PADDING(kHeaderPaddingStart))       \
+#define CODE_FIELDS(V)                                                    \
+  V(kRelocationInfoOffset, kTaggedSize)                                   \
+  V(kDeoptimizationDataOffset, kTaggedSize)                               \
+  V(kSourcePositionTableOffset, kTaggedSize)                              \
+  V(kCodeDataContainerOffset, kTaggedSize)                                \
+  /* Data or code not directly visited by GC directly starts here. */     \
+  /* The serializer needs to copy bytes starting from here verbatim. */   \
+  /* Objects embedded into code is visited via reloc info. */             \
+  V(kDataStart, 0)                                                        \
+  V(kInstructionSizeOffset, kIntSize)                                     \
+  V(kFlagsOffset, kIntSize)                                               \
+  V(kSafepointTableOffsetOffset, kIntSize)                                \
+  V(kHandlerTableOffsetOffset, kIntSize)                                  \
+  V(kConstantPoolOffsetOffset,                                            \
+    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
+  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
+  V(kBuiltinIndexOffset, kIntSize)                                        \
+  V(kUnalignedHeaderSize, 0)                                              \
+  /* Add padding to align the instruction start following right after */  \
+  /* the Code object header. */                                           \
+  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset)) \
   V(kHeaderSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
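The CODE_FIELDS change above leans on V8's field-offset X-macro pattern: each V(Name, Size) entry defines a constant at the running offset, so inserting a zero-sized kUnalignedHeaderSize and renaming the padding field to kOptionalPaddingOffset yields both an "end of real header" marker and a FIELD_SIZE(...) that may legitimately be zero. A simplified, self-contained sketch of that idiom (the macro shapes here are illustrative, not V8's exact definitions):

#include <iostream>

// Each V(Name, Size) entry defines Name at the running offset plus a Name##End
// marker, so FIELD_SIZE(Name) can be computed afterwards and zero-sized
// entries such as kUnalignedHeaderSize are legal.
#define EXAMPLE_FIELDS(V)       \
  V(kFirstOffset, 4)            \
  V(kUnalignedHeaderSize, 0)    \
  V(kOptionalPaddingOffset, 12) \
  V(kHeaderSize, 0)

#define DEFINE_ONE_FIELD(Name, Size) Name, Name##End = Name + (Size) - 1,
enum : int { kStart = 0 - 1, EXAMPLE_FIELDS(DEFINE_ONE_FIELD) };
#undef DEFINE_ONE_FIELD

#define FIELD_SIZE(Name) (Name##End + 1 - Name)

int main() {
  // Prints "kUnalignedHeaderSize offset: 4, padding size: 12".
  std::cout << "kUnalignedHeaderSize offset: " << kUnalignedHeaderSize
            << ", padding size: " << FIELD_SIZE(kOptionalPaddingOffset) << "\n";
}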
@@ -402,31 +403,24 @@ class Code : public HeapObject {
   // due to padding for code alignment.
 #if V8_TARGET_ARCH_ARM64
   static constexpr int kHeaderPaddingSize = 0;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_MIPS64
   static constexpr int kHeaderPaddingSize = 0;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_X64
   static constexpr int kHeaderPaddingSize = 0;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_ARM
   static constexpr int kHeaderPaddingSize = 20;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_IA32
   static constexpr int kHeaderPaddingSize = 20;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_MIPS
   static constexpr int kHeaderPaddingSize = 20;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #elif V8_TARGET_ARCH_PPC64
   // No static assert possible since padding size depends on the
   // FLAG_enable_embedded_constant_pool runtime flag.
   static constexpr int kHeaderPaddingSize = 0;
 #elif V8_TARGET_ARCH_S390X
   static constexpr int kHeaderPaddingSize = 0;
-  STATIC_ASSERT(kHeaderSize - kHeaderPaddingStart == kHeaderPaddingSize);
 #else
 #error Unknown architecture.
 #endif
+  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

   inline int GetUnwindingInfoSizeOffset() const;

@@ -679,7 +679,7 @@ class FixedTypedArray : public FixedTypedArrayBase {
   public: /* NOLINT */                                                      \
    typedef elementType ElementType;                                         \
    static const InstanceType kInstanceType = FIXED_##TYPE##_ARRAY_TYPE;     \
-   static const char* Designator() { return #type " array"; }               \
+   static const char* ArrayTypeName() { return "Fixed" #Type "Array"; }     \
    static inline Handle<Object> ToHandle(Isolate* isolate,                  \
                                          elementType scalar);               \
    static inline elementType defaultValue();                                \
@@ -210,10 +210,10 @@ class JSTypedArray : public JSArrayBufferView {
   DECL_VERIFIER(JSTypedArray)

   // Layout description.
-#define JS_TYPED_ARRAY_FIELDS(V)       \
-  /* Raw data fields. */               \
-  V(kLengthOffset, kSystemPointerSize) \
-  /* Header size. */                   \
+#define JS_TYPED_ARRAY_FIELDS(V) \
+  /* Raw data fields. */         \
+  V(kLengthOffset, kTaggedSize)  \
+  /* Header size. */             \
   V(kHeaderSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSArrayBufferView::kHeaderSize,
@@ -191,7 +191,7 @@ class UncompiledDataWithPreparseData : public UncompiledData {
 #undef UNCOMPILED_DATA_WITH_PREPARSE_DATA_FIELDS

   // Make sure the size is aligned
-  STATIC_ASSERT(kSize == POINTER_SIZE_ALIGN(kSize));
+  STATIC_ASSERT(IsAligned(kSize, kTaggedSize));

   typedef SubclassBodyDescriptor<
       UncompiledData::BodyDescriptor,
@@ -183,10 +183,10 @@ const char* HeapEntry::TypeAsString() {
 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler) : profiler_(profiler) {
   // It is very important to keep objects that form a heap snapshot
   // as small as possible. Check assumptions about data structure sizes.
-  STATIC_ASSERT((kTaggedSize == 4 && sizeof(HeapGraphEdge) == 12) ||
-                (kTaggedSize == 8 && sizeof(HeapGraphEdge) == 24));
-  STATIC_ASSERT((kTaggedSize == 4 && sizeof(HeapEntry) == 28) ||
-                (kTaggedSize == 8 && sizeof(HeapEntry) == 40));
+  STATIC_ASSERT((kSystemPointerSize == 4 && sizeof(HeapGraphEdge) == 12) ||
+                (kSystemPointerSize == 8 && sizeof(HeapGraphEdge) == 24));
+  STATIC_ASSERT((kSystemPointerSize == 4 && sizeof(HeapEntry) == 28) ||
+                (kSystemPointerSize == 8 && sizeof(HeapEntry) == 40));
   memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
 }

@@ -2838,7 +2838,7 @@ TEST(InternalFieldsSubclassing) {
     if (in_object_only) {
       CHECK_LE(nof_properties, i_value->map()->GetInObjectProperties());
     } else {
-      CHECK_LE(kMaxNofProperties, i_value->map()->GetInObjectProperties());
+      CHECK_LE(i_value->map()->GetInObjectProperties(), kMaxNofProperties);
     }

     // Make Sure we get the precise property count.
@@ -2849,7 +2849,7 @@ TEST(InternalFieldsSubclassing) {
     if (in_object_only) {
       CHECK_EQ(nof_properties, i_value->map()->GetInObjectProperties());
     } else {
-      CHECK_LE(kMaxNofProperties, i_value->map()->GetInObjectProperties());
+      CHECK_LE(i_value->map()->GetInObjectProperties(), kMaxNofProperties);
     }
   }
 }
@@ -459,8 +459,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
             MachineType::AnyTagged(), constant_pool_matcher,
             c::IsIntPtrAdd(
                 c::IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
-                c::IsWordShl(index,
-                             c::IsIntPtrConstant(kSystemPointerSizeLog2))),
+                c::IsWordShl(index, c::IsIntPtrConstant(kTaggedSizeLog2))),
             LoadSensitivity::kCritical));
   }
 }
@@ -240,8 +240,8 @@ extras_accessors = [
     'JSObject, elements, Object, kElementsOffset',
     'JSObject, internal_fields, uintptr_t, kHeaderSize',
     'FixedArray, data, uintptr_t, kHeaderSize',
-    'FixedTypedArrayBase, external_pointer, Object, kExternalPointerOffset',
-    'JSArrayBuffer, backing_store, Object, kBackingStoreOffset',
+    'FixedTypedArrayBase, external_pointer, uintptr_t, kExternalPointerOffset',
+    'JSArrayBuffer, backing_store, uintptr_t, kBackingStoreOffset',
     'JSArrayBuffer, byte_length, size_t, kByteLengthOffset',
     'JSArrayBufferView, byte_length, size_t, kByteLengthOffset',
     'JSArrayBufferView, byte_offset, size_t, kByteOffsetOffset',