[cleanup] Fix kPointerSize usages in src/wasm/
Also added "!= 0" for readability in checks like:
  if (FIELD_SIZE(kFooOffset) != 0) {...}

Bug: v8:8477, v8:8562
Change-Id: Ibc305103475e6ec029e89e7ad095ec0a1fa30189
Reviewed-on: https://chromium-review.googlesource.com/c/1382743
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58360}
parent: e95be4598c
commit: 50f6baf404
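For readers outside V8: the cleanup splits the old kPointerSize into two distinct constants. A minimal sketch of the distinction, assuming definitions along the lines of V8's globals of this era (the exact spellings below are illustrative, not copied from the tree):

// Illustrative only -- not V8's actual definitions.
constexpr int kSystemPointerSize = sizeof(void*);  // size of a machine pointer
#ifdef V8_COMPRESS_POINTERS
constexpr int kTaggedSize = 4;                     // compressed on-heap slot
#else
constexpr int kTaggedSize = kSystemPointerSize;    // identical without compression
#endif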
@@ -189,7 +189,7 @@ TF_BUILTIN(TypedArrayInitialize, TypedArrayBuiltinsAssembler) {
   // - Set the byte_length field to byte_length.
   // - Set backing_store to null/Smi(0).
   // - Set all embedder fields to Smi(0).
-  if (FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(JSArrayBuffer::kOptionalPaddingOffset));
     StoreObjectFieldNoWriteBarrier(
         buffer, JSArrayBuffer::kOptionalPaddingOffset, Int32Constant(0),
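The FIELD_SIZE(...) checks above read naturally once you see what the macro computes. A sketch, assuming the kFooOffsetEnd convention visible later in this diff (the exact macro body is an assumption):

// Illustrative: each V(kFooOffset, size) layout entry also defines
// kFooOffsetEnd, so the field's size is recoverable from the two constants.
// It evaluates to 0 when the optional padding field is absent on a target.
#define FIELD_SIZE(Name) (Name##End + 1 - Name)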
@@ -3055,7 +3055,7 @@ TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
       Signed(WordShl(length, kSystemPointerSizeLog2)));
   Node* raw_result = Allocate(size, kNone);
   StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
-  if (FIELD_SIZE(BigInt::kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
     StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
                                    Int32Constant(0),
@@ -696,7 +696,7 @@ class WasmInstanceObject::BodyDescriptor final : public BodyDescriptorBase {
   template <typename ObjectVisitor>
   static inline void IterateBody(Map map, HeapObject* obj, int object_size,
                                  ObjectVisitor* v) {
-    IteratePointers(obj, kPropertiesOrHashOffset, kFirstUntaggedOffset, v);
+    IteratePointers(obj, kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset, v);
    IterateJSObjectBodyImpl(map, obj, kSize, object_size, v);
   }

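The rename from kFirstUntaggedOffset to kEndOfTaggedFieldsOffset makes the GC contract explicit. A comment-level sketch of the two regions the body descriptor distinguishes (names from the diff):

// [kPropertiesOrHashOffset, kEndOfTaggedFieldsOffset)  tagged -- the visitor
//                                                      must trace these slots
// [kEndOfTaggedFieldsOffset, kSize)                    raw data -- skipped,
//                                                      never read as pointers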
@@ -1610,7 +1610,7 @@ void JSProxy::JSProxyVerify(Isolate* isolate) {

 void JSArrayBuffer::JSArrayBufferVerify(Isolate* isolate) {
   CHECK(IsJSArrayBuffer());
-  if (FIELD_SIZE(kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
     CHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
     CHECK_EQ(0,
              *reinterpret_cast<uint32_t*>(address() + kOptionalPaddingOffset));
@@ -1829,8 +1829,8 @@ void WasmInstanceObject::WasmInstanceObjectVerify(Isolate* isolate) {
   // Just generically check all tagged fields. Don't check the untagged fields,
   // as some of them might still contain the "undefined" value if the
   // WasmInstanceObject is not fully set up yet.
-  for (int offset = kHeaderSize; offset < kFirstUntaggedOffset;
-       offset += kPointerSize) {
+  for (int offset = kHeaderSize; offset < kEndOfTaggedFieldsOffset;
+       offset += kTaggedSize) {
     VerifyObjectField(isolate, offset);
   }
 }
@@ -119,7 +119,7 @@ class FreshlyAllocatedBigInt : public BigIntBase {

   // Clear uninitialized padding space.
   inline void clear_padding() {
-    if (FIELD_SIZE(kOptionalPaddingOffset)) {
+    if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
       DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
       memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
              FIELD_SIZE(kOptionalPaddingOffset));
@@ -87,7 +87,7 @@ void JSArrayBuffer::set_is_wasm_memory(bool is_wasm_memory) {
 }

 void JSArrayBuffer::clear_padding() {
-  if (FIELD_SIZE(kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
     memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
            FIELD_SIZE(kOptionalPaddingOffset));
@@ -48,7 +48,7 @@ ObjectSlot PreParsedScopeData::child_data_start() const {
 }

 void PreParsedScopeData::clear_padding() {
-  if (FIELD_SIZE(kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
     memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
            FIELD_SIZE(kOptionalPaddingOffset));
@@ -65,7 +65,7 @@ INT32_ACCESSORS(UncompiledData, end_position, kEndPositionOffset)
 INT32_ACCESSORS(UncompiledData, function_literal_id, kFunctionLiteralIdOffset)

 void UncompiledData::clear_padding() {
-  if (FIELD_SIZE(kOptionalPaddingOffset)) {
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
     DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
     memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
            FIELD_SIZE(kOptionalPaddingOffset));
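The same clear_padding() pattern recurs in four classes above. As a standalone sketch (the helper name and signature are hypothetical, not V8 API):

// Hypothetical helper illustrating the shared pattern: zero the optional
// padding so heap snapshots and byte-wise comparisons stay deterministic.
inline void ClearOptionalPadding(Address object_address, int padding_offset,
                                 int padding_size) {
  if (padding_size != 0) {  // FIELD_SIZE(...) is 0 when no padding exists
    memset(reinterpret_cast<void*>(object_address + padding_offset), 0,
           padding_size);
  }
}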
@@ -37,11 +37,12 @@ namespace liftoff {
 // | | v
 // -----+--------------------+ <-- stack ptr (sp)
 //
-static_assert(2 * kPointerSize == LiftoffAssembler::kStackSlotSize,
+static_assert(2 * kSystemPointerSize == LiftoffAssembler::kStackSlotSize,
               "Slot size should be twice the size of the 32 bit pointer.");
-constexpr int32_t kInstanceOffset = 2 * kPointerSize;
-constexpr int32_t kFirstStackSlotOffset = kInstanceOffset + 2 * kPointerSize;
-constexpr int32_t kConstantStackSpace = kPointerSize;
+constexpr int32_t kInstanceOffset = 2 * kSystemPointerSize;
+constexpr int32_t kFirstStackSlotOffset =
+    kInstanceOffset + 2 * kSystemPointerSize;
+constexpr int32_t kConstantStackSpace = kSystemPointerSize;
 // kPatchInstructionsRequired sets a maximum limit of how many instructions that
 // PatchPrepareStackFrame will use in order to increase the stack appropriately.
 // Three instructions are required to sub a large constant, movw + movt + sub.
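A quick worked check of the arm frame constants above on their 32-bit target, where kSystemPointerSize == 4 (slot addressing is an assumption based on the GetStackSlot helper appearing later in this diff):

// kInstanceOffset       = 2 * 4 = 8
// kFirstStackSlotOffset = 8 + 2 * 4 = 16
// kStackSlotSize        = 2 * 4 = 8   (per the static_assert)
// so, assuming fp-relative addressing as in GetStackSlot, wasm stack slot i
// lives at roughly fp - (kFirstStackSlotOffset + i * kStackSlotSize).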
@@ -1337,7 +1338,7 @@ void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
                              ExternalReference ext_ref) {
   // Arguments are passed by pushing them all to the stack and then passing
   // a pointer to them.
-  DCHECK_EQ(stack_bytes % kPointerSize, 0);
+  DCHECK(IsAligned(stack_bytes, kSystemPointerSize));
   // Reserve space in the stack.
   sub(sp, sp, Operand(stack_bytes));

@@ -1392,7 +1393,7 @@ void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
       break;
     case kWasmI64:
       ldr(result_reg->low_gp(), MemOperand(sp));
-      ldr(result_reg->high_gp(), MemOperand(sp, kPointerSize));
+      ldr(result_reg->high_gp(), MemOperand(sp, kSystemPointerSize));
       break;
     case kWasmF32:
       vldr(liftoff::GetFloatRegister(result_reg->fp()), MemOperand(sp));
@@ -41,8 +41,8 @@ namespace liftoff {
 // -----+--------------------+ <-- stack ptr (sp)
 //

-constexpr int32_t kInstanceOffset = 2 * kPointerSize;
-constexpr int32_t kFirstStackSlotOffset = kInstanceOffset + kPointerSize;
+constexpr int32_t kInstanceOffset = 2 * kSystemPointerSize;
+constexpr int32_t kFirstStackSlotOffset = kInstanceOffset + kSystemPointerSize;
 constexpr int32_t kConstantStackSpace = 0;

 inline MemOperand GetStackSlot(uint32_t index) {
@@ -384,7 +384,8 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
                                            ValueType type) {
-  liftoff::Load(this, dst, ebp, kPointerSize * (caller_slot_idx + 1), type);
+  liftoff::Load(this, dst, ebp, kSystemPointerSize * (caller_slot_idx + 1),
+                type);
 }

 void LiftoffAssembler::MoveStackValue(uint32_t dst_index, uint32_t src_index,
@@ -1649,8 +1650,9 @@ void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
 }

 void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
-  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);  // 16 bit immediate
-  ret(static_cast<int>(num_stack_slots * kPointerSize));
+  DCHECK_LT(num_stack_slots,
+            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
+  ret(static_cast<int>(num_stack_slots * kSystemPointerSize));
 }

 void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
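The rewrapped DCHECK above guards a real encoding limit. A short arithmetic note (the bound itself, not V8 code):

// x86 "ret imm16" can pop at most 0xFFFF bytes, so the check requires
//   num_stack_slots * kSystemPointerSize < 1 << 16,
// e.g. on ia32 (4-byte pointers) at most 16383 stack slots.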
@@ -36,7 +36,7 @@ class LiftoffAssembler : public TurboAssembler {
   static constexpr uint32_t kStackSlotSize = 8;

   static constexpr ValueType kWasmIntPtr =
-      kPointerSize == 8 ? kWasmI64 : kWasmI32;
+      kSystemPointerSize == 8 ? kWasmI64 : kWasmI32;

   class VarState {
    public:
@@ -452,7 +452,7 @@ class LiftoffAssembler : public TurboAssembler {
   inline void emit_i32_to_intptr(Register dst, Register src);

   inline void emit_ptrsize_add(Register dst, Register lhs, Register rhs) {
-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       emit_i64_add(LiftoffRegister(dst), LiftoffRegister(lhs),
                    LiftoffRegister(rhs));
     } else {
@@ -460,7 +460,7 @@ class LiftoffAssembler : public TurboAssembler {
     }
   }
   inline void emit_ptrsize_sub(Register dst, Register lhs, Register rhs) {
-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       emit_i64_sub(LiftoffRegister(dst), LiftoffRegister(lhs),
                    LiftoffRegister(rhs));
     } else {
@@ -468,7 +468,7 @@ class LiftoffAssembler : public TurboAssembler {
     }
   }
   inline void emit_ptrsize_and(Register dst, Register lhs, Register rhs) {
-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       emit_i64_and(LiftoffRegister(dst), LiftoffRegister(lhs),
                    LiftoffRegister(rhs));
     } else {
@@ -476,7 +476,7 @@ class LiftoffAssembler : public TurboAssembler {
     }
   }
   inline void emit_ptrsize_shr(Register dst, Register src, int amount) {
-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       emit_i64_shr(LiftoffRegister(dst), LiftoffRegister(src), amount);
     } else {
       emit_i32_shr(dst, src, amount);
@@ -52,8 +52,7 @@ struct assert_field_size {
 };

 #define WASM_INSTANCE_OBJECT_FIELD_SIZE(name) \
-  (WasmInstanceObject::k##name##OffsetEnd -   \
-   WasmInstanceObject::k##name##Offset + 1)  // NOLINT(whitespace/indent)
+  FIELD_SIZE(WasmInstanceObject::k##name##Offset)

 #define LOAD_INSTANCE_FIELD(dst, name, load_size) \
   __ LoadFromInstance(dst, WASM_INSTANCE_OBJECT_FIELD_OFFSET(name), \
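The simplified macro is behavior-preserving; both spellings compute the same value (a sketch, with a hypothetical field name Foo):

// Before: WasmInstanceObject::kFooOffsetEnd -
//         WasmInstanceObject::kFooOffset + 1
// After:  FIELD_SIZE(WasmInstanceObject::kFooOffset)
// FIELD_SIZE performs the same OffsetEnd - Offset + 1 subtraction centrally,
// which also retires the NOLINT workaround for the multi-line macro.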
@@ -76,7 +75,7 @@ struct assert_field_size {
 #endif

 constexpr LoadType::LoadTypeValue kPointerLoadType =
-    kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
+    kSystemPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;

 #if V8_TARGET_ARCH_ARM64
 // On ARM64, the Assembler keeps track of pointers to Labels to resolve
@@ -107,8 +106,9 @@ class MovableLabel {

 compiler::CallDescriptor* GetLoweredCallDescriptor(
     Zone* zone, compiler::CallDescriptor* call_desc) {
-  return kPointerSize == 4 ? compiler::GetI32WasmCallDescriptor(zone, call_desc)
-                           : call_desc;
+  return kSystemPointerSize == 4
+             ? compiler::GetI32WasmCallDescriptor(zone, call_desc)
+             : call_desc;
 }

 constexpr ValueType kSupportedTypesArr[] = {kWasmI32, kWasmI64, kWasmF32,
@@ -306,7 +306,7 @@ class LiftoffCompiler {
         OutOfLineCode::StackCheck(position, __ cache_state()->used_registers));
     OutOfLineCode& ool = out_of_line_code_.back();
     Register limit_address = __ GetUnusedRegister(kGpReg).gp();
-    LOAD_INSTANCE_FIELD(limit_address, StackLimitAddress, kPointerSize);
+    LOAD_INSTANCE_FIELD(limit_address, StackLimitAddress, kSystemPointerSize);
     __ StackCheck(ool.label.get(), limit_address);
     __ bind(ool.continuation.get());
   }
@@ -1167,12 +1167,12 @@ class LiftoffCompiler {
                              LiftoffRegList& pinned, uint32_t* offset) {
     Register addr = pinned.set(__ GetUnusedRegister(kGpReg)).gp();
     if (global->mutability && global->imported) {
-      LOAD_INSTANCE_FIELD(addr, ImportedMutableGlobals, kPointerSize);
+      LOAD_INSTANCE_FIELD(addr, ImportedMutableGlobals, kSystemPointerSize);
       __ Load(LiftoffRegister(addr), addr, no_reg,
               global->index * sizeof(Address), kPointerLoadType, pinned);
       *offset = 0;
     } else {
-      LOAD_INSTANCE_FIELD(addr, GlobalsStart, kPointerSize);
+      LOAD_INSTANCE_FIELD(addr, GlobalsStart, kSystemPointerSize);
       *offset = global->offset;
     }
     return addr;
@@ -1397,9 +1397,9 @@ class LiftoffCompiler {
     LiftoffRegister end_offset_reg =
         pinned.set(__ GetUnusedRegister(kGpReg, pinned));
     Register mem_size = __ GetUnusedRegister(kGpReg, pinned).gp();
-    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kPointerSize);
+    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kSystemPointerSize);

-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       __ LoadConstant(end_offset_reg, WasmValue(end_offset));
     } else {
       __ LoadConstant(end_offset_reg,
@@ -1509,7 +1509,7 @@ class LiftoffCompiler {
     Register tmp = __ GetUnusedRegister(kGpReg, pinned).gp();
     __ LoadConstant(LiftoffRegister(tmp), WasmValue(*offset));
     __ emit_ptrsize_add(index, index, tmp);
-    LOAD_INSTANCE_FIELD(tmp, MemoryMask, kPointerSize);
+    LOAD_INSTANCE_FIELD(tmp, MemoryMask, kSystemPointerSize);
     __ emit_ptrsize_and(index, index, tmp);
     *offset = 0;
     return index;
@@ -1530,7 +1530,7 @@ class LiftoffCompiler {
     index = AddMemoryMasking(index, &offset, pinned);
     DEBUG_CODE_COMMENT("Load from memory");
     Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerSize);
+    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize);
     RegClass rc = reg_class_for(value_type);
     LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
     uint32_t protected_load_pc = 0;
@@ -1564,7 +1564,7 @@ class LiftoffCompiler {
     index = AddMemoryMasking(index, &offset, pinned);
     DEBUG_CODE_COMMENT("Store to memory");
     Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerSize);
+    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize);
     uint32_t protected_store_pc = 0;
     LiftoffRegList outer_pinned;
     if (FLAG_trace_wasm_memory) outer_pinned.set(index);
@@ -1583,7 +1583,7 @@ class LiftoffCompiler {

   void CurrentMemoryPages(FullDecoder* decoder, Value* result) {
     Register mem_size = __ GetUnusedRegister(kGpReg).gp();
-    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kPointerSize);
+    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kSystemPointerSize);
     __ emit_ptrsize_shr(mem_size, mem_size, kWasmPageSizeLog2);
     __ PushRegister(kWasmI32, LiftoffRegister(mem_size));
   }
@@ -1643,7 +1643,7 @@ class LiftoffCompiler {

       Register imported_targets = tmp;
       LOAD_INSTANCE_FIELD(imported_targets, ImportedFunctionTargets,
-                          kPointerSize);
+                          kSystemPointerSize);
       __ Load(LiftoffRegister(target), imported_targets, no_reg,
               imm.index * sizeof(Address), kPointerLoadType, pinned);

@@ -1752,7 +1752,7 @@ class LiftoffCompiler {

     DEBUG_CODE_COMMENT("Check indirect call signature");
     // Load the signature from {instance->ift_sig_ids[key]}
-    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kPointerSize);
+    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kSystemPointerSize);
     __ LoadConstant(LiftoffRegister(tmp_const),
                     WasmValue(static_cast<uint32_t>(sizeof(uint32_t))));
     // TODO(wasm): use a emit_i32_shli() instead of a multiply.
@@ -1770,7 +1770,7 @@ class LiftoffCompiler {
                       LiftoffAssembler::kWasmIntPtr, scratch, tmp_const);

     DEBUG_CODE_COMMENT("Execute indirect call");
-    if (kPointerSize == 8) {
+    if (kSystemPointerSize == 8) {
       // {index} has already been multiplied by 4. Multiply by another 2.
       __ LoadConstant(LiftoffRegister(tmp_const), WasmValue(2));
       __ emit_i32_mul(index, index, tmp_const);
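A worked example of the index scaling above on a 64-bit target:

// Signature ids are uint32_t, so {index} was already scaled by 4 for the
// sig-id table; targets are 8-byte system pointers, hence one more doubling:
//   byte offset into targets = index * 4 * 2 == index * sizeof(Address)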
@@ -1786,7 +1786,8 @@ class LiftoffCompiler {
     Register* explicit_instance = &tmp_const;

     // Load the target from {instance->ift_targets[key]}
-    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableTargets, kPointerSize);
+    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableTargets,
+                        kSystemPointerSize);
     __ Load(LiftoffRegister(scratch), table, index, 0, kPointerLoadType,
             pinned);

@@ -16,7 +16,7 @@ namespace v8 {
 namespace internal {
 namespace wasm {

-static constexpr bool kNeedI64RegPair = kPointerSize == 4;
+static constexpr bool kNeedI64RegPair = kSystemPointerSize == 4;

 enum RegClass : uint8_t {
   kGpReg,
@@ -481,7 +481,7 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
                                            ValueType type) {
-  int32_t offset = kPointerSize * (caller_slot_idx + 1);
+  int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
   liftoff::Load(this, dst, fp, offset, type);
 }
@@ -1320,11 +1320,11 @@ void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
   LiftoffRegList gp_regs = regs & kGpCacheRegList;
   unsigned num_gp_regs = gp_regs.GetNumRegsSet();
   if (num_gp_regs) {
-    unsigned offset = num_gp_regs * kPointerSize;
+    unsigned offset = num_gp_regs * kSystemPointerSize;
     addiu(sp, sp, -offset);
     while (!gp_regs.is_empty()) {
       LiftoffRegister reg = gp_regs.GetFirstRegSet();
-      offset -= kPointerSize;
+      offset -= kSystemPointerSize;
       sw(reg.gp(), MemOperand(sp, offset));
       gp_regs.clear(reg);
     }
@@ -1361,13 +1361,14 @@ void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
       LiftoffRegister reg = gp_regs.GetLastRegSet();
       lw(reg.gp(), MemOperand(sp, gp_offset));
       gp_regs.clear(reg);
-      gp_offset += kPointerSize;
+      gp_offset += kSystemPointerSize;
     }
     addiu(sp, sp, gp_offset);
   }
 }

 void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
-  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);  // 16 bit immediate
+  DCHECK_LT(num_stack_slots,
+            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
   TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
 }

@@ -72,18 +72,18 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
 inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
   switch (type) {
     case kWasmI32:
-      assm->daddiu(sp, sp, -kPointerSize);
+      assm->daddiu(sp, sp, -kSystemPointerSize);
       assm->sw(reg.gp(), MemOperand(sp, 0));
       break;
     case kWasmI64:
       assm->push(reg.gp());
       break;
     case kWasmF32:
-      assm->daddiu(sp, sp, -kPointerSize);
+      assm->daddiu(sp, sp, -kSystemPointerSize);
       assm->swc1(reg.fp(), MemOperand(sp, 0));
       break;
     case kWasmF64:
-      assm->daddiu(sp, sp, -kPointerSize);
+      assm->daddiu(sp, sp, -kSystemPointerSize);
       assm->Sdc1(reg.fp(), MemOperand(sp, 0));
       break;
     default:
@@ -406,7 +406,7 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
                                            ValueType type) {
-  MemOperand src(fp, kPointerSize * (caller_slot_idx + 1));
+  MemOperand src(fp, kSystemPointerSize * (caller_slot_idx + 1));
   liftoff::Load(this, dst, src, type);
 }
@@ -1172,11 +1172,11 @@ void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
   LiftoffRegList gp_regs = regs & kGpCacheRegList;
   unsigned num_gp_regs = gp_regs.GetNumRegsSet();
   if (num_gp_regs) {
-    unsigned offset = num_gp_regs * kPointerSize;
+    unsigned offset = num_gp_regs * kSystemPointerSize;
     daddiu(sp, sp, -offset);
     while (!gp_regs.is_empty()) {
       LiftoffRegister reg = gp_regs.GetFirstRegSet();
-      offset -= kPointerSize;
+      offset -= kSystemPointerSize;
       sd(reg.gp(), MemOperand(sp, offset));
       gp_regs.clear(reg);
     }
@@ -1213,13 +1213,14 @@ void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
       LiftoffRegister reg = gp_regs.GetLastRegSet();
       ld(reg.gp(), MemOperand(sp, gp_offset));
       gp_regs.clear(reg);
-      gp_offset += kPointerSize;
+      gp_offset += kSystemPointerSize;
     }
     daddiu(sp, sp, gp_offset);
   }
 }

 void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
-  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);  // 16 bit immediate
+  DCHECK_LT(num_stack_slots,
+            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
   TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
 }

@@ -112,11 +112,11 @@ inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
       assm->pushq(reg.gp());
       break;
     case kWasmF32:
-      assm->subp(rsp, Immediate(kPointerSize));
+      assm->subp(rsp, Immediate(kSystemPointerSize));
       assm->Movss(Operand(rsp, 0), reg.fp());
       break;
     case kWasmF64:
-      assm->subp(rsp, Immediate(kPointerSize));
+      assm->subp(rsp, Immediate(kSystemPointerSize));
       assm->Movsd(Operand(rsp, 0), reg.fp());
       break;
     default:
@@ -311,7 +311,7 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
                                            ValueType type) {
-  Operand src(rbp, kPointerSize * (caller_slot_idx + 1));
+  Operand src(rbp, kSystemPointerSize * (caller_slot_idx + 1));
   liftoff::Load(this, dst, src, type);
 }
@@ -1457,8 +1457,9 @@ void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
 }

 void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
-  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);  // 16 bit immediate
-  ret(static_cast<int>(num_stack_slots * kPointerSize));
+  DCHECK_LT(num_stack_slots,
+            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
+  ret(static_cast<int>(num_stack_slots * kSystemPointerSize));
 }

 void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
@@ -989,7 +989,7 @@ size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
   constexpr size_t kCodeSizeMultiplier = 4;
   constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
   constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
-  constexpr size_t kImportSize = 64 * kPointerSize;
+  constexpr size_t kImportSize = 64 * kSystemPointerSize;

   size_t estimate = kStaticCodeSize;
   for (auto& function : module->functions) {
@@ -205,7 +205,7 @@ class LinkageAllocator {
   // Stackslots are counted upwards starting from 0 (or the offset set by
   // {SetStackOffset}.
   int NumStackSlots(MachineRepresentation type) {
-    return std::max(1, ElementSizeInBytes(type) / kPointerSize);
+    return std::max(1, ElementSizeInBytes(type) / kSystemPointerSize);
   }

   // Stackslots are counted upwards starting from 0 (or the offset set by
@@ -231,6 +231,14 @@ inline bool WasmInstanceObject::has_indirect_function_table() {
   return indirect_function_table_sig_ids() != nullptr;
 }

+void WasmInstanceObject::clear_padding() {
+  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
+    DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
+    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
+           FIELD_SIZE(kOptionalPaddingOffset));
+  }
+}
+
 IndirectFunctionTableEntry::IndirectFunctionTableEntry(
     Handle<WasmInstanceObject> instance, int index)
     : instance_(instance), index_(index) {
@@ -145,13 +145,14 @@ class WasmInstanceNativeAllocations {
 };

 size_t EstimateNativeAllocationsSize(const WasmModule* module) {
-  size_t estimate = sizeof(WasmInstanceNativeAllocations) +
-                    (1 * kPointerSize * module->num_imported_mutable_globals) +
-                    (2 * kPointerSize * module->num_imported_functions) +
-                    ((kPointerSize + sizeof(uint32_t) + sizeof(uint8_t)) *
-                     module->num_declared_data_segments);
+  size_t estimate =
+      sizeof(WasmInstanceNativeAllocations) +
+      (1 * kSystemPointerSize * module->num_imported_mutable_globals) +
+      (2 * kSystemPointerSize * module->num_imported_functions) +
+      ((kSystemPointerSize + sizeof(uint32_t) + sizeof(uint8_t)) *
+       module->num_declared_data_segments);
   for (auto& table : module->tables) {
-    estimate += 3 * kPointerSize * table.initial_size;
+    estimate += 3 * kSystemPointerSize * table.initial_size;
   }
   return estimate;
 }
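To make the size formula above concrete, a worked example with hypothetical module counts (64-bit target, so kSystemPointerSize == 8):

// 2 imported mutable globals, 3 imported functions, 1 declared data segment,
// one table with 4 initial entries:
//   estimate = sizeof(WasmInstanceNativeAllocations)
//            + 1 * 8 * 2        // imported mutable globals   = 16
//            + 2 * 8 * 3        // imported functions         = 48
//            + (8 + 4 + 1) * 1  // data segment bookkeeping   = 13
//            + 3 * 8 * 4        // table entries              = 96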
@@ -1283,6 +1284,7 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(

   Handle<WasmInstanceObject> instance(
       WasmInstanceObject::cast(*instance_object), isolate);
+  instance->clear_padding();

   // Initialize the imported function arrays.
   auto module = module_object->module();
@@ -126,13 +126,13 @@ class WasmModuleObject : public JSObject {
   DECL_VERIFIER(WasmModuleObject)

   // Layout description.
-#define WASM_MODULE_OBJECT_FIELDS(V)       \
-  V(kNativeModuleOffset, kPointerSize)     \
-  V(kExportWrappersOffset, kPointerSize)   \
-  V(kScriptOffset, kPointerSize)           \
-  V(kWeakInstanceListOffset, kPointerSize) \
-  V(kAsmJsOffsetTableOffset, kPointerSize) \
-  V(kBreakPointInfosOffset, kPointerSize)  \
+#define WASM_MODULE_OBJECT_FIELDS(V)      \
+  V(kNativeModuleOffset, kTaggedSize)     \
+  V(kExportWrappersOffset, kTaggedSize)   \
+  V(kScriptOffset, kTaggedSize)           \
+  V(kWeakInstanceListOffset, kTaggedSize) \
+  V(kAsmJsOffsetTableOffset, kTaggedSize) \
+  V(kBreakPointInfosOffset, kTaggedSize)  \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
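For readers unfamiliar with the layout macros: a minimal sketch of what DEFINE_FIELD_OFFSET_CONSTANTS generates from a (name, size) list (an illustrative expansion, not the exact V8 macro):

// Each field starts where the previous one ended; the kFooOffsetEnd
// constants are what FIELD_SIZE() consumes.
enum {
  kNativeModuleOffset = 0,  // really JSObject::kHeaderSize
  kNativeModuleOffsetEnd = kNativeModuleOffset + kTaggedSize - 1,
  kExportWrappersOffset = kNativeModuleOffsetEnd + 1,
  kExportWrappersOffsetEnd = kExportWrappersOffset + kTaggedSize - 1,
  // ...and so on through V(kSize, 0), which marks the total object size.
};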
@@ -259,10 +259,10 @@ class WasmTableObject : public JSObject {
   DECL_ACCESSORS2(dispatch_tables, FixedArray)

   // Layout description.
-#define WASM_TABLE_OBJECT_FIELDS(V)      \
-  V(kFunctionsOffset, kPointerSize)      \
-  V(kMaximumLengthOffset, kPointerSize)  \
-  V(kDispatchTablesOffset, kPointerSize) \
+#define WASM_TABLE_OBJECT_FIELDS(V)     \
+  V(kFunctionsOffset, kTaggedSize)      \
+  V(kMaximumLengthOffset, kTaggedSize)  \
+  V(kDispatchTablesOffset, kTaggedSize) \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, WASM_TABLE_OBJECT_FIELDS)
@@ -303,10 +303,10 @@ class WasmMemoryObject : public JSObject {
   DECL_OPTIONAL_ACCESSORS2(instances, WeakArrayList)

   // Layout description.
-#define WASM_MEMORY_OBJECT_FIELDS(V)   \
-  V(kArrayBufferOffset, kPointerSize)  \
-  V(kMaximumPagesOffset, kPointerSize) \
-  V(kInstancesOffset, kPointerSize)    \
+#define WASM_MEMORY_OBJECT_FIELDS(V)  \
+  V(kArrayBufferOffset, kTaggedSize)  \
+  V(kMaximumPagesOffset, kTaggedSize) \
+  V(kInstancesOffset, kTaggedSize)    \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@@ -350,10 +350,10 @@ class WasmGlobalObject : public JSObject {
 #undef WASM_GLOBAL_OBJECT_FLAGS_BIT_FIELDS

   // Layout description.
-#define WASM_GLOBAL_OBJECT_FIELDS(V)  \
-  V(kArrayBufferOffset, kPointerSize) \
-  V(kOffsetOffset, kPointerSize)      \
-  V(kFlagsOffset, kPointerSize)       \
+#define WASM_GLOBAL_OBJECT_FIELDS(V) \
+  V(kArrayBufferOffset, kTaggedSize) \
+  V(kOffsetOffset, kTaggedSize)      \
+  V(kFlagsOffset, kTaggedSize)       \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@@ -423,52 +423,62 @@ class WasmInstanceObject : public JSObject {
   DECL_PRIMITIVE_ACCESSORS(data_segment_sizes, uint32_t*)
   DECL_PRIMITIVE_ACCESSORS(dropped_data_segments, byte*)

+  V8_INLINE void clear_padding();
+
   // Dispatched behavior.
   DECL_PRINTER(WasmInstanceObject)
   DECL_VERIFIER(WasmInstanceObject)

   // Layout description.
-#define WASM_INSTANCE_OBJECT_FIELDS(V)                                  \
-  V(kModuleObjectOffset, kPointerSize)                                  \
-  V(kExportsObjectOffset, kPointerSize)                                 \
-  V(kNativeContextOffset, kPointerSize)                                 \
-  V(kMemoryObjectOffset, kPointerSize)                                  \
-  V(kUntaggedGlobalsBufferOffset, kPointerSize)                         \
-  V(kTaggedGlobalsBufferOffset, kPointerSize)                           \
-  V(kImportedMutableGlobalsBuffersOffset, kPointerSize)                 \
-  V(kDebugInfoOffset, kPointerSize)                                     \
-  V(kTableObjectOffset, kPointerSize)                                   \
-  V(kImportedFunctionRefsOffset, kPointerSize)                          \
-  V(kIndirectFunctionTableRefsOffset, kPointerSize)                     \
-  V(kManagedNativeAllocationsOffset, kPointerSize)                      \
-  V(kExceptionsTableOffset, kPointerSize)                               \
-  V(kUndefinedValueOffset, kPointerSize)                                \
-  V(kNullValueOffset, kPointerSize)                                     \
-  V(kCEntryStubOffset, kPointerSize)                                    \
-  V(kFirstUntaggedOffset, 0) /* marker */                               \
-  V(kMemoryStartOffset, kPointerSize) /* untagged */                    \
-  V(kMemorySizeOffset, kSizetSize) /* untagged */                       \
-  V(kMemoryMaskOffset, kSizetSize) /* untagged */                       \
-  V(kIsolateRootOffset, kPointerSize) /* untagged */                    \
-  V(kStackLimitAddressOffset, kPointerSize) /* untagged */              \
-  V(kRealStackLimitAddressOffset, kPointerSize) /* untagged */          \
-  V(kImportedFunctionTargetsOffset, kPointerSize) /* untagged */        \
-  V(kGlobalsStartOffset, kPointerSize) /* untagged */                   \
-  V(kImportedMutableGlobalsOffset, kPointerSize) /* untagged */         \
-  V(kIndirectFunctionTableSigIdsOffset, kPointerSize) /* untagged */    \
-  V(kIndirectFunctionTableTargetsOffset, kPointerSize) /* untagged */   \
-  V(kJumpTableStartOffset, kPointerSize) /* untagged */                 \
-  V(kDataSegmentStartsOffset, kPointerSize) /* untagged */              \
-  V(kDataSegmentSizesOffset, kPointerSize) /* untagged */               \
-  V(kDroppedDataSegmentsOffset, kPointerSize) /* untagged */            \
-  V(kIndirectFunctionTableSizeOffset, kUInt32Size) /* untagged */       \
-  V(k64BitArchPaddingOffset, kPointerSize - kUInt32Size) /* padding */  \
+#define WASM_INSTANCE_OBJECT_FIELDS(V)                                    \
+  /* Tagged values. */                                                    \
+  V(kModuleObjectOffset, kTaggedSize)                                     \
+  V(kExportsObjectOffset, kTaggedSize)                                    \
+  V(kNativeContextOffset, kTaggedSize)                                    \
+  V(kMemoryObjectOffset, kTaggedSize)                                     \
+  V(kUntaggedGlobalsBufferOffset, kTaggedSize)                            \
+  V(kTaggedGlobalsBufferOffset, kTaggedSize)                              \
+  V(kImportedMutableGlobalsBuffersOffset, kTaggedSize)                    \
+  V(kDebugInfoOffset, kTaggedSize)                                        \
+  V(kTableObjectOffset, kTaggedSize)                                      \
+  V(kImportedFunctionRefsOffset, kTaggedSize)                             \
+  V(kIndirectFunctionTableRefsOffset, kTaggedSize)                        \
+  V(kManagedNativeAllocationsOffset, kTaggedSize)                         \
+  V(kExceptionsTableOffset, kTaggedSize)                                  \
+  V(kUndefinedValueOffset, kTaggedSize)                                   \
+  V(kNullValueOffset, kTaggedSize)                                        \
+  V(kCEntryStubOffset, kTaggedSize)                                       \
+  V(kEndOfTaggedFieldsOffset, 0)                                          \
+  /* Raw data. */                                                         \
+  V(kIndirectFunctionTableSizeOffset, kUInt32Size)                        \
+  /* Optional padding to align system pointer size fields */              \
+  V(kOptionalPaddingOffset, POINTER_SIZE_PADDING(kOptionalPaddingOffset)) \
+  V(kFirstSystemPointerFieldOffset, 0)                                    \
+  V(kMemoryStartOffset, kSystemPointerSize)                               \
+  V(kMemorySizeOffset, kSizetSize)                                        \
+  V(kMemoryMaskOffset, kSizetSize)                                        \
+  V(kIsolateRootOffset, kSystemPointerSize)                               \
+  V(kStackLimitAddressOffset, kSystemPointerSize)                         \
+  V(kRealStackLimitAddressOffset, kSystemPointerSize)                     \
+  V(kImportedFunctionTargetsOffset, kSystemPointerSize)                   \
+  V(kGlobalsStartOffset, kSystemPointerSize)                              \
+  V(kImportedMutableGlobalsOffset, kSystemPointerSize)                    \
+  V(kIndirectFunctionTableSigIdsOffset, kSystemPointerSize)               \
+  V(kIndirectFunctionTableTargetsOffset, kSystemPointerSize)              \
+  V(kJumpTableStartOffset, kSystemPointerSize)                            \
+  V(kDataSegmentStartsOffset, kSystemPointerSize)                         \
+  V(kDataSegmentSizesOffset, kSystemPointerSize)                          \
+  V(kDroppedDataSegmentsOffset, kSystemPointerSize)                       \
+  /* Header size. */                                                      \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
                                 WASM_INSTANCE_OBJECT_FIELDS)
 #undef WASM_INSTANCE_OBJECT_FIELDS

+  STATIC_ASSERT(IsAligned(kFirstSystemPointerFieldOffset, kSystemPointerSize));
+  STATIC_ASSERT(IsAligned(kSize, kTaggedSize));
+
   V8_EXPORT_PRIVATE const wasm::WasmModule* module();

   static bool EnsureIndirectFunctionTableWithMinimumSize(
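The new kOptionalPaddingOffset entry relies on POINTER_SIZE_PADDING to contribute either 0 or 4 bytes. A plausible definition, offered only as a sketch (the real macro lives elsewhere in V8 and may differ):

// Rounds {offset} up to the next kSystemPointerSize boundary and yields the
// gap: 0 when already aligned (e.g. 32-bit targets after the uint32 field),
// 4 on 64-bit targets.
#define POINTER_SIZE_PADDING(offset) \
  ((kSystemPointerSize - ((offset) % kSystemPointerSize)) % kSystemPointerSize)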
@@ -505,9 +515,9 @@ class WasmExceptionObject : public JSObject {
   DECL_ACCESSORS(exception_tag, HeapObject)

   // Layout description.
-#define WASM_EXCEPTION_OBJECT_FIELDS(V)       \
-  V(kSerializedSignatureOffset, kPointerSize) \
-  V(kExceptionTagOffset, kPointerSize)        \
+#define WASM_EXCEPTION_OBJECT_FIELDS(V)      \
+  V(kSerializedSignatureOffset, kTaggedSize) \
+  V(kExceptionTagOffset, kTaggedSize)        \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@@ -564,11 +574,11 @@ class WasmExportedFunctionData : public Struct {
   DECL_VERIFIER(WasmExportedFunctionData)

   // Layout description.
-#define WASM_EXPORTED_FUNCTION_DATA_FIELDS(V)       \
-  V(kWrapperCodeOffset, kPointerSize)               \
-  V(kInstanceOffset, kPointerSize)                  \
-  V(kJumpTableOffsetOffset, kPointerSize) /* Smi */ \
-  V(kFunctionIndexOffset, kPointerSize) /* Smi */   \
+#define WASM_EXPORTED_FUNCTION_DATA_FIELDS(V)      \
+  V(kWrapperCodeOffset, kTaggedSize)               \
+  V(kInstanceOffset, kTaggedSize)                  \
+  V(kJumpTableOffsetOffset, kTaggedSize) /* Smi */ \
+  V(kFunctionIndexOffset, kTaggedSize) /* Smi */   \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
@@ -595,13 +605,13 @@ class WasmDebugInfo : public Struct {
   DECL_VERIFIER(WasmDebugInfo)

   // Layout description.
-#define WASM_DEBUG_INFO_FIELDS(V)            \
-  V(kInstanceOffset, kPointerSize)           \
-  V(kInterpreterHandleOffset, kPointerSize)  \
-  V(kInterpretedFunctionsOffset, kPointerSize) \
-  V(kLocalsNamesOffset, kPointerSize)        \
-  V(kCWasmEntriesOffset, kPointerSize)       \
-  V(kCWasmEntryMapOffset, kPointerSize)      \
+#define WASM_DEBUG_INFO_FIELDS(V)             \
+  V(kInstanceOffset, kTaggedSize)             \
+  V(kInterpreterHandleOffset, kTaggedSize)    \
+  V(kInterpretedFunctionsOffset, kTaggedSize) \
+  V(kLocalsNamesOffset, kTaggedSize)          \
+  V(kCWasmEntriesOffset, kTaggedSize)         \
+  V(kCWasmEntryMapOffset, kTaggedSize)        \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, WASM_DEBUG_INFO_FIELDS)
@@ -695,7 +705,7 @@ class WasmExceptionTag : public Struct {

   // Layout description.
 #define WASM_EXCEPTION_TAG_FIELDS(V) \
-  V(kIndexOffset, kPointerSize)      \
+  V(kIndexOffset, kTaggedSize)       \
   /* Total size. */                  \
   V(kSize, 0)

@@ -722,12 +732,12 @@ class AsmWasmData : public Struct {
   DECL_VERIFIER(AsmWasmData)

   // Layout description.
-#define ASM_WASM_DATA_FIELDS(V)               \
-  V(kManagedNativeModuleOffset, kPointerSize) \
-  V(kExportWrappersOffset, kPointerSize)      \
-  V(kAsmJsOffsetTableOffset, kPointerSize)    \
-  V(kUsesBitsetOffset, kPointerSize)          \
-  /* Total size. */                           \
+#define ASM_WASM_DATA_FIELDS(V)              \
+  V(kManagedNativeModuleOffset, kTaggedSize) \
+  V(kExportWrappersOffset, kTaggedSize)      \
+  V(kAsmJsOffsetTableOffset, kTaggedSize)    \
+  V(kUsesBitsetOffset, kTaggedSize)          \
+  /* Total size. */                          \
   V(kSize, 0)

   DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize, ASM_WASM_DATA_FIELDS)
@@ -340,7 +340,7 @@ Handle<Code> WasmFunctionWrapper::GetWrapperCode() {
     auto call_descriptor =
         compiler::Linkage::GetSimplifiedCDescriptor(zone(), signature_, true);

-    if (kPointerSize == 4) {
+    if (kSystemPointerSize == 4) {
       size_t num_params = signature_->parameter_count();
       // One additional parameter for the pointer of the return value.
       Signature<MachineRepresentation>::Builder rep_builder(zone(), 1,