[wasm][cleanup] Simplify smi conversions, rename one function

Changes:
- Rename Uint32ToUintptr() -> BuildChangeUint32ToUintPtr() for
  consistency.
- Simplify smi conversions (see the Smi arithmetic sketch before the diff
  below).
- Remove an unneeded TruncateInt64ToInt32() conversion.

Change-Id: I6f3213fc57e03019d2cb26592ecd4db396bd01d5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2817600
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73893}
Author: Manos Koukoutos, 2021-04-09 12:34:14 +00:00, committed by Commit Bot
Commit: 1c6d536938 (parent: 4372cf4490)
2 changed files with 36 additions and 37 deletions
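
All the Smi conversions touched in this change boil down to shifting by
kSmiShiftSize + kSmiTagSize. The following is a minimal standalone sketch of
that arithmetic, assuming V8's usual constants (kSmiTagSize = 1; kSmiShiftSize
is 0 with pointer compression and 31 on a 64-bit host without it); it is an
illustration, not V8 code:

    #include <cstdint>
    #include <cstdio>

    // Assumed constants mirroring V8 defaults; illustration only.
    constexpr int kSmiTagSize = 1;
    constexpr int kShiftCompressed = 0 + kSmiTagSize;  // pointer compression on
    constexpr int kShiftFull = 31 + kSmiTagSize;       // 64-bit, no compression

    // With pointer compression only the lower 32 bits of a tagged value
    // matter, so tagging and untagging stay in 32-bit arithmetic
    // (Word32Shl / Word32Sar in the graphs below).
    int32_t TagCompressed(int32_t value) {
      return static_cast<int32_t>(static_cast<uint32_t>(value)
                                  << kShiftCompressed);
    }
    int32_t UntagCompressed(int32_t smi) { return smi >> kShiftCompressed; }

    // Without compression the payload sits in the upper half of a full word
    // (WordShl / WordSar on pointer-sized values).
    intptr_t TagFull(int32_t value) {
      return static_cast<intptr_t>(
          static_cast<uintptr_t>(static_cast<intptr_t>(value)) << kShiftFull);
    }
    intptr_t UntagFull(intptr_t smi) { return smi >> kShiftFull; }

    int main() {
      printf("%d %d\n", UntagCompressed(TagCompressed(42)),
             UntagCompressed(TagCompressed(-7)));                // 42 -7
      printf("%d\n", static_cast<int>(UntagFull(TagFull(-7))));  // -7
    }

In particular, under pointer compression an untag is just a 32-bit arithmetic
shift of the value as-is, which is why the TruncateInt64ToInt32 removed from
BuildChangeSmiToInt32 below was not needed.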


@@ -2944,7 +2944,7 @@ Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig,
Node* imported_function_refs =
LOAD_INSTANCE_FIELD(ImportedFunctionRefs, MachineType::TaggedPointer());
// Access fixed array at {header_size - tag + func_index * kTaggedSize}.
-Node* func_index_intptr = Uint32ToUintptr(func_index);
+Node* func_index_intptr = BuildChangeUint32ToUintPtr(func_index);
Node* ref_node = gasm_->LoadFixedArrayElement(
imported_function_refs, func_index_intptr, MachineType::TaggedPointer());
@@ -3080,7 +3080,7 @@ Node* WasmGraphBuilder::BuildIndirectCall(uint32_t table_index,
table_type.is_nullable();
if (needs_signature_check) {
Node* int32_scaled_key =
-Uint32ToUintptr(gasm_->Word32Shl(key, Int32Constant(2)));
+BuildChangeUint32ToUintPtr(gasm_->Word32Shl(key, Int32Constant(2)));
Node* loaded_sig = gasm_->LoadFromObject(MachineType::Int32(), ift_sig_ids,
int32_scaled_key);
@@ -3099,7 +3099,7 @@ Node* WasmGraphBuilder::BuildIndirectCall(uint32_t table_index,
}
}
-Node* key_intptr = Uint32ToUintptr(key);
+Node* key_intptr = BuildChangeUint32ToUintPtr(key);
Node* target_instance = gasm_->LoadFixedArrayElement(
ift_instances, key_intptr, MachineType::TaggedPointer());
@@ -3345,6 +3345,25 @@ Node* WasmGraphBuilder::BuildChangeIntPtrToInt64(Node* value) {
: value;
}
+Node* WasmGraphBuilder::BuildChangeUint32ToUintPtr(Node* node) {
+if (mcgraph()->machine()->Is32()) return node;
+// Fold instances of ChangeUint32ToUint64(IntConstant) directly.
+Uint32Matcher matcher(node);
+if (matcher.HasResolvedValue()) {
+uintptr_t value = matcher.ResolvedValue();
+return mcgraph()->IntPtrConstant(bit_cast<intptr_t>(value));
+}
+return gasm_->ChangeUint32ToUint64(node);
+}
+Node* WasmGraphBuilder::BuildSmiShiftBitsConstant() {
+return gasm_->IntPtrConstant(kSmiShiftSize + kSmiTagSize);
+}
+Node* WasmGraphBuilder::BuildSmiShiftBitsConstant32() {
+return Int32Constant(kSmiShiftSize + kSmiTagSize);
+}
Node* WasmGraphBuilder::BuildChangeInt32ToSmi(Node* value) {
// With pointer compression, only the lower 32 bits are used.
return COMPRESS_POINTERS_BOOL
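
The constant fold in the new BuildChangeUint32ToUintPtr() above replaces a
run-time ChangeUint32ToUint64 with an IntPtr constant when the input is a
known 32-bit constant. A small standalone sketch of why the two paths agree
on a 64-bit host (hypothetical helper names, not V8 API):

    #include <cstdint>
    #include <cstdio>
    #include <initializer_list>

    // What the emitted ChangeUint32ToUint64 machine operator computes at
    // run time.
    uintptr_t ZeroExtendAtRunTime(uint32_t v) {
      return static_cast<uintptr_t>(v);
    }

    // What the graph-build-time fold computes for a constant input: the
    // zero-extended value reinterpreted as the signed intptr_t stored by
    // IntPtrConstant (the bit_cast<intptr_t>(value) in the hunk above).
    intptr_t FoldConstant(uint32_t v) {
      uintptr_t zero_extended = v;
      return static_cast<intptr_t>(zero_extended);
    }

    int main() {
      for (uint32_t c : {0u, 1u, 0x80000000u, 0xFFFFFFFFu}) {
        // Both paths yield the same pointer-sized bit pattern.
        printf("%llx %llx\n",
               static_cast<unsigned long long>(ZeroExtendAtRunTime(c)),
               static_cast<unsigned long long>(
                   static_cast<uintptr_t>(FoldConstant(c))));
      }
    }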
@@ -3356,31 +3375,22 @@ Node* WasmGraphBuilder::BuildChangeInt32ToSmi(Node* value) {
Node* WasmGraphBuilder::BuildChangeUint31ToSmi(Node* value) {
return COMPRESS_POINTERS_BOOL
? gasm_->Word32Shl(value, BuildSmiShiftBitsConstant32())
-: gasm_->WordShl(Uint32ToUintptr(value),
+: gasm_->WordShl(BuildChangeUint32ToUintPtr(value),
BuildSmiShiftBitsConstant());
}
-Node* WasmGraphBuilder::BuildSmiShiftBitsConstant() {
-return gasm_->IntPtrConstant(kSmiShiftSize + kSmiTagSize);
-}
-Node* WasmGraphBuilder::BuildSmiShiftBitsConstant32() {
-return Int32Constant(kSmiShiftSize + kSmiTagSize);
-}
Node* WasmGraphBuilder::BuildChangeSmiToInt32(Node* value) {
return COMPRESS_POINTERS_BOOL
-? gasm_->Word32Sar(gasm_->TruncateInt64ToInt32(value),
-BuildSmiShiftBitsConstant32())
-: BuildTruncateIntPtrToInt32(BuildChangeSmiToIntPtr(value));
+? gasm_->Word32Sar(value, BuildSmiShiftBitsConstant32())
+: BuildTruncateIntPtrToInt32(
+gasm_->WordSar(value, BuildSmiShiftBitsConstant()));
}
Node* WasmGraphBuilder::BuildChangeSmiToIntPtr(Node* value) {
-if (COMPRESS_POINTERS_BOOL) {
-value = BuildChangeSmiToInt32(value);
-return BuildChangeInt32ToIntPtr(value);
-}
-return gasm_->WordSar(value, BuildSmiShiftBitsConstant());
+return COMPRESS_POINTERS_BOOL
+? BuildChangeInt32ToIntPtr(
+gasm_->Word32Sar(value, BuildSmiShiftBitsConstant32()))
+: gasm_->WordSar(value, BuildSmiShiftBitsConstant());
}
Node* WasmGraphBuilder::BuildConvertUint32ToSmiWithSaturation(Node* value,
@@ -3565,8 +3575,8 @@ void WasmGraphBuilder::GetBaseAndOffsetForImportedMutableExternRefGlobal(
// From the index, calculate the actual offset in the FixedArray. This
// is kHeaderSize + (index * kTaggedSize). kHeaderSize can be acquired with
// wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0).
-Node* index_times_tagged_size =
-gasm_->IntMul(Uint32ToUintptr(index), Int32Constant(kTaggedSize));
+Node* index_times_tagged_size = gasm_->IntMul(
+BuildChangeUint32ToUintPtr(index), Int32Constant(kTaggedSize));
*offset = gasm_->IntAdd(
index_times_tagged_size,
mcgraph()->IntPtrConstant(
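
The comment in the hunk above spells the offset out as kHeaderSize +
(index * kTaggedSize), with the header size obtained through
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0). A worked example with
assumed sizes (8-byte header, 4-byte compressed tagged slots; the real values
come from V8's object layout, this is illustration only):

    #include <cstdint>
    #include <cstdio>

    int main() {
      constexpr uintptr_t kAssumedHeaderSize = 8;  // assumed FixedArray header
      constexpr uintptr_t kAssumedTaggedSize = 4;  // assumed compressed slot

      uint32_t index = 3;  // element index within the FixedArray
      // Zero-extend the 32-bit index to pointer width before scaling, which
      // is what BuildChangeUint32ToUintPtr(index) does in the hunk above.
      uintptr_t offset = static_cast<uintptr_t>(index) * kAssumedTaggedSize +
                         kAssumedHeaderSize;
      printf("element %u sits at byte offset %zu\n", index,
             static_cast<size_t>(offset));  // byte offset 20
    }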
@@ -3763,7 +3773,7 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
wasm::WasmCodePosition position,
EnforceBoundsCheck enforce_check) {
DCHECK_LE(1, access_size);
-if (!env_->module->is_memory64) index = Uint32ToUintptr(index);
+if (!env_->module->is_memory64) index = BuildChangeUint32ToUintPtr(index);
if (!FLAG_wasm_bounds_checks) return index;
if (use_trap_handler() && enforce_check == kCanOmitBoundsCheck) {
@@ -4317,7 +4327,7 @@ Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
// Note that we check against the memory size ignoring the size of the
// stored value, which is conservative if misaligned. Technically, asm.js
// should never have misaligned accesses.
-index = Uint32ToUintptr(index);
+index = BuildChangeUint32ToUintPtr(index);
Diamond bounds_check(graph(), mcgraph()->common(),
gasm_->UintLessThan(index, mem_size), BranchHint::kTrue);
bounds_check.Chain(control());
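
BuildAsmjsLoadMem wraps the load in a Diamond so that an out-of-bounds asm.js
load produces a default value instead of trapping, and, as the comment above
notes, the bound is checked against the index alone because asm.js accesses
are expected to be aligned. A standalone sketch of that behaviour (the extra
access-size term below only keeps this host-side example memory-safe; it is
not part of the generated graph):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Returns the loaded value, or 0 when `index` is out of bounds,
    // mirroring the bounds_check.Phi(type, load, oob_value) shape above.
    int32_t AsmjsLoadInt32(const std::vector<uint8_t>& memory,
                           uint32_t index) {
      // Zero-extend the index to pointer width (BuildChangeUint32ToUintPtr).
      uintptr_t idx = index;
      // The graph compares only idx < mem_size; the size term is added here
      // purely so the sketch never reads past the end of the vector.
      if (idx >= memory.size() || memory.size() - idx < sizeof(int32_t)) {
        return 0;
      }
      int32_t value;
      std::memcpy(&value, memory.data() + idx, sizeof(value));
      return value;
    }

    int main() {
      std::vector<uint8_t> mem(16, 0);
      mem[0] = 42;
      // Prints "42 0" on a little-endian host: in-bounds load, then the
      // out-of-bounds default.
      printf("%d %d\n", AsmjsLoadInt32(mem, 0), AsmjsLoadInt32(mem, 1000));
    }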
@@ -4356,17 +4366,6 @@ Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
return bounds_check.Phi(type.representation(), load, oob_value);
}
-Node* WasmGraphBuilder::Uint32ToUintptr(Node* node) {
-if (mcgraph()->machine()->Is32()) return node;
-// Fold instances of ChangeUint32ToUint64(IntConstant) directly.
-Uint32Matcher matcher(node);
-if (matcher.HasResolvedValue()) {
-uintptr_t value = matcher.ResolvedValue();
-return mcgraph()->IntPtrConstant(bit_cast<intptr_t>(value));
-}
-return gasm_->ChangeUint32ToUint64(node);
-}
Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
Node* val) {
DCHECK_NOT_NULL(instance_cache_);
@@ -4391,7 +4390,7 @@ Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
index = gasm_->Word32And(index, mem_mask);
}
-index = Uint32ToUintptr(index);
+index = BuildChangeUint32ToUintPtr(index);
const Operator* store_op = mcgraph()->machine()->Store(StoreRepresentation(
type.representation(), WriteBarrierKind::kNoWriteBarrier));
Node* store = graph()->NewNode(store_op, mem_start, index, val, effect(),


@@ -530,7 +530,6 @@ class WasmGraphBuilder {
Node* CheckBoundsAndAlignment(int8_t access_size, Node* index,
uint64_t offset, wasm::WasmCodePosition);
-Node* Uint32ToUintptr(Node*);
const Operator* GetSafeLoadOperator(int offset, wasm::ValueType type);
const Operator* GetSafeStoreOperator(int offset, wasm::ValueType type);
Node* BuildChangeEndiannessStore(Node* node, MachineRepresentation rep,
@@ -631,6 +630,7 @@ class WasmGraphBuilder {
Node* BuildTruncateIntPtrToInt32(Node* value);
Node* BuildChangeInt32ToIntPtr(Node* value);
Node* BuildChangeIntPtrToInt64(Node* value);
+Node* BuildChangeUint32ToUintPtr(Node*);
Node* BuildChangeInt32ToSmi(Node* value);
Node* BuildChangeUint31ToSmi(Node* value);
Node* BuildSmiShiftBitsConstant();