[csa][cleanup] Remove Sloppy from TNodes in more methods
Remove from:
* Smi
* RawPtrT
* Oddball
* Float32T
* Float64T
* IntPtrT
* WordT
* Word32T

Bug: v8:6949, v8:11384
Change-Id: Ia79fdedd23cd09c49ada05d031a04a1a48c2d9c6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2712784
Reviewed-by: Dan Elphick <delphick@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72946}
commit 7705ab1fbb
parent ca89bf259f
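For context on what the cleanup changes at call sites: SloppyTNode<T> behaves like TNode<T> but additionally allows an implicit conversion from an untyped compiler Node*, so callers can pass raw nodes without ever stating the type; a plain TNode<T> parameter forces the caller to produce a correctly typed value, e.g. via CAST or UncheckedCast. A minimal sketch of the difference follows; the method name Example is hypothetical and not part of this CL.

// Sketch only; "Example" is a made-up method used for illustration.
// Before: a SloppyTNode<IntPtrT> parameter still accepts an untyped Node*.
TNode<Smi> CodeStubAssembler::Example(SloppyTNode<IntPtrT> value) {
  return SmiTag(value);
}
// A caller could pass a raw Node* and rely on the implicit conversion:
//   Node* raw = ...;
//   Example(raw);  // compiles; the IntPtrT claim is never spelled out
//
// After: the parameter is a plain TNode<IntPtrT>.
TNode<Smi> CodeStubAssembler::Example(TNode<IntPtrT> value) {
  return SmiTag(value);
}
// The caller now has to make the type visible at the boundary:
//   Example(UncheckedCast<IntPtrT>(raw));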
@@ -320,7 +320,7 @@ TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
   return Signed(IntPtrAdd(value, IntPtrConstant(1)));
 }
 
-TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
+TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(TNode<IntPtrT> value) {
   intptr_t constant;
   if (TryToIntPtrConstant(value, &constant)) {
     return BoolConstant(base::bits::IsPowerOfTwo(constant));
@@ -334,7 +334,7 @@ TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
                     IntPtrConstant(0));
 }
 
-TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
+TNode<Float64T> CodeStubAssembler::Float64Round(TNode<Float64T> x) {
   TNode<Float64T> one = Float64Constant(1.0);
   TNode<Float64T> one_half = Float64Constant(0.5);
 
@@ -352,7 +352,7 @@ TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
   return TNode<Float64T>::UncheckedCast(var_x.value());
 }
 
-TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
+TNode<Float64T> CodeStubAssembler::Float64Ceil(TNode<Float64T> x) {
   if (IsFloat64RoundUpSupported()) {
     return Float64RoundUp(x);
   }
@@ -404,7 +404,7 @@ TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
   return TNode<Float64T>::UncheckedCast(var_x.value());
 }
 
-TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
+TNode<Float64T> CodeStubAssembler::Float64Floor(TNode<Float64T> x) {
   if (IsFloat64RoundDownSupported()) {
     return Float64RoundDown(x);
   }
@@ -456,7 +456,7 @@ TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
   return TNode<Float64T>::UncheckedCast(var_x.value());
 }
 
-TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
+TNode<Float64T> CodeStubAssembler::Float64RoundToEven(TNode<Float64T> x) {
   if (IsFloat64RoundTiesEvenSupported()) {
     return Float64RoundTiesEven(x);
   }
@@ -487,7 +487,7 @@ TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
   return TNode<Float64T>::UncheckedCast(var_result.value());
 }
 
-TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
+TNode<Float64T> CodeStubAssembler::Float64Trunc(TNode<Float64T> x) {
   if (IsFloat64RoundTruncateSupported()) {
     return Float64RoundTruncate(x);
   }
@@ -638,7 +638,7 @@ TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
   return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
 }
 
-TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
+TNode<Smi> CodeStubAssembler::SmiTag(TNode<IntPtrT> value) {
   int32_t constant_value;
   if (TryToInt32Constant(value, &constant_value) &&
       Smi::IsValid(constant_value)) {
@@ -652,7 +652,7 @@ TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
   return smi;
 }
 
-TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
+TNode<IntPtrT> CodeStubAssembler::SmiUntag(TNode<Smi> value) {
   intptr_t constant_value;
   if (TryToIntPtrConstant(value, &constant_value)) {
     return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
@@ -665,7 +665,7 @@ TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
   return Signed(WordSarShiftOutZeros(raw_bits, SmiShiftBitsConstant()));
 }
 
-TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
+TNode<Int32T> CodeStubAssembler::SmiToInt32(TNode<Smi> value) {
   if (COMPRESS_POINTERS_BOOL) {
     return Signed(Word32SarShiftOutZeros(
         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
@@ -675,7 +675,7 @@ TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
   return TruncateIntPtrToInt32(result);
 }
 
-TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
+TNode<Float64T> CodeStubAssembler::SmiToFloat64(TNode<Smi> value) {
   return ChangeInt32ToFloat64(SmiToInt32(value));
 }
 
@@ -999,8 +999,7 @@ TNode<Int32T> CodeStubAssembler::TruncateWordToInt32(TNode<WordT> value) {
   return ReinterpretCast<Int32T>(value);
 }
 
-TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
-    SloppyTNode<IntPtrT> value) {
+TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(TNode<IntPtrT> value) {
   if (Is64()) {
     return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
   }
@@ -2110,7 +2109,7 @@ void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
 }
 
 TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
-    TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
+    TNode<PropertyArray> object, TNode<IntPtrT> index) {
   int additional_offset = 0;
   LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
   return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
@@ -2148,7 +2147,7 @@ TNode<RawPtrT> CodeStubAssembler::LoadJSTypedArrayDataPtr(
 }
 
 TNode<BigInt> CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
-    SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset) {
+    TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
   if (Is64()) {
     TNode<IntPtrT> value = Load<IntPtrT>(data_pointer, offset);
     return BigIntFromInt64(value);
@@ -2273,7 +2272,7 @@ TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
 }
 
 TNode<BigInt> CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
-    SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset) {
+    TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
   Label if_zero(this), done(this);
   if (Is64()) {
     TNode<UintPtrT> value = Load<UintPtrT>(data_pointer, offset);
@@ -2452,7 +2451,7 @@ TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
   if (SmiValuesAre32Bits()) {
     return Load<Int32T>(object, offset);
   } else {
-    return SmiToInt32(Load(MachineType::TaggedSigned(), object, offset));
+    return SmiToInt32(Load<Smi>(object, offset));
   }
 }
 
@@ -2780,7 +2779,7 @@ void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
 }
 
 void CodeStubAssembler::StoreHeapNumberValue(TNode<HeapNumber> object,
-                                             SloppyTNode<Float64T> value) {
+                                             TNode<Float64T> value) {
   StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value);
 }
 
@@ -3606,8 +3605,8 @@ void CodeStubAssembler::InitializeJSObjectFromMap(
 }
 
 void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
-    TNode<HeapObject> object, TNode<Map> map,
-    SloppyTNode<IntPtrT> instance_size, int start_offset) {
+    TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
+    int start_offset) {
   STATIC_ASSERT(Map::kNoSlackTracking == 0);
   CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
                        LoadMapBitField3(map)));
@@ -3616,8 +3615,7 @@ void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
 }
 
 void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
-    TNode<HeapObject> object, TNode<Map> map,
-    SloppyTNode<IntPtrT> instance_size) {
+    TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size) {
   Comment("InitializeJSObjectBodyNoSlackTracking");
 
   // Perform in-object slack tracking if requested.
@@ -5407,8 +5405,7 @@ TNode<Number> CodeStubAssembler::ChangeFloat32ToTagged(TNode<Float32T> value) {
   return var_result.value();
 }
 
-TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
-    SloppyTNode<Float64T> value) {
+TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(TNode<Float64T> value) {
   Label if_smi(this), done(this);
   TVARIABLE(Smi, var_smi_result);
   TVARIABLE(Number, var_result);
@@ -7876,8 +7873,8 @@ TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
   return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
 }
 
-TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
-                                            SloppyTNode<IntPtrT> right) {
+TNode<IntPtrT> CodeStubAssembler::IntPtrMax(TNode<IntPtrT> left,
+                                            TNode<IntPtrT> right) {
   intptr_t left_constant;
   intptr_t right_constant;
   if (TryToIntPtrConstant(left, &left_constant) &&
@@ -7888,8 +7885,8 @@ TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
                                  right);
 }
 
-TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
-                                            SloppyTNode<IntPtrT> right) {
+TNode<IntPtrT> CodeStubAssembler::IntPtrMin(TNode<IntPtrT> left,
+                                            TNode<IntPtrT> right) {
   intptr_t left_constant;
   intptr_t right_constant;
   if (TryToIntPtrConstant(left, &left_constant) &&
@@ -9193,7 +9190,7 @@ void CodeStubAssembler::TryGetOwnProperty(
 
 void CodeStubAssembler::TryLookupElement(
     TNode<HeapObject> object, TNode<Map> map, TNode<Int32T> instance_type,
-    SloppyTNode<IntPtrT> intptr_index, Label* if_found, Label* if_absent,
+    TNode<IntPtrT> intptr_index, Label* if_found, Label* if_absent,
     Label* if_not_found, Label* if_bailout) {
   // Handle special objects in runtime.
   GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
@@ -9672,8 +9669,8 @@ CodeStubAssembler::ElementOffsetFromIndex<IntPtrT>(TNode<IntPtrT> index_node,
                                                    ElementsKind kind,
                                                    int base_size);
 
-TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
-                                                 SloppyTNode<IntPtrT> length,
+TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(TNode<IntPtrT> offset,
+                                                 TNode<IntPtrT> length,
                                                  int header_size,
                                                  ElementsKind kind) {
   // Make sure we point to the last field.
@@ -13086,7 +13083,7 @@ TNode<Number> CodeStubAssembler::BitwiseOp(TNode<Word32T> left32,
 }
 
 TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResult(
-    TNode<Context> context, TNode<Object> value, SloppyTNode<Oddball> done) {
+    TNode<Context> context, TNode<Object> value, TNode<Oddball> done) {
   CSA_ASSERT(this, IsBoolean(done));
   TNode<NativeContext> native_context = LoadNativeContext(context);
   TNode<Map> map = CAST(
@@ -537,19 +537,17 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   // Round the 32bits payload of the provided word up to the next power of two.
   TNode<IntPtrT> IntPtrRoundUpToPowerOfTwo32(TNode<IntPtrT> value);
   // Select the maximum of the two provided IntPtr values.
-  TNode<IntPtrT> IntPtrMax(SloppyTNode<IntPtrT> left,
-                           SloppyTNode<IntPtrT> right);
+  TNode<IntPtrT> IntPtrMax(TNode<IntPtrT> left, TNode<IntPtrT> right);
   // Select the minimum of the two provided IntPtr values.
-  TNode<IntPtrT> IntPtrMin(SloppyTNode<IntPtrT> left,
-                           SloppyTNode<IntPtrT> right);
+  TNode<IntPtrT> IntPtrMin(TNode<IntPtrT> left, TNode<IntPtrT> right);
   TNode<UintPtrT> UintPtrMin(TNode<UintPtrT> left, TNode<UintPtrT> right);
 
   // Float64 operations.
-  TNode<Float64T> Float64Ceil(SloppyTNode<Float64T> x);
-  TNode<Float64T> Float64Floor(SloppyTNode<Float64T> x);
-  TNode<Float64T> Float64Round(SloppyTNode<Float64T> x);
-  TNode<Float64T> Float64RoundToEven(SloppyTNode<Float64T> x);
-  TNode<Float64T> Float64Trunc(SloppyTNode<Float64T> x);
+  TNode<Float64T> Float64Ceil(TNode<Float64T> x);
+  TNode<Float64T> Float64Floor(TNode<Float64T> x);
+  TNode<Float64T> Float64Round(TNode<Float64T> x);
+  TNode<Float64T> Float64RoundToEven(TNode<Float64T> x);
+  TNode<Float64T> Float64Trunc(TNode<Float64T> x);
   // Select the minimum of the two provided Number values.
   TNode<Number> NumberMax(TNode<Number> left, TNode<Number> right);
   // Select the minimum of the two provided Number values.
@@ -559,17 +557,17 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   TNode<BoolT> IsValidPositiveSmi(TNode<IntPtrT> value);
 
   // Tag an IntPtr as a Smi value.
-  TNode<Smi> SmiTag(SloppyTNode<IntPtrT> value);
+  TNode<Smi> SmiTag(TNode<IntPtrT> value);
   // Untag a Smi value as an IntPtr.
-  TNode<IntPtrT> SmiUntag(SloppyTNode<Smi> value);
+  TNode<IntPtrT> SmiUntag(TNode<Smi> value);
 
   // Smi conversions.
-  TNode<Float64T> SmiToFloat64(SloppyTNode<Smi> value);
-  TNode<Smi> SmiFromIntPtr(SloppyTNode<IntPtrT> value) { return SmiTag(value); }
+  TNode<Float64T> SmiToFloat64(TNode<Smi> value);
+  TNode<Smi> SmiFromIntPtr(TNode<IntPtrT> value) { return SmiTag(value); }
   TNode<Smi> SmiFromInt32(SloppyTNode<Int32T> value);
   TNode<Smi> SmiFromUint32(TNode<Uint32T> value);
-  TNode<IntPtrT> SmiToIntPtr(SloppyTNode<Smi> value) { return SmiUntag(value); }
-  TNode<Int32T> SmiToInt32(SloppyTNode<Smi> value);
+  TNode<IntPtrT> SmiToIntPtr(TNode<Smi> value) { return SmiUntag(value); }
+  TNode<Int32T> SmiToInt32(TNode<Smi> value);
 
   // Smi operations.
 #define SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName, Int32OpName) \
@@ -883,7 +881,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   }
 
   TNode<Int32T> TruncateWordToInt32(TNode<WordT> value);
-  TNode<Int32T> TruncateIntPtrToInt32(SloppyTNode<IntPtrT> value);
+  TNode<Int32T> TruncateIntPtrToInt32(TNode<IntPtrT> value);
 
   // Check a value for smi-ness
   TNode<BoolT> TaggedIsSmi(TNode<MaybeObject> a);
@@ -893,7 +891,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   TNode<BoolT> TaggedIsPositiveSmi(TNode<Object> a);
   // Check that a word has a word-aligned address.
   TNode<BoolT> WordIsAligned(TNode<WordT> word, size_t alignment);
-  TNode<BoolT> WordIsPowerOfTwo(SloppyTNode<IntPtrT> value);
+  TNode<BoolT> WordIsPowerOfTwo(TNode<IntPtrT> value);
 
   // Check if lower_limit <= value <= higher_limit.
   template <typename U>
@@ -1392,7 +1390,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   }
 
   TNode<Object> LoadPropertyArrayElement(TNode<PropertyArray> object,
-                                         SloppyTNode<IntPtrT> index);
+                                         TNode<IntPtrT> index);
   TNode<IntPtrT> LoadPropertyArrayLength(TNode<PropertyArray> object);
 
   // Load an element from an array and untag it and return it as Word32.
@@ -1456,9 +1454,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                                       TNode<Int32T> elements_kind);
   // Parts of the above, factored out for readability:
   TNode<BigInt> LoadFixedBigInt64ArrayElementAsTagged(
-      SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset);
+      TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset);
   TNode<BigInt> LoadFixedBigUint64ArrayElementAsTagged(
-      SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset);
+      TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset);
   // 64-bit platforms only:
   TNode<BigInt> BigIntFromInt64(TNode<IntPtrT> value);
   TNode<BigInt> BigIntFromUint64(TNode<UintPtrT> value);
@@ -1514,8 +1512,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                                       TNode<Word32T> value);
 
   // Store the floating point value of a HeapNumber.
-  void StoreHeapNumberValue(TNode<HeapNumber> object,
-                            SloppyTNode<Float64T> value);
+  void StoreHeapNumberValue(TNode<HeapNumber> object, TNode<Float64T> value);
 
   // Store a field to an object on the heap.
   void StoreObjectField(TNode<HeapObject> object, int offset,
                         TNode<Object> value);
@@ -1523,8 +1521,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                         TNode<Object> value);
   template <class T>
   void StoreObjectFieldNoWriteBarrier(TNode<HeapObject> object,
-                                      SloppyTNode<IntPtrT> offset,
-                                      TNode<T> value) {
+                                      TNode<IntPtrT> offset, TNode<T> value) {
    int const_offset;
    if (TryToInt32Constant(offset, &const_offset)) {
      return StoreObjectFieldNoWriteBarrier<T>(object, const_offset, value);
@@ -1771,12 +1768,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
       base::Optional<TNode<FixedArray>> elements = base::nullopt,
       SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
 
-  void InitializeJSObjectBodyWithSlackTracking(
-      TNode<HeapObject> object, TNode<Map> map,
-      SloppyTNode<IntPtrT> instance_size);
+  void InitializeJSObjectBodyWithSlackTracking(TNode<HeapObject> object,
+                                               TNode<Map> map,
+                                               TNode<IntPtrT> instance_size);
   void InitializeJSObjectBodyNoSlackTracking(
-      TNode<HeapObject> object, TNode<Map> map,
-      SloppyTNode<IntPtrT> instance_size,
+      TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
       int start_offset = JSObject::kHeaderSize);
 
   TNode<BoolT> IsValidFastJSArrayCapacity(TNode<IntPtrT> capacity);
@@ -1898,7 +1894,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   // TODO(v8:9722): Return type should be JSIteratorResult
   TNode<JSObject> AllocateJSIteratorResult(TNode<Context> context,
                                            TNode<Object> value,
-                                           SloppyTNode<Oddball> done);
+                                           TNode<Oddball> done);
 
   // TODO(v8:9722): Return type should be JSIteratorResult
   TNode<JSObject> AllocateJSIteratorResultForEntry(TNode<Context> context,
@@ -2240,7 +2236,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   void TryFloat64ToSmi(TNode<Float64T> number, TVariable<Smi>* output,
                        Label* if_smi);
   TNode<Number> ChangeFloat32ToTagged(TNode<Float32T> value);
-  TNode<Number> ChangeFloat64ToTagged(SloppyTNode<Float64T> value);
+  TNode<Number> ChangeFloat64ToTagged(TNode<Float64T> value);
   TNode<Number> ChangeInt32ToTagged(SloppyTNode<Int32T> value);
   TNode<Number> ChangeUint32ToTagged(SloppyTNode<Uint32T> value);
   TNode<Number> ChangeUintPtrToTagged(TNode<UintPtrT> value);
@@ -2720,7 +2716,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
 
   // Returns true if any of the mask's bit are set in the given Smi.
   // Smi-encoding of the mask is performed implicitly!
-  TNode<BoolT> IsSetSmi(SloppyTNode<Smi> smi, int untagged_mask) {
+  TNode<BoolT> IsSetSmi(TNode<Smi> smi, int untagged_mask) {
     intptr_t mask_word = bit_cast<intptr_t>(Smi::FromInt(untagged_mask));
     return WordNotEqual(WordAnd(BitcastTaggedToWordForTagAndSmiBits(smi),
                                 IntPtrConstant(mask_word)),
@@ -3086,7 +3082,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   // chain needs to be checked. And if_bailout if the lookup is unsupported.
   void TryLookupElement(TNode<HeapObject> object, TNode<Map> map,
                         TNode<Int32T> instance_type,
-                        SloppyTNode<IntPtrT> intptr_index, Label* if_found,
+                        TNode<IntPtrT> intptr_index, Label* if_found,
                         Label* if_absent, Label* if_not_found,
                         Label* if_bailout);
 
@@ -3478,8 +3474,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                                  int base_size = 0);
 
   // Check that a field offset is within the bounds of the an object.
-  TNode<BoolT> IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
-                                SloppyTNode<IntPtrT> length, int header_size,
+  TNode<BoolT> IsOffsetInBounds(TNode<IntPtrT> offset, TNode<IntPtrT> length,
+                                int header_size,
                                 ElementsKind kind = HOLEY_ELEMENTS);
 
   // Load a builtin's code from the builtin array in the isolate.
@@ -561,23 +561,23 @@ TNode<WordT> CodeAssembler::WordPoisonOnSpeculation(TNode<WordT> value) {
 CODE_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_ASSEMBLER_BINARY_OP)
 #undef DEFINE_CODE_ASSEMBLER_BINARY_OP
 
-TNode<WordT> CodeAssembler::WordShl(SloppyTNode<WordT> value, int shift) {
+TNode<WordT> CodeAssembler::WordShl(TNode<WordT> value, int shift) {
   return (shift != 0) ? WordShl(value, IntPtrConstant(shift)) : value;
 }
 
-TNode<WordT> CodeAssembler::WordShr(SloppyTNode<WordT> value, int shift) {
+TNode<WordT> CodeAssembler::WordShr(TNode<WordT> value, int shift) {
   return (shift != 0) ? WordShr(value, IntPtrConstant(shift)) : value;
 }
 
-TNode<WordT> CodeAssembler::WordSar(SloppyTNode<WordT> value, int shift) {
+TNode<WordT> CodeAssembler::WordSar(TNode<WordT> value, int shift) {
   return (shift != 0) ? WordSar(value, IntPtrConstant(shift)) : value;
 }
 
-TNode<Word32T> CodeAssembler::Word32Shr(SloppyTNode<Word32T> value, int shift) {
+TNode<Word32T> CodeAssembler::Word32Shr(TNode<Word32T> value, int shift) {
   return (shift != 0) ? Word32Shr(value, Int32Constant(shift)) : value;
 }
 
-TNode<Word32T> CodeAssembler::Word32Sar(SloppyTNode<Word32T> value, int shift) {
+TNode<Word32T> CodeAssembler::Word32Sar(TNode<Word32T> value, int shift) {
   return (shift != 0) ? Word32Sar(value, Int32Constant(shift)) : value;
 }
 
@@ -646,8 +646,7 @@ TNode<Float64T> CodeAssembler::RoundIntPtrToFloat64(Node* value) {
   return UncheckedCast<Float64T>(raw_assembler()->ChangeInt32ToFloat64(value));
 }
 
-TNode<Int32T> CodeAssembler::TruncateFloat32ToInt32(
-    SloppyTNode<Float32T> value) {
+TNode<Int32T> CodeAssembler::TruncateFloat32ToInt32(TNode<Float32T> value) {
   return UncheckedCast<Int32T>(raw_assembler()->TruncateFloat32ToInt32(
       value, TruncateKind::kSetOverflowToMin));
 }
@@ -742,7 +742,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
         Load(MachineTypeOf<Type>::value, base, needs_poisoning));
   }
   template <class Type>
-  TNode<Type> Load(Node* base, SloppyTNode<WordT> offset,
+  TNode<Type> Load(Node* base, TNode<WordT> offset,
                    LoadSensitivity needs_poisoning = LoadSensitivity::kSafe) {
     return UncheckedCast<Type>(
         Load(MachineTypeOf<Type>::value, base, offset, needs_poisoning));
@@ -996,17 +996,17 @@ class V8_EXPORT_PRIVATE CodeAssembler {
         IntPtrSub(static_cast<Node*>(left), static_cast<Node*>(right)));
   }
 
-  TNode<WordT> WordShl(SloppyTNode<WordT> value, int shift);
-  TNode<WordT> WordShr(SloppyTNode<WordT> value, int shift);
-  TNode<WordT> WordSar(SloppyTNode<WordT> value, int shift);
+  TNode<WordT> WordShl(TNode<WordT> value, int shift);
+  TNode<WordT> WordShr(TNode<WordT> value, int shift);
+  TNode<WordT> WordSar(TNode<WordT> value, int shift);
   TNode<IntPtrT> WordShr(TNode<IntPtrT> value, int shift) {
-    return UncheckedCast<IntPtrT>(WordShr(static_cast<Node*>(value), shift));
+    return UncheckedCast<IntPtrT>(WordShr(TNode<WordT>(value), shift));
   }
   TNode<IntPtrT> WordSar(TNode<IntPtrT> value, int shift) {
-    return UncheckedCast<IntPtrT>(WordSar(static_cast<Node*>(value), shift));
+    return UncheckedCast<IntPtrT>(WordSar(TNode<WordT>(value), shift));
   }
-  TNode<Word32T> Word32Shr(SloppyTNode<Word32T> value, int shift);
-  TNode<Word32T> Word32Sar(SloppyTNode<Word32T> value, int shift);
+  TNode<Word32T> Word32Shr(TNode<Word32T> value, int shift);
+  TNode<Word32T> Word32Sar(TNode<Word32T> value, int shift);
 
   // Unary
 #define DECLARE_CODE_ASSEMBLER_UNARY_OP(name, ResType, ArgType) \
@@ -1040,7 +1040,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
   // range, make sure that overflow detection is easy. In particular, return
   // int_min instead of int_max on arm platforms by using parameter
   // kSetOverflowToMin.
-  TNode<Int32T> TruncateFloat32ToInt32(SloppyTNode<Float32T> value);
+  TNode<Int32T> TruncateFloat32ToInt32(TNode<Float32T> value);
 
   // Projections
   template <int index, class T1, class T2>
@@ -92,9 +92,10 @@ Handle<Code> BuildSetupFunction(Isolate* isolate,
     params.push_back(__ IntPtrConstant(i + 42));
   }
   DCHECK_EQ(param_count + 1, params.size());
-  Node* raw_result = tester.raw_assembler_for_testing()->CallN(
-      caller_descriptor, param_count + 1, params.data());
-  __ Return(__ SmiTag(raw_result));
+  TNode<IntPtrT> intptr_result =
+      __ UncheckedCast<IntPtrT>(tester.raw_assembler_for_testing()->CallN(
+          caller_descriptor, param_count + 1, params.data()));
+  __ Return(__ SmiTag(intptr_result));
   return tester.GenerateCodeCloseAndEscape();
 }
 
@@ -73,9 +73,10 @@ Handle<Code> BuildSetupFunction(Isolate* isolate,
     params.push_back(__ IntPtrConstant(i + 42));
   }
   DCHECK_EQ(param_count + 1, params.size());
-  Node* raw_result = tester.raw_assembler_for_testing()->CallN(
-      caller_descriptor, param_count + 1, params.data());
-  __ Return(__ SmiTag(raw_result));
+  TNode<IntPtrT> intptr_result =
+      __ UncheckedCast<IntPtrT>(tester.raw_assembler_for_testing()->CallN(
+          caller_descriptor, param_count + 1, params.data()));
+  __ Return(__ SmiTag(intptr_result));
   return tester.GenerateCodeCloseAndEscape();
 }
 
@@ -32,7 +32,7 @@ void TestStubCacheOffsetCalculation(StubCache::Table table) {
     auto map = m.Parameter<Map>(2);
     TNode<IntPtrT> primary_offset =
         m.StubCachePrimaryOffsetForTesting(name, map);
-    Node* result;
+    TNode<IntPtrT> result;
     if (table == StubCache::kPrimary) {
       result = primary_offset;
     } else {
@@ -65,7 +65,7 @@ TEST(CallCFunction) {
 
     MachineType type_intptr = MachineType::IntPtr();
 
-    Node* const result =
+    TNode<IntPtrT> const result = m.UncheckedCast<IntPtrT>(
         m.CallCFunction(fun_constant, type_intptr,
                         std::make_pair(type_intptr, m.IntPtrConstant(0)),
                         std::make_pair(type_intptr, m.IntPtrConstant(1)),
@@ -76,7 +76,7 @@ TEST(CallCFunction) {
                         std::make_pair(type_intptr, m.IntPtrConstant(6)),
                         std::make_pair(type_intptr, m.IntPtrConstant(7)),
                         std::make_pair(type_intptr, m.IntPtrConstant(8)),
-                        std::make_pair(type_intptr, m.IntPtrConstant(9)));
+                        std::make_pair(type_intptr, m.IntPtrConstant(9))));
     m.Return(m.SmiTag(result));
   }
 
@@ -99,11 +99,12 @@ TEST(CallCFunctionWithCallerSavedRegisters) {
 
     MachineType type_intptr = MachineType::IntPtr();
 
-    Node* const result = m.CallCFunctionWithCallerSavedRegisters(
-        fun_constant, type_intptr, kSaveFPRegs,
-        std::make_pair(type_intptr, m.IntPtrConstant(0)),
-        std::make_pair(type_intptr, m.IntPtrConstant(1)),
-        std::make_pair(type_intptr, m.IntPtrConstant(2)));
+    TNode<IntPtrT> const result =
+        m.UncheckedCast<IntPtrT>(m.CallCFunctionWithCallerSavedRegisters(
+            fun_constant, type_intptr, kSaveFPRegs,
+            std::make_pair(type_intptr, m.IntPtrConstant(0)),
+            std::make_pair(type_intptr, m.IntPtrConstant(1)),
+            std::make_pair(type_intptr, m.IntPtrConstant(2))));
     m.Return(m.SmiTag(result));
   }
 
@@ -3855,8 +3856,8 @@ TEST(InstructionSchedulingCallerSavedRegisters) {
   CodeStubAssembler m(asm_tester.state());
 
   {
-    Node* x = m.SmiUntag(m.Parameter<Smi>(1));
-    Node* y = m.WordOr(m.WordShr(x, 1), m.IntPtrConstant(1));
+    TNode<IntPtrT> x = m.SmiUntag(m.Parameter<Smi>(1));
+    TNode<WordT> y = m.WordOr(m.WordShr(x, 1), m.IntPtrConstant(1));
     TNode<ExternalReference> isolate_ptr =
         m.ExternalConstant(ExternalReference::isolate_address(isolate));
     m.CallCFunctionWithCallerSavedRegisters(