[csa, torque, cleanup] Rename CSA_ASSERT to CSA_DCHECK

It's confusing that we have both CSA_CHECK and CSA_ASSERT, because the
names don't make it clear that the former is enabled in release mode
while the latter is enabled only in debug mode.

Renaming CSA_ASSERT to CSA_DCHECK makes its behavior obvious and matches
the existing CHECK / DCHECK naming convention. So now we have CSA_CHECK
and CSA_DCHECK, and they're no longer confusing.

This also renames assert() in Torque to dcheck().

Bug: v8:12244
Change-Id: I6f25d431ebc6eec7ebe326b6b8ad3a0ac5e9a108
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3190104
Reviewed-by: Nico Hartmann <nicohartmann@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Commit-Queue: Marja Hölttä <marja@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77160}
Author: Marja Hölttä, 2021-09-30 09:26:47 +02:00 (committed by V8 LUCI CQ)
Parent: a2fa2efe35
Commit: db50b49128
145 changed files with 875 additions and 875 deletions
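
For readers who don't work in V8: the new names follow the CHECK/DCHECK convention used throughout the codebase, where a CHECK is verified in every build mode and a DCHECK only in debug builds. Here is a minimal, self-contained C++ sketch of that convention (simplified stand-in macros for illustration, not V8's actual definitions; Torque's new dcheck() expresses the same debug-only semantics at the Torque level):

#include <cstdio>
#include <cstdlib>

// CHECK-style macro: verified in every build mode; a failure aborts
// even in release binaries.
#define MY_CHECK(cond)                                   \
  do {                                                   \
    if (!(cond)) {                                       \
      std::fprintf(stderr, "Check failed: %s\n", #cond); \
      std::abort();                                      \
    }                                                    \
  } while (0)

// DCHECK-style macro: verified only when DEBUG is defined; compiles
// away entirely in release builds.
#ifdef DEBUG
#define MY_DCHECK(cond) MY_CHECK(cond)
#else
#define MY_DCHECK(cond) ((void)0)
#endif

int main() {
  int x = 42;
  MY_CHECK(x == 42);   // checked in both release and debug builds
  MY_DCHECK(x == 42);  // checked only in debug builds
  return 0;
}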

@@ -79,7 +79,7 @@ ArrayFrom(js-implicit context: NativeContext, receiver: JSAny)(...arguments):
// memory, e.g. a proxy that discarded the values. Ignoring this case
// just means we would repeatedly call CreateDataProperty with index =
// 2^53
-assert(k < kMaxSafeInteger);
+dcheck(k < kMaxSafeInteger);
// ii. Let Pk be ! ToString(k).

@@ -55,7 +55,7 @@ LoadJoinElement<array::FastDoubleElements>(
builtin LoadJoinTypedElement<T : type extends ElementsKind>(
context: Context, receiver: JSReceiver, k: uintptr): JSAny {
const typedArray: JSTypedArray = UnsafeCast<JSTypedArray>(receiver);
-assert(!IsDetachedBuffer(typedArray.buffer));
+dcheck(!IsDetachedBuffer(typedArray.buffer));
return typed_array::LoadFixedTypedArrayElementAsTagged(
typedArray.data_ptr, k, typed_array::KindForArrayType<T>());
}
@@ -126,14 +126,14 @@ macro AddStringLength(implicit context: Context)(
macro StoreAndGrowFixedArray<T: type>(
fixedArray: FixedArray, index: intptr, element: T): FixedArray {
const length: intptr = fixedArray.length_intptr;
-assert(index <= length);
+dcheck(index <= length);
if (index < length) {
fixedArray.objects[index] = element;
return fixedArray;
} else
deferred {
const newLength: intptr = CalculateNewElementsCapacity(length);
-assert(index < newLength);
+dcheck(index < newLength);
const newfixedArray: FixedArray =
ExtractFixedArray(fixedArray, 0, length, newLength);
newfixedArray.objects[index] = element;
@@ -211,7 +211,7 @@ macro NewBuffer(len: uintptr, sep: String): Buffer {
const cappedBufferSize: intptr = len > kMaxNewSpaceFixedArrayElements ?
kMaxNewSpaceFixedArrayElements :
Signed(len);
-assert(cappedBufferSize > 0);
+dcheck(cappedBufferSize > 0);
return Buffer{
fixedArray: AllocateZeroedFixedArray(cappedBufferSize),
index: 0,
@@ -222,7 +222,7 @@ macro NewBuffer(len: uintptr, sep: String): Buffer {
macro BufferJoin(implicit context: Context)(
buffer: Buffer, sep: String): String {
-assert(IsValidPositiveSmi(buffer.totalStringLength));
+dcheck(IsValidPositiveSmi(buffer.totalStringLength));
if (buffer.totalStringLength == 0) return kEmptyString;
// Fast path when there's only one buffer element.

@@ -44,7 +44,7 @@ macro FastArrayLastIndexOf<Elements : type extends FixedArrayBase>(
const same: Boolean = StrictEqual(searchElement, element);
if (same == True) {
-assert(Is<FastJSArray>(array));
+dcheck(Is<FastJSArray>(array));
return k;
}
} label Hole {} // Do nothing for holes.
@@ -52,7 +52,7 @@ macro FastArrayLastIndexOf<Elements : type extends FixedArrayBase>(
--k;
}
-assert(Is<FastJSArray>(array));
+dcheck(Is<FastJSArray>(array));
return -1;
}
@@ -90,7 +90,7 @@ macro TryFastArrayLastIndexOf(
return FastArrayLastIndexOf<FixedArray>(
context, array, fromSmi, searchElement);
}
-assert(IsDoubleElementsKind(kind));
+dcheck(IsDoubleElementsKind(kind));
return FastArrayLastIndexOf<FixedDoubleArray>(
context, array, fromSmi, searchElement);
}

@@ -103,7 +103,7 @@ struct Vector {
macro CreateJSArray(implicit context: Context)(validLength: Smi): JSArray {
const length: Smi = this.fixedArray.length;
-assert(validLength <= length);
+dcheck(validLength <= length);
let kind: ElementsKind = ElementsKind::PACKED_SMI_ELEMENTS;
if (!this.onlySmis) {
if (this.onlyNumbers) {

@@ -89,7 +89,7 @@ macro HandleFastSlice(
labels Bailout {
const start: Smi = Cast<Smi>(startNumber) otherwise Bailout;
const count: Smi = Cast<Smi>(countNumber) otherwise Bailout;
-assert(start >= 0);
+dcheck(start >= 0);
try {
typeswitch (o) {
@@ -182,12 +182,12 @@ ArrayPrototypeSlice(
// 7. Let count be max(final - k, 0).
const count: Number = Max(final - k, 0);
-assert(0 <= k);
-assert(k <= len);
-assert(0 <= final);
-assert(final <= len);
-assert(0 <= count);
-assert(count <= len);
+dcheck(0 <= k);
+dcheck(k <= len);
+dcheck(0 <= final);
+dcheck(final <= len);
+dcheck(0 <= count);
+dcheck(count <= len);
try {
return HandleFastSlice(context, o, k, count)

@@ -17,19 +17,19 @@ type FastDoubleElements extends ElementsKind;
type DictionaryElements extends ElementsKind;
macro EnsureWriteableFastElements(implicit context: Context)(array: JSArray) {
-assert(IsFastElementsKind(array.map.elements_kind));
+dcheck(IsFastElementsKind(array.map.elements_kind));
const elements: FixedArrayBase = array.elements;
if (elements.map != kCOWMap) return;
// There are no COW *_DOUBLE_ELEMENTS arrays, so we are allowed to always
// extract FixedArrays and don't have to worry about FixedDoubleArrays.
-assert(IsFastSmiOrTaggedElementsKind(array.map.elements_kind));
+dcheck(IsFastSmiOrTaggedElementsKind(array.map.elements_kind));
const length = Convert<intptr>(Cast<Smi>(array.length) otherwise unreachable);
array.elements =
ExtractFixedArray(UnsafeCast<FixedArray>(elements), 0, length, length);
-assert(array.elements.map != kCOWMap);
+dcheck(array.elements.map != kCOWMap);
}
macro LoadElementOrUndefined(implicit context: Context)(
@@ -72,7 +72,7 @@ macro EnsureArrayLengthWritable(implicit context: Context)(map: Map):
const descriptors: DescriptorArray = map.instance_descriptors;
const descriptor:&DescriptorEntry =
&descriptors.descriptors[kLengthDescriptorIndex];
-assert(TaggedEqual(descriptor->key, LengthStringConstant()));
+dcheck(TaggedEqual(descriptor->key, LengthStringConstant()));
const details: Smi = UnsafeCast<Smi>(descriptor->details);
if ((details & kAttributesReadOnlyMask) != 0) {
goto Bailout;

@@ -47,7 +47,7 @@ transitioning javascript builtin ArrayBufferPrototypeGetMaxByteLength(
// 6. Else,
// a. Let length be O.[[ArrayBufferByteLength]].
// 7. Return F(length);
-assert(IsResizableArrayBuffer(o) || o.max_byte_length == o.byte_length);
+dcheck(IsResizableArrayBuffer(o) || o.max_byte_length == o.byte_length);
return Convert<Number>(o.max_byte_length);
}
@@ -92,7 +92,7 @@ SharedArrayBufferPrototypeGetMaxByteLength(
// 5. Else,
// a. Let length be O.[[ArrayBufferByteLength]].
// 6. Return F(length);
-assert(IsResizableArrayBuffer(o) || o.max_byte_length == o.byte_length);
+dcheck(IsResizableArrayBuffer(o) || o.max_byte_length == o.byte_length);
return Convert<Number>(o.max_byte_length);
}

@@ -158,7 +158,7 @@ struct float64_or_hole {
return this.value;
}
macro ValueUnsafeAssumeNotHole(): float64 {
-assert(!this.is_hole);
+dcheck(!this.is_hole);
return this.value;
}
@@ -601,7 +601,7 @@ transitioning macro ToIntegerImpl(implicit context: Context)(input: JSAny):
// ToInteger normalizes -0 to +0.
if (value == 0.0) return SmiConstant(0);
const result = ChangeFloat64ToTagged(value);
-assert(IsNumberNormalized(result));
+dcheck(IsNumberNormalized(result));
return result;
}
case (a: JSAnyNotNumber): {
@@ -1252,7 +1252,7 @@ macro FastHoleyElementsKind(kind: ElementsKind): ElementsKind {
} else if (kind == ElementsKind::PACKED_DOUBLE_ELEMENTS) {
return ElementsKind::HOLEY_DOUBLE_ELEMENTS;
}
-assert(kind == ElementsKind::PACKED_ELEMENTS);
+dcheck(kind == ElementsKind::PACKED_ELEMENTS);
return ElementsKind::HOLEY_ELEMENTS;
}
@@ -1396,8 +1396,8 @@ macro SameValue(a: JSAny, b: JSAny): bool {
macro CheckIntegerIndexAdditionOverflow(
index1: uintptr, index2: uintptr, limit: uintptr) labels IfOverflow {
if constexpr (Is64()) {
-assert(index1 <= kMaxSafeIntegerUint64);
-assert(index2 <= kMaxSafeIntegerUint64);
+dcheck(index1 <= kMaxSafeIntegerUint64);
+dcheck(index2 <= kMaxSafeIntegerUint64);
// Given that both index1 and index2 are in a safe integer range the
// addition can't overflow.
if (index1 + index2 > limit) goto IfOverflow;
@@ -1431,7 +1431,7 @@ macro TryNumberToUintPtr(valueNumber: Number, kMode: constexpr int31):
if (kMode == kModeValueIsAnyNumber) {
if (valueSmi < 0) goto IfLessThanZero;
} else {
-assert(valueSmi >= 0);
+dcheck(valueSmi >= 0);
}
const value: uintptr = Unsigned(Convert<intptr>(valueSmi));
// Positive Smi values definitely fit into both [0, kMaxSafeInteger] and
@@ -1439,14 +1439,14 @@ macro TryNumberToUintPtr(valueNumber: Number, kMode: constexpr int31):
return value;
}
case (valueHeapNumber: HeapNumber): {
-assert(IsNumberNormalized(valueHeapNumber));
+dcheck(IsNumberNormalized(valueHeapNumber));
const valueDouble: float64 = Convert<float64>(valueHeapNumber);
// NaNs must be handled outside.
-assert(!Float64IsNaN(valueDouble));
+dcheck(!Float64IsNaN(valueDouble));
if (kMode == kModeValueIsAnyNumber) {
if (valueDouble < 0) goto IfLessThanZero;
} else {
-assert(valueDouble >= 0);
+dcheck(valueDouble >= 0);
}
if constexpr (Is64()) {
@@ -1455,7 +1455,7 @@ macro TryNumberToUintPtr(valueNumber: Number, kMode: constexpr int31):
if (kMode == kModeValueIsAnyNumber) {
if (valueDouble > kMaxSafeInteger) goto IfSafeIntegerOverflow;
} else {
-assert(valueDouble <= kMaxSafeInteger);
+dcheck(valueDouble <= kMaxSafeInteger);
}
} else {
// On 32-bit architectures uintptr range is smaller than safe integer
@@ -1464,7 +1464,7 @@ macro TryNumberToUintPtr(valueNumber: Number, kMode: constexpr int31):
kMode == kModeValueIsSafeInteger) {
if (valueDouble > kMaxUInt32Double) goto IfUIntPtrOverflow;
} else {
-assert(valueDouble <= kMaxUInt32Double);
+dcheck(valueDouble <= kMaxUInt32Double);
}
}
return ChangeFloat64ToUintPtr(valueDouble);
@@ -1602,13 +1602,13 @@ macro ConvertToRelativeIndex(indexNumber: Number, length: uintptr): uintptr {
}
}
case (indexHeapNumber: HeapNumber): {
-assert(IsNumberNormalized(indexHeapNumber));
+dcheck(IsNumberNormalized(indexHeapNumber));
const indexDouble: float64 = Convert<float64>(indexHeapNumber);
// NaNs must already be handled by ConvertToRelativeIndex() version
// above accepting JSAny indices.
-assert(!Float64IsNaN(indexDouble));
+dcheck(!Float64IsNaN(indexDouble));
const lengthDouble: float64 = Convert<float64>(length);
-assert(lengthDouble <= kMaxSafeInteger);
+dcheck(lengthDouble <= kMaxSafeInteger);
if (indexDouble < 0) {
const relativeIndex: float64 = lengthDouble + indexDouble;
return relativeIndex > 0 ? ChangeFloat64ToUintPtr(relativeIndex) : 0;
@@ -1643,15 +1643,15 @@ macro ClampToIndexRange(indexNumber: Number, limit: uintptr): uintptr {
return index;
}
case (indexHeapNumber: HeapNumber): {
-assert(IsNumberNormalized(indexHeapNumber));
+dcheck(IsNumberNormalized(indexHeapNumber));
const indexDouble: float64 = Convert<float64>(indexHeapNumber);
// NaNs must already be handled by ClampToIndexRange() version
// above accepting JSAny indices.
-assert(!Float64IsNaN(indexDouble));
+dcheck(!Float64IsNaN(indexDouble));
if (indexDouble <= 0) return 0;
const maxIndexDouble: float64 = Convert<float64>(limit);
-assert(maxIndexDouble <= kMaxSafeInteger);
+dcheck(maxIndexDouble <= kMaxSafeInteger);
if (indexDouble >= maxIndexDouble) return limit;
return ChangeFloat64ToUintPtr(indexDouble);
@@ -1746,7 +1746,7 @@ transitioning builtin FastCreateDataProperty(implicit context: Context)(
BuildAppendJSArray(ElementsKind::HOLEY_DOUBLE_ELEMENTS, array, value)
otherwise Slow;
} else {
-assert(IsFastSmiOrTaggedElementsKind(kind));
+dcheck(IsFastSmiOrTaggedElementsKind(kind));
BuildAppendJSArray(ElementsKind::HOLEY_ELEMENTS, array, value)
otherwise Slow;
}
@@ -1767,7 +1767,7 @@ transitioning builtin FastCreateDataProperty(implicit context: Context)(
otherwise unreachable;
doubleElements[index] = numberValue;
} else {
-assert(IsFastSmiOrTaggedElementsKind(kind));
+dcheck(IsFastSmiOrTaggedElementsKind(kind));
const elements = Cast<FixedArray>(array.elements) otherwise unreachable;
elements[index] = value;
}

@@ -36,7 +36,7 @@ void ArrayBuiltinsAssembler::TypedArrayMapResultGenerator() {
context(), method_name, original_array, len());
// In the Spec and our current implementation, the length check is already
// performed in TypedArraySpeciesCreate.
-CSA_ASSERT(this, UintPtrLessThanOrEqual(len(), LoadJSTypedArrayLength(a)));
+CSA_DCHECK(this, UintPtrLessThanOrEqual(len(), LoadJSTypedArrayLength(a)));
fast_typed_array_target_ =
Word32Equal(LoadElementsKind(original_array), LoadElementsKind(a));
a_ = a;
@@ -228,7 +228,7 @@ void ArrayBuiltinsAssembler::VisitAllTypedArrayElements(
TF_BUILTIN(ArrayPrototypePop, CodeStubAssembler) {
auto argc = UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
+CSA_DCHECK(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
CodeStubArguments args(this, argc);
TNode<Object> receiver = args.GetReceiver();
@@ -248,7 +248,7 @@ TF_BUILTIN(ArrayPrototypePop, CodeStubAssembler) {
BIND(&fast);
{
TNode<JSArray> array_receiver = CAST(receiver);
-CSA_ASSERT(this, TaggedIsPositiveSmi(LoadJSArrayLength(array_receiver)));
+CSA_DCHECK(this, TaggedIsPositiveSmi(LoadJSArrayLength(array_receiver)));
TNode<IntPtrT> length =
LoadAndUntagObjectField(array_receiver, JSArray::kLengthOffset);
Label return_undefined(this), fast_elements(this);
@@ -330,7 +330,7 @@ TF_BUILTIN(ArrayPrototypePush, CodeStubAssembler) {
auto argc = UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
+CSA_DCHECK(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
CodeStubArguments args(this, argc);
TNode<Object> receiver = args.GetReceiver();
@@ -449,7 +449,7 @@ TF_BUILTIN(ExtractFastJSArray, ArrayBuiltinsAssembler) {
TNode<BInt> begin = SmiToBInt(Parameter<Smi>(Descriptor::kBegin));
TNode<BInt> count = SmiToBInt(Parameter<Smi>(Descriptor::kCount));
-CSA_ASSERT(this, Word32BinaryNot(IsNoElementsProtectorCellInvalid()));
+CSA_DCHECK(this, Word32BinaryNot(IsNoElementsProtectorCellInvalid()));
Return(ExtractFastJSArray(context, array, begin, count));
}
@@ -458,7 +458,7 @@ TF_BUILTIN(CloneFastJSArray, ArrayBuiltinsAssembler) {
auto context = Parameter<Context>(Descriptor::kContext);
auto array = Parameter<JSArray>(Descriptor::kSource);
-CSA_ASSERT(this,
+CSA_DCHECK(this,
Word32Or(Word32BinaryNot(IsHoleyFastElementsKindForRead(
LoadElementsKind(array))),
Word32BinaryNot(IsNoElementsProtectorCellInvalid())));
@@ -477,7 +477,7 @@ TF_BUILTIN(CloneFastJSArrayFillingHoles, ArrayBuiltinsAssembler) {
auto context = Parameter<Context>(Descriptor::kContext);
auto array = Parameter<JSArray>(Descriptor::kSource);
-CSA_ASSERT(this,
+CSA_DCHECK(this,
Word32Or(Word32BinaryNot(IsHoleyFastElementsKindForRead(
LoadElementsKind(array))),
Word32BinaryNot(IsNoElementsProtectorCellInvalid())));
@@ -526,7 +526,7 @@ class ArrayPopulatorAssembler : public CodeStubAssembler {
TNode<Number> length) {
TVARIABLE(Object, array);
Label is_constructor(this), is_not_constructor(this), done(this);
-CSA_ASSERT(this, IsNumberNormalized(length));
+CSA_DCHECK(this, IsNumberNormalized(length));
GotoIf(TaggedIsSmi(receiver), &is_not_constructor);
Branch(IsConstructor(CAST(receiver)), &is_constructor, &is_not_constructor);
@@ -619,7 +619,7 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
TNode<JSArray> array = CAST(receiver);
// JSArray length is always a positive Smi for fast arrays.
-CSA_ASSERT(this, TaggedIsPositiveSmi(LoadJSArrayLength(array)));
+CSA_DCHECK(this, TaggedIsPositiveSmi(LoadJSArrayLength(array)));
TNode<Smi> array_length = LoadFastJSArrayLength(array);
TNode<IntPtrT> array_length_untagged = SmiUntag(array_length);
@@ -1207,7 +1207,7 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
// Let index be O.[[ArrayIteratorNextIndex]].
TNode<Number> index = LoadJSArrayIteratorNextIndex(iterator);
-CSA_ASSERT(this, IsNumberNonNegativeSafeInteger(index));
+CSA_DCHECK(this, IsNumberNonNegativeSafeInteger(index));
// Dispatch based on the type of the {array}.
TNode<Map> array_map = LoadMap(array);
@@ -1219,7 +1219,7 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
BIND(&if_array);
{
// If {array} is a JSArray, then the {index} must be in Unsigned32 range.
-CSA_ASSERT(this, IsNumberArrayIndex(index));
+CSA_DCHECK(this, IsNumberArrayIndex(index));
// Check that the {index} is within range for the {array}. We handle all
// kinds of JSArray's here, so we do the computation on Uint32.
@@ -1260,8 +1260,8 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
BIND(&if_other);
{
// We cannot enter here with either JSArray's or JSTypedArray's.
-CSA_ASSERT(this, Word32BinaryNot(IsJSArray(array)));
-CSA_ASSERT(this, Word32BinaryNot(IsJSTypedArray(array)));
+CSA_DCHECK(this, Word32BinaryNot(IsJSArray(array)));
+CSA_DCHECK(this, Word32BinaryNot(IsJSTypedArray(array)));
// Check that the {index} is within the bounds of the {array}s "length".
TNode<Number> length = CAST(
@@ -1297,7 +1297,7 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
//
// Note specifically that JSTypedArray's will never take this path, so
// we don't need to worry about their maximum value.
-CSA_ASSERT(this, Word32BinaryNot(IsJSTypedArray(array)));
+CSA_DCHECK(this, Word32BinaryNot(IsJSTypedArray(array)));
TNode<Number> max_length =
SelectConstant(IsJSArray(array), NumberConstant(kMaxUInt32),
NumberConstant(kMaxSafeInteger));
@@ -1382,8 +1382,8 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
TNode<Number> start, TNode<Number> depth,
base::Optional<TNode<HeapObject>> mapper_function = base::nullopt,
base::Optional<TNode<Object>> this_arg = base::nullopt) {
-CSA_ASSERT(this, IsNumberPositive(source_length));
-CSA_ASSERT(this, IsNumberPositive(start));
+CSA_DCHECK(this, IsNumberPositive(source_length));
+CSA_DCHECK(this, IsNumberPositive(start));
// 1. Let targetIndex be start.
TVARIABLE(Number, var_target_index, start);
@@ -1404,7 +1404,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
// a. Let P be ! ToString(sourceIndex).
// b. Let exists be ? HasProperty(source, P).
-CSA_ASSERT(this,
+CSA_DCHECK(this,
SmiGreaterThanOrEqual(CAST(source_index), SmiConstant(0)));
const TNode<Oddball> exists =
HasProperty(context, source, source_index, kHasProperty);
@@ -1419,7 +1419,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
// ii. If mapperFunction is present, then
if (mapper_function) {
-CSA_ASSERT(this, Word32Or(IsUndefined(mapper_function.value()),
+CSA_DCHECK(this, Word32Or(IsUndefined(mapper_function.value()),
IsCallable(mapper_function.value())));
DCHECK(this_arg.has_value());
@@ -1445,7 +1445,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
BIND(&if_flatten_array);
{
-CSA_ASSERT(this, IsJSArray(element));
+CSA_DCHECK(this, IsJSArray(element));
// 1. Let elementLen be ? ToLength(? Get(element, "length")).
const TNode<Object> element_length =
@@ -1462,7 +1462,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
BIND(&if_flatten_proxy);
{
-CSA_ASSERT(this, IsJSProxy(element));
+CSA_DCHECK(this, IsJSProxy(element));
// 1. Let elementLen be ? ToLength(? Get(element, "length")).
const TNode<Number> element_length = ToLength_Inline(
@@ -1802,11 +1802,11 @@ TF_BUILTIN(ArrayConstructorImpl, ArrayBuiltinsAssembler) {
Parameter<HeapObject>(Descriptor::kAllocationSite);
// Initial map for the builtin Array functions should be Map.
-CSA_ASSERT(this, IsMap(CAST(LoadObjectField(
+CSA_DCHECK(this, IsMap(CAST(LoadObjectField(
target, JSFunction::kPrototypeOrInitialMapOffset))));
// We should either have undefined or a valid AllocationSite
-CSA_ASSERT(this, Word32Or(IsUndefined(maybe_allocation_site),
+CSA_DCHECK(this, Word32Or(IsUndefined(maybe_allocation_site),
IsAllocationSite(maybe_allocation_site)));
// "Enter" the context of the Array function.

@@ -55,7 +55,7 @@ void AsyncFunctionBuiltinsAssembler::AsyncFunctionAwaitResumeClosure(
// unnecessary runtime checks removed.
// Ensure that the {async_function_object} is neither closed nor running.
-CSA_SLOW_ASSERT(
+CSA_SLOW_DCHECK(
this, SmiGreaterThan(
LoadObjectField<Smi>(async_function_object,
JSGeneratorObject::kContinuationOffset),
@@ -226,7 +226,7 @@ TF_BUILTIN(AsyncFunctionLazyDeoptContinuation, AsyncFunctionBuiltinsAssembler) {
}
TF_BUILTIN(AsyncFunctionAwaitRejectClosure, AsyncFunctionBuiltinsAssembler) {
-CSA_ASSERT_JS_ARGC_EQ(this, 1);
+CSA_DCHECK_JS_ARGC_EQ(this, 1);
const auto sentError = Parameter<Object>(Descriptor::kSentError);
const auto context = Parameter<Context>(Descriptor::kContext);
@@ -236,7 +236,7 @@ TF_BUILTIN(AsyncFunctionAwaitRejectClosure, AsyncFunctionBuiltinsAssembler) {
}
TF_BUILTIN(AsyncFunctionAwaitResolveClosure, AsyncFunctionBuiltinsAssembler) {
-CSA_ASSERT_JS_ARGC_EQ(this, 1);
+CSA_DCHECK_JS_ARGC_EQ(this, 1);
const auto sentValue = Parameter<Object>(Descriptor::kSentValue);
const auto context = Parameter<Context>(Descriptor::kContext);

@@ -55,12 +55,12 @@ TNode<Object> AsyncBuiltinsAssembler::AwaitOld(
// Let promiseCapability be ! NewPromiseCapability(%Promise%).
const TNode<JSFunction> promise_fun =
CAST(LoadContextElement(native_context, Context::PROMISE_FUNCTION_INDEX));
-CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(promise_fun)));
+CSA_DCHECK(this, IsFunctionWithPrototypeSlotMap(LoadMap(promise_fun)));
const TNode<Map> promise_map = CAST(
LoadObjectField(promise_fun, JSFunction::kPrototypeOrInitialMapOffset));
// Assert that the JSPromise map has an instance size is
// JSPromise::kSizeWithEmbedderFields.
-CSA_ASSERT(this,
+CSA_DCHECK(this,
IntPtrEqual(LoadMapInstanceSizeInWords(promise_map),
IntPtrConstant(JSPromise::kSizeWithEmbedderFields /
kTaggedSize)));
@@ -259,7 +259,7 @@ void AsyncBuiltinsAssembler::InitializeNativeClosure(
native_context, Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
// Ensure that we don't have to initialize prototype_or_initial_map field of
// JSFunction.
-CSA_ASSERT(this,
+CSA_DCHECK(this,
IntPtrEqual(LoadMapInstanceSizeInWords(function_map),
IntPtrConstant(JSFunction::kSizeWithoutPrototype /
kTaggedSize)));
@@ -302,7 +302,7 @@ TNode<JSFunction> AsyncBuiltinsAssembler::CreateUnwrapClosure(
TNode<Context> AsyncBuiltinsAssembler::AllocateAsyncIteratorValueUnwrapContext(
TNode<NativeContext> native_context, TNode<Oddball> done) {
-CSA_ASSERT(this, IsBoolean(done));
+CSA_DCHECK(this, IsBoolean(done));
TNode<Context> context = AllocateSyntheticFunctionContext(
native_context, ValueUnwrapContext::kLength);
@@ -317,7 +317,7 @@ TF_BUILTIN(AsyncIteratorValueUnwrap, AsyncBuiltinsAssembler) {
const TNode<Object> done =
LoadContextElement(context, ValueUnwrapContext::kDoneSlot);
-CSA_ASSERT(this, IsBoolean(CAST(done)));
+CSA_DCHECK(this, IsBoolean(CAST(done)));
const TNode<Object> unwrapped_value =
CallBuiltin(Builtin::kCreateIterResultObject, context, value, done);

@@ -65,18 +65,18 @@ class AsyncGeneratorBuiltinsAssembler : public AsyncBuiltinsAssembler {
}
inline void SetGeneratorAwaiting(const TNode<JSGeneratorObject> generator) {
-CSA_ASSERT(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
+CSA_DCHECK(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
StoreObjectFieldNoWriteBarrier(
generator, JSAsyncGeneratorObject::kIsAwaitingOffset, SmiConstant(1));
-CSA_ASSERT(this, IsGeneratorAwaiting(generator));
+CSA_DCHECK(this, IsGeneratorAwaiting(generator));
}
inline void SetGeneratorNotAwaiting(
const TNode<JSGeneratorObject> generator) {
-CSA_ASSERT(this, IsGeneratorAwaiting(generator));
+CSA_DCHECK(this, IsGeneratorAwaiting(generator));
StoreObjectFieldNoWriteBarrier(
generator, JSAsyncGeneratorObject::kIsAwaitingOffset, SmiConstant(0));
-CSA_ASSERT(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
+CSA_DCHECK(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
}
inline void CloseGenerator(const TNode<JSGeneratorObject> generator) {
@@ -216,7 +216,7 @@ void AsyncGeneratorBuiltinsAssembler::AsyncGeneratorAwaitResumeClosure(
SetGeneratorNotAwaiting(generator);
-CSA_SLOW_ASSERT(this, IsGeneratorSuspended(generator));
+CSA_SLOW_DCHECK(this, IsGeneratorSuspended(generator));
// Remember the {resume_mode} for the {generator}.
StoreObjectFieldNoWriteBarrier(generator,
@@ -401,7 +401,7 @@ TF_BUILTIN(AsyncGeneratorResumeNext, AsyncGeneratorBuiltinsAssembler) {
Goto(&start);
BIND(&start);
-CSA_ASSERT(this, IsGeneratorNotExecuting(generator));
+CSA_DCHECK(this, IsGeneratorNotExecuting(generator));
// Stop resuming if suspended for Await.
ReturnIf(IsGeneratorAwaiting(generator), UndefinedConstant());
@@ -478,7 +478,7 @@ TF_BUILTIN(AsyncGeneratorResolve, AsyncGeneratorBuiltinsAssembler) {
const auto done = Parameter<Object>(Descriptor::kDone);
const auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
+CSA_DCHECK(this, Word32BinaryNot(IsGeneratorAwaiting(generator)));
// This operation should be called only when the `value` parameter has been
// Await-ed. Typically, this means `value` is not a JSPromise value. However,

@@ -161,7 +161,7 @@ void AsyncFromSyncBuiltinsAssembler::Generate_AsyncFromSyncIteratorMethod(
const TNode<JSFunction> promise_fun =
CAST(LoadContextElement(native_context, Context::PROMISE_FUNCTION_INDEX));
-CSA_ASSERT(this, IsConstructor(promise_fun));
+CSA_DCHECK(this, IsConstructor(promise_fun));
// Let valueWrapper be PromiseResolve(%Promise%, « value »).
// IfAbruptRejectPromise(valueWrapper, promiseCapability).

@@ -70,9 +70,9 @@ macro MutableBigIntAbsoluteSub(implicit context: Context)(
const ylength = ReadBigIntLength(y);
const xsign = ReadBigIntSign(x);
-assert(MutableBigIntAbsoluteCompare(x, y) >= 0);
+dcheck(MutableBigIntAbsoluteCompare(x, y) >= 0);
if (xlength == 0) {
-assert(ylength == 0);
+dcheck(ylength == 0);
return x;
}
@@ -104,7 +104,7 @@ macro MutableBigIntAbsoluteAdd(implicit context: Context)(
// case: 0n + 0n
if (xlength == 0) {
-assert(ylength == 0);
+dcheck(ylength == 0);
return x;
}

@@ -280,7 +280,7 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithArrayLike(
TNode<Int32T> length = var_length.value();
{
Label normalize_done(this);
-CSA_ASSERT(this, Int32LessThanOrEqual(
+CSA_DCHECK(this, Int32LessThanOrEqual(
length, Int32Constant(FixedArray::kMaxLength)));
GotoIfNot(Word32Equal(length, Int32Constant(0)), &normalize_done);
// Make sure we don't accidentally pass along the
@@ -327,10 +327,10 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructDoubleVarargs(
TNode<Int32T> args_count, TNode<Context> context, TNode<Int32T> kind) {
const ElementsKind new_kind = PACKED_ELEMENTS;
const WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER;
-CSA_ASSERT(this, Int32LessThanOrEqual(length,
+CSA_DCHECK(this, Int32LessThanOrEqual(length,
Int32Constant(FixedArray::kMaxLength)));
TNode<IntPtrT> intptr_length = ChangeInt32ToIntPtr(length);
-CSA_ASSERT(this, WordNotEqual(intptr_length, IntPtrConstant(0)));
+CSA_DCHECK(this, WordNotEqual(intptr_length, IntPtrConstant(0)));
// Allocate a new FixedArray of Objects.
TNode<FixedArray> new_elements = CAST(AllocateFixedArray(
@@ -439,7 +439,7 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithSpread(
TNode<Int32T> length = LoadAndUntagToWord32ObjectField(
var_js_array.value(), JSArray::kLengthOffset);
TNode<FixedArrayBase> elements = var_elements.value();
-CSA_ASSERT(this, Int32LessThanOrEqual(
+CSA_DCHECK(this, Int32LessThanOrEqual(
length, Int32Constant(FixedArray::kMaxLength)));
if (!new_target) {

@@ -151,7 +151,7 @@ void BaseCollectionsAssembler::AddConstructorEntry(
Label* if_may_have_side_effects, Label* if_exception,
TVariable<Object>* var_exception) {
compiler::ScopedExceptionHandler handler(this, if_exception, var_exception);
-CSA_ASSERT(this, Word32BinaryNot(IsTheHole(key_value)));
+CSA_DCHECK(this, Word32BinaryNot(IsTheHole(key_value)));
if (variant == kMap || variant == kWeakMap) {
TorqueStructKeyValuePair pair =
if_may_have_side_effects != nullptr
@@ -191,7 +191,7 @@ void BaseCollectionsAssembler::AddConstructorEntries(
TNode<JSArray> initial_entries_jsarray =
UncheckedCast<JSArray>(initial_entries);
#if DEBUG
-CSA_ASSERT(this, IsFastJSArrayWithNoCustomIteration(
+CSA_DCHECK(this, IsFastJSArrayWithNoCustomIteration(
context, initial_entries_jsarray));
TNode<Map> original_initial_entries_map = LoadMap(initial_entries_jsarray);
#endif
@@ -215,7 +215,7 @@ void BaseCollectionsAssembler::AddConstructorEntries(
Unreachable();
BIND(&if_not_modified);
}
-CSA_ASSERT(this, TaggedEqual(original_initial_entries_map,
+CSA_DCHECK(this, TaggedEqual(original_initial_entries_map,
LoadMap(initial_entries_jsarray)));
#endif
use_fast_loop = Int32FalseConstant();
@@ -238,13 +238,13 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
TNode<FixedArrayBase> elements = LoadElements(fast_jsarray);
TNode<Int32T> elements_kind = LoadElementsKind(fast_jsarray);
TNode<JSFunction> add_func = GetInitialAddFunction(variant, native_context);
-CSA_ASSERT(this,
+CSA_DCHECK(this,
TaggedEqual(GetAddFunction(variant, native_context, collection),
add_func));
-CSA_ASSERT(this, IsFastJSArrayWithNoCustomIteration(context, fast_jsarray));
+CSA_DCHECK(this, IsFastJSArrayWithNoCustomIteration(context, fast_jsarray));
TNode<IntPtrT> length = SmiUntag(LoadFastJSArrayLength(fast_jsarray));
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(length, IntPtrConstant(0)));
-CSA_ASSERT(
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(length, IntPtrConstant(0)));
+CSA_DCHECK(
this, HasInitialCollectionPrototype(variant, native_context, collection));
#if DEBUG
@@ -277,7 +277,7 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
// A Map constructor requires entries to be arrays (ex. [key, value]),
// so a FixedDoubleArray can never succeed.
if (variant == kMap || variant == kWeakMap) {
-CSA_ASSERT(this, IntPtrGreaterThan(length, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThan(length, IntPtrConstant(0)));
TNode<Object> element =
LoadAndNormalizeFixedDoubleArrayElement(elements, IntPtrConstant(0));
ThrowTypeError(context, MessageTemplate::kIteratorValueNotAnObject,
@@ -296,9 +296,9 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray(
}
BIND(&exit);
#if DEBUG
-CSA_ASSERT(this,
+CSA_DCHECK(this,
TaggedEqual(original_collection_map, LoadMap(CAST(collection))));
-CSA_ASSERT(this,
+CSA_DCHECK(this,
TaggedEqual(original_fast_js_array_map, LoadMap(fast_jsarray)));
#endif
}
@@ -307,14 +307,14 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromIterable(
Variant variant, TNode<Context> context, TNode<Context> native_context,
TNode<Object> collection, TNode<Object> iterable) {
Label exit(this), loop(this), if_exception(this, Label::kDeferred);
-CSA_ASSERT(this, Word32BinaryNot(IsNullOrUndefined(iterable)));
+CSA_DCHECK(this, Word32BinaryNot(IsNullOrUndefined(iterable)));
TNode<Object> add_func = GetAddFunction(variant, context, collection);
IteratorBuiltinsAssembler iterator_assembler(this->state());
TorqueStructIteratorRecord iterator =
iterator_assembler.GetIterator(context, iterable);
-CSA_ASSERT(this, Word32BinaryNot(IsUndefined(iterator.object)));
+CSA_DCHECK(this, Word32BinaryNot(IsUndefined(iterator.object)));
TNode<Map> fast_iterator_result_map = CAST(
LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX));
@@ -402,7 +402,7 @@ TNode<JSObject> BaseCollectionsAssembler::AllocateJSCollection(
TNode<JSObject> BaseCollectionsAssembler::AllocateJSCollectionFast(
TNode<JSFunction> constructor) {
-CSA_ASSERT(this, IsConstructorMap(LoadMap(constructor)));
+CSA_DCHECK(this, IsConstructorMap(LoadMap(constructor)));
TNode<Map> initial_map =
CAST(LoadJSFunctionPrototypeOrInitialMap(constructor));
return AllocateJSObjectFromMap(initial_map);
@@ -779,7 +779,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntry(
not_found);
// Make sure the entry index is within range.
-CSA_ASSERT(
+CSA_DCHECK(
this,
UintPtrLessThan(
var_entry.value(),
@@ -1081,7 +1081,7 @@ TNode<JSArray> CollectionsBuiltinsAssembler::MapIteratorToList(
TNode<IntPtrT> index;
std::tie(table, index) =
TransitionAndUpdate<JSMapIterator, OrderedHashMap>(iterator);
-CSA_ASSERT(this, IntPtrEqual(index, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrEqual(index, IntPtrConstant(0)));
TNode<IntPtrT> size =
LoadAndUntagObjectField(table, OrderedHashMap::NumberOfElementsOffset());
@@ -1128,7 +1128,7 @@ TNode<JSArray> CollectionsBuiltinsAssembler::MapIteratorToList(
BIND(&write_value);
{
-CSA_ASSERT(this, InstanceTypeEqual(LoadInstanceType(iterator),
+CSA_DCHECK(this, InstanceTypeEqual(LoadInstanceType(iterator),
JS_MAP_VALUE_ITERATOR_TYPE));
TNode<Object> entry_value =
UnsafeLoadFixedArrayElement(table, entry_start_position,
@@ -1187,7 +1187,7 @@ TNode<JSArray> CollectionsBuiltinsAssembler::SetOrSetIteratorToList(
TNode<IntPtrT> iter_index;
std::tie(iter_table, iter_index) =
TransitionAndUpdate<JSSetIterator, OrderedHashSet>(CAST(iterable));
-CSA_ASSERT(this, IntPtrEqual(iter_index, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrEqual(iter_index, IntPtrConstant(0)));
var_table = iter_table;
Goto(&copy);
}
@@ -1272,7 +1272,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForSmiKey(
const TNode<IntPtrT> key_untagged = SmiUntag(smi_key);
const TNode<IntPtrT> hash =
ChangeInt32ToIntPtr(ComputeUnseededHash(key_untagged));
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
*result = hash;
FindOrderedHashTableEntry<CollectionType>(
table, hash,
@@ -1287,7 +1287,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForStringKey(
TNode<CollectionType> table, TNode<String> key_tagged,
TVariable<IntPtrT>* result, Label* entry_found, Label* not_found) {
const TNode<IntPtrT> hash = ComputeStringHash(key_tagged);
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
*result = hash;
FindOrderedHashTableEntry<CollectionType>(
table, hash,
@@ -1302,7 +1302,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForHeapNumberKey(
TNode<CollectionType> table, TNode<HeapNumber> key_heap_number,
TVariable<IntPtrT>* result, Label* entry_found, Label* not_found) {
const TNode<IntPtrT> hash = CallGetHashRaw(key_heap_number);
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
*result = hash;
const TNode<Float64T> key_float = LoadHeapNumberValue(key_heap_number);
FindOrderedHashTableEntry<CollectionType>(
@@ -1318,7 +1318,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForBigIntKey(
TNode<CollectionType> table, TNode<BigInt> key_big_int,
TVariable<IntPtrT>* result, Label* entry_found, Label* not_found) {
const TNode<IntPtrT> hash = CallGetHashRaw(key_big_int);
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
*result = hash;
FindOrderedHashTableEntry<CollectionType>(
table, hash,
@@ -1333,7 +1333,7 @@ void CollectionsBuiltinsAssembler::FindOrderedHashTableEntryForOtherKey(
TNode<CollectionType> table, TNode<HeapObject> key_heap_object,
TVariable<IntPtrT>* result, Label* entry_found, Label* not_found) {
const TNode<IntPtrT> hash = GetHash(key_heap_object);
-CSA_ASSERT(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
+CSA_DCHECK(this, IntPtrGreaterThanOrEqual(hash, IntPtrConstant(0)));
*result = hash;
FindOrderedHashTableEntry<CollectionType>(
table, hash,
@@ -2496,7 +2496,7 @@ void WeakCollectionsBuiltinsAssembler::AddEntry(
TNode<HeapObject> WeakCollectionsBuiltinsAssembler::AllocateTable(
Variant variant, TNode<IntPtrT> at_least_space_for) {
// See HashTable::New().
-CSA_ASSERT(this,
+CSA_DCHECK(this,
IntPtrLessThanOrEqual(IntPtrConstant(0), at_least_space_for));
TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
@@ -2814,7 +2814,7 @@ TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) {
auto key = Parameter<JSReceiver>(Descriptor::kKey);
auto value = Parameter<Object>(Descriptor::kValue);
-CSA_ASSERT(this, IsJSReceiver(key));
+CSA_DCHECK(this, IsJSReceiver(key));
Label call_runtime(this), if_no_hash(this), if_not_found(this);

@@ -189,7 +189,7 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
GotoIf(IsNoClosuresCellMap(feedback_cell_map), &no_closures);
GotoIf(IsOneClosureCellMap(feedback_cell_map), &one_closure);
-CSA_ASSERT(this, IsManyClosuresCellMap(feedback_cell_map),
+CSA_DCHECK(this, IsManyClosuresCellMap(feedback_cell_map),
feedback_cell_map, feedback_cell);
Goto(&cell_done);
@@ -211,7 +211,7 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
const TNode<IntPtrT> function_map_index = Signed(IntPtrAdd(
DecodeWordFromWord32<SharedFunctionInfo::FunctionMapIndexBits>(flags),
IntPtrConstant(Context::FIRST_FUNCTION_MAP_INDEX)));
-CSA_ASSERT(this, UintPtrLessThanOrEqual(
+CSA_DCHECK(this, UintPtrLessThanOrEqual(
function_map_index,
IntPtrConstant(Context::LAST_FUNCTION_MAP_INDEX)));
@@ -539,7 +539,7 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral(
TNode<AllocationSite> allocation_site = CAST(maybe_allocation_site);
TNode<JSObject> boilerplate = LoadBoilerplate(allocation_site);
TNode<Map> boilerplate_map = LoadMap(boilerplate);
-CSA_ASSERT(this, IsJSObjectMap(boilerplate_map));
+CSA_DCHECK(this, IsJSObjectMap(boilerplate_map));
TVARIABLE(HeapObject, var_properties);
{
@@ -587,7 +587,7 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral(
Goto(&done);
BIND(&if_copy_elements);
-CSA_ASSERT(this, Word32BinaryNot(
+CSA_DCHECK(this, Word32BinaryNot(
IsFixedCOWArrayMap(LoadMap(boilerplate_elements))));
auto flags = ExtractFixedArrayFlag::kAllFixedArrays;
var_elements = CloneFixedArray(boilerplate_elements, flags);
@@ -681,7 +681,7 @@ TNode<JSObject> ConstructorBuiltinsAssembler::CreateEmptyObjectLiteral(
TNode<Map> map = LoadObjectFunctionInitialMap(native_context);
// Ensure that slack tracking is disabled for the map.
STATIC_ASSERT(Map::kNoSlackTracking == 0);
-CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
+CSA_DCHECK(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
LoadMapBitField3(map)));
TNode<FixedArray> empty_fixed_array = EmptyFixedArrayConstant();
TNode<JSObject> result =

@@ -316,7 +316,7 @@ namespace internal {
\
/* Abort */ \
TFC(Abort, Abort) \
-TFC(AbortCSAAssert, Abort) \
+TFC(AbortCSADcheck, Abort) \
\
/* Built-in functions for Javascript */ \
/* Special internal builtins */ \

@@ -74,7 +74,7 @@ void GeneratorBuiltinsAssembler::InnerResume(
// The generator function should not close the generator by itself, let's
// check it is indeed not closed yet.
-CSA_ASSERT(this, SmiNotEqual(result_continuation, closed));
+CSA_DCHECK(this, SmiNotEqual(result_continuation, closed));
TNode<Smi> executing = SmiConstant(JSGeneratorObject::kGeneratorExecuting);
GotoIf(SmiEqual(result_continuation, executing), &if_final_return);
@@ -219,7 +219,7 @@ TF_BUILTIN(SuspendGeneratorBaseline, GeneratorBuiltinsAssembler) {
TNode<JSFunction> closure = LoadJSGeneratorObjectFunction(generator);
auto sfi = LoadJSFunctionSharedFunctionInfo(closure);
-CSA_ASSERT(this,
+CSA_DCHECK(this,
Word32BinaryNot(IsSharedFunctionInfoDontAdaptArguments(sfi)));
TNode<IntPtrT> formal_parameter_count = Signed(ChangeUint32ToWord(
LoadSharedFunctionInfoFormalParameterCountWithoutReceiver(sfi)));
@@ -273,7 +273,7 @@ TF_BUILTIN(ResumeGeneratorBaseline, GeneratorBuiltinsAssembler) {
auto generator = Parameter<JSGeneratorObject>(Descriptor::kGeneratorObject);
TNode<JSFunction> closure = LoadJSGeneratorObjectFunction(generator);
auto sfi = LoadJSFunctionSharedFunctionInfo(closure);
-CSA_ASSERT(this,
+CSA_DCHECK(this,
Word32BinaryNot(IsSharedFunctionInfoDontAdaptArguments(sfi)));
TNode<IntPtrT> formal_parameter_count = Signed(ChangeUint32ToWord(
LoadSharedFunctionInfoFormalParameterCountWithoutReceiver(sfi)));

@@ -843,7 +843,7 @@ TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
auto source = Parameter<Object>(Descriptor::kSource);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, TaggedNotEqual(target, source));
+CSA_DCHECK(this, TaggedNotEqual(target, source));
Label if_runtime(this, Label::kDeferred);
Return(SetOrCopyDataProperties(context, target, source, &if_runtime, false));
@@ -1050,9 +1050,9 @@ TF_BUILTIN(Abort, CodeStubAssembler) {
TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}
-TF_BUILTIN(AbortCSAAssert, CodeStubAssembler) {
+TF_BUILTIN(AbortCSADcheck, CodeStubAssembler) {
auto message = Parameter<String>(Descriptor::kMessageOrMessageId);
-TailCallRuntime(Runtime::kAbortCSAAssert, NoContextConstant(), message);
+TailCallRuntime(Runtime::kAbortCSADcheck, NoContextConstant(), message);
}
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
@@ -1234,7 +1234,7 @@ TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
GotoIf(TaggedEqual(on_non_existent,
SmiConstant(OnNonExistent::kThrowReferenceError)),
&throw_reference_error);
-CSA_ASSERT(this, TaggedEqual(on_non_existent,
+CSA_DCHECK(this, TaggedEqual(on_non_existent,
SmiConstant(OnNonExistent::kReturnUndefined)));
Return(UndefinedConstant());

@@ -29,7 +29,7 @@ class IntlBuiltinsAssembler : public CodeStubAssembler {
TNode<JSArray> AllocateEmptyJSArray(TNode<Context> context);
TNode<IntPtrT> PointerToSeqStringData(TNode<String> seq_string) {
-CSA_ASSERT(this,
+CSA_DCHECK(this,
IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
SeqTwoByteString::kHeaderSize);
@@ -55,7 +55,7 @@ TF_BUILTIN(StringToLowerCaseIntl, IntlBuiltinsAssembler) {
to_direct.TryToDirect(&runtime);
const TNode<Int32T> instance_type = to_direct.instance_type();
-CSA_ASSERT(this,
+CSA_DCHECK(this,
Word32BinaryNot(IsIndirectStringInstanceType(instance_type)));
GotoIfNot(IsOneByteStringInstanceType(instance_type), &runtime);

@@ -136,7 +136,7 @@ void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
&maybe_use_sfi_code);
// If it isn't undefined or fixed array it must be a feedback vector.
-CSA_ASSERT(this, IsFeedbackVector(feedback_cell_value));
+CSA_DCHECK(this, IsFeedbackVector(feedback_cell_value));
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
@@ -146,7 +146,7 @@ void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
// optimized Code object (we'd have tail-called it above). A usual case would
// be the InterpreterEntryTrampoline to start executing existing bytecode.
BIND(&maybe_use_sfi_code);
-CSA_ASSERT(this, TaggedNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
+CSA_DCHECK(this, TaggedNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
isolate(), CompileLazy))));
StoreObjectField(function, JSFunction::kCodeOffset, ToCodeT(sfi_code));

@@ -55,7 +55,7 @@ class MicrotaskQueueBuiltinsAssembler : public CodeStubAssembler {
TNode<RawPtrT> MicrotaskQueueBuiltinsAssembler::GetMicrotaskQueue(
TNode<Context> native_context) {
-CSA_ASSERT(this, IsNativeContext(native_context));
+CSA_DCHECK(this, IsNativeContext(native_context));
return LoadExternalPointerFromObject(native_context,
NativeContext::kMicrotaskQueueOffset,
kNativeContextMicrotaskQueueTag);
@@ -105,7 +105,7 @@ TNode<IntPtrT> MicrotaskQueueBuiltinsAssembler::CalculateRingBufferOffset(
void MicrotaskQueueBuiltinsAssembler::PrepareForContext(
TNode<Context> native_context, Label* bailout) {
-CSA_ASSERT(this, IsNativeContext(native_context));
+CSA_DCHECK(this, IsNativeContext(native_context));
// Skip the microtask execution if the associated context is shutdown.
GotoIf(WordEqual(GetMicrotaskQueue(native_context), IntPtrConstant(0)),
@@ -117,7 +117,7 @@ void MicrotaskQueueBuiltinsAssembler::PrepareForContext(
void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask(
TNode<Context> current_context, TNode<Microtask> microtask) {
-CSA_ASSERT(this, TaggedIsNotSmi(microtask));
+CSA_DCHECK(this, TaggedIsNotSmi(microtask));
StoreRoot(RootIndex::kCurrentMicrotask, microtask);
TNode<IntPtrT> saved_entered_context_count = GetEnteredContextCount();
@@ -378,7 +378,7 @@ TNode<IntPtrT> MicrotaskQueueBuiltinsAssembler::GetEnteredContextCount() {
void MicrotaskQueueBuiltinsAssembler::EnterMicrotaskContext(
TNode<Context> native_context) {
-CSA_ASSERT(this, IsNativeContext(native_context));
+CSA_DCHECK(this, IsNativeContext(native_context));
auto ref = ExternalReference::handle_scope_implementer_address(isolate());
TNode<RawPtrT> hsi = Load<RawPtrT>(ExternalConstant(ref));

@@ -278,7 +278,7 @@ TNode<JSArray> ObjectEntriesValuesBuiltinsAssembler::FastGetOwnValuesOrEntries(
{
// Currently, we will not invoke getters,
// so, map will not be changed.
-CSA_ASSERT(this, TaggedEqual(map, LoadMap(object)));
+CSA_DCHECK(this, TaggedEqual(map, LoadMap(object)));
TNode<IntPtrT> descriptor_entry = var_descriptor_number.value();
TNode<Name> next_key =
LoadKeyByDescriptorEntry(descriptors, descriptor_entry);
@@ -293,7 +293,7 @@ TNode<JSArray> ObjectEntriesValuesBuiltinsAssembler::FastGetOwnValuesOrEntries(
// If property is accessor, we escape fast path and call runtime.
GotoIf(IsPropertyKindAccessor(kind), if_call_runtime_with_fast_path);
-CSA_ASSERT(this, IsPropertyKindData(kind));
+CSA_DCHECK(this, IsPropertyKindData(kind));
// If desc is not undefined and desc.[[Enumerable]] is true, then skip to
// the next descriptor.
@@ -346,7 +346,7 @@ TNode<JSArray>
ObjectEntriesValuesBuiltinsAssembler::FinalizeValuesOrEntriesJSArray(
TNode<Context> context, TNode<FixedArray> result, TNode<IntPtrT> size,
TNode<Map> array_map, Label* if_empty) {
-CSA_ASSERT(this, IsJSArrayMap(array_map));
+CSA_DCHECK(this, IsJSArrayMap(array_map));
GotoIf(IntPtrEqual(size, IntPtrConstant(0)), if_empty);
TNode<JSArray> array = AllocateJSArray(array_map, result, SmiTag(size));
@@ -477,7 +477,7 @@ TF_BUILTIN(ObjectKeys, ObjectBuiltinsAssembler) {
&if_slow);
// Ensure that the {object} doesn't have any elements.
-CSA_ASSERT(this, IsJSObjectMap(object_map));
+CSA_DCHECK(this, IsJSObjectMap(object_map));
TNode<FixedArrayBase> object_elements = LoadElements(CAST(object));
GotoIf(IsEmptyFixedArray(object_elements), &if_empty_elements);
Branch(IsEmptySlowElementDictionary(object_elements), &if_empty_elements,
@@ -853,7 +853,7 @@ TF_BUILTIN(ObjectToString, ObjectBuiltinsAssembler) {
BIND(&if_object);
{
-CSA_ASSERT(this, IsJSReceiver(CAST(receiver)));
+CSA_DCHECK(this, IsJSReceiver(CAST(receiver)));
var_default = ObjectToStringConstant();
Goto(&checkstringtag);
}
@@ -868,7 +868,7 @@ TF_BUILTIN(ObjectToString, ObjectBuiltinsAssembler) {
GotoIf(IsHeapNumberMap(receiver_map), &if_number);
GotoIf(IsSymbolMap(receiver_map), &if_symbol);
GotoIf(IsUndefined(receiver), &return_undefined);
-CSA_ASSERT(this, IsNull(receiver));
+CSA_DCHECK(this, IsNull(receiver));
Return(NullToStringConstant());
BIND(&return_undefined);
@@ -980,7 +980,7 @@ TF_BUILTIN(ObjectToString, ObjectBuiltinsAssembler) {
LoadMapInstanceType(receiver_value_map);
GotoIf(IsBigIntInstanceType(receiver_value_instance_type),
&if_value_is_bigint);
-CSA_ASSERT(this, IsStringInstanceType(receiver_value_instance_type));
+CSA_DCHECK(this, IsStringInstanceType(receiver_value_instance_type));
Goto(&if_value_is_string);
BIND(&if_value_is_number);
@@ -1295,7 +1295,7 @@ TF_BUILTIN(CreateGeneratorObject, ObjectBuiltinsAssembler) {
TF_BUILTIN(ObjectGetOwnPropertyDescriptor, ObjectBuiltinsAssembler) {
auto argc = UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
+CSA_DCHECK(this, IsUndefined(Parameter<Object>(Descriptor::kJSNewTarget)));
CodeStubArguments args(this, argc);
TNode<Object> object_input = args.GetOptionalArgumentValue(0);
@@ -1497,7 +1497,7 @@ TNode<JSObject> ObjectBuiltinsAssembler::FromPropertyDescriptor(
Goto(&return_desc);
BIND(&bailout);
-CSA_ASSERT(this, Int32Constant(0));
+CSA_DCHECK(this, Int32Constant(0));
Unreachable();
}

@@ -91,7 +91,7 @@ TF_BUILTIN(CallProxy, ProxiesCodeStubAssembler) {
auto proxy = Parameter<JSProxy>(Descriptor::kFunction);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsCallable(proxy));
+CSA_DCHECK(this, IsCallable(proxy));
PerformStackCheck(context);
@@ -103,11 +103,11 @@ TF_BUILTIN(CallProxy, ProxiesCodeStubAssembler) {
CAST(LoadObjectField(proxy, JSProxy::kHandlerOffset));
// 2. If handler is null, throw a TypeError exception.
-CSA_ASSERT(this, IsNullOrJSReceiver(handler));
+CSA_DCHECK(this, IsNullOrJSReceiver(handler));
GotoIfNot(IsJSReceiver(handler), &throw_proxy_handler_revoked);
// 3. Assert: Type(handler) is Object.
-CSA_ASSERT(this, IsJSReceiver(handler));
+CSA_DCHECK(this, IsJSReceiver(handler));
// 4. Let target be the value of the [[ProxyTarget]] internal slot of O.
TNode<Object> target = LoadObjectField(proxy, JSProxy::kTargetOffset);
@@ -147,7 +147,7 @@ TF_BUILTIN(ConstructProxy, ProxiesCodeStubAssembler) {
auto new_target = Parameter<Object>(Descriptor::kNewTarget);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsCallable(proxy));
+CSA_DCHECK(this, IsCallable(proxy));
Label throw_proxy_handler_revoked(this, Label::kDeferred),
trap_undefined(this), not_an_object(this, Label::kDeferred);
@@ -157,11 +157,11 @@ TF_BUILTIN(ConstructProxy, ProxiesCodeStubAssembler) {
CAST(LoadObjectField(proxy, JSProxy::kHandlerOffset));
// 2. If handler is null, throw a TypeError exception.
-CSA_ASSERT(this, IsNullOrJSReceiver(handler));
+CSA_DCHECK(this, IsNullOrJSReceiver(handler));
GotoIfNot(IsJSReceiver(handler), &throw_proxy_handler_revoked);
// 3. Assert: Type(handler) is Object.
-CSA_ASSERT(this, IsJSReceiver(handler));
+CSA_DCHECK(this, IsJSReceiver(handler));
// 4. Let target be the value of the [[ProxyTarget]] internal slot of O.
TNode<Object> target = LoadObjectField(proxy, JSProxy::kTargetOffset);
@@ -198,7 +198,7 @@ TF_BUILTIN(ConstructProxy, ProxiesCodeStubAssembler) {
BIND(&trap_undefined);
{
// 6.a. Assert: target has a [[Construct]] internal method.
-CSA_ASSERT(this, IsConstructor(CAST(target)));
+CSA_DCHECK(this, IsConstructor(CAST(target)));
// 6.b. Return ? Construct(target, argumentsList, newTarget).
TailCallStub(CodeFactory::Construct(isolate()), context, target, new_target,

@@ -89,9 +89,9 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::AllocateRegExpResult(
TNode<Context> context, TNode<Smi> length, TNode<Smi> index,
TNode<String> input, TNode<JSRegExp> regexp, TNode<Number> last_index,
TNode<BoolT> has_indices, TNode<FixedArray>* elements_out) {
-CSA_ASSERT(this, SmiLessThanOrEqual(
+CSA_DCHECK(this, SmiLessThanOrEqual(
length, SmiConstant(JSArray::kMaxFastArrayLength)));
-CSA_ASSERT(this, SmiGreaterThan(length, SmiConstant(0)));
+CSA_DCHECK(this, SmiGreaterThan(length, SmiConstant(0)));
// Allocate.
@@ -285,7 +285,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
BIND(&named_captures);
{
-CSA_ASSERT(this, SmiGreaterThan(num_results, SmiConstant(1)));
+CSA_DCHECK(this, SmiGreaterThan(num_results, SmiConstant(1)));
// Preparations for named capture properties. Exit early if the result does
// not have any named captures to minimize performance impact.
@@ -295,7 +295,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
// We reach this point only if captures exist, implying that the assigned
// regexp engine must be able to handle captures.
-CSA_ASSERT(
+CSA_DCHECK(
this,
Word32Or(
SmiEqual(CAST(LoadFixedArrayElement(data, JSRegExp::kTagIndex)),
@@ -313,7 +313,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
TNode<FixedArray> names = CAST(maybe_names);
TNode<IntPtrT> names_length = LoadAndUntagFixedArrayBaseLength(names);
-CSA_ASSERT(this, IntPtrGreaterThan(names_length, IntPtrZero()));
+CSA_DCHECK(this, IntPtrGreaterThan(names_length, IntPtrZero()));
// Stash names in case we need them to build the indices array later.
StoreObjectField(result, JSRegExpResult::kNamesOffset, names);
@@ -446,8 +446,8 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
Label if_failure(this);
-CSA_ASSERT(this, IsNumberNormalized(last_index));
-CSA_ASSERT(this, IsNumberPositive(last_index));
+CSA_DCHECK(this, IsNumberNormalized(last_index));
+CSA_DCHECK(this, IsNumberPositive(last_index));
GotoIf(TaggedIsNotSmi(last_index), &if_failure);
TNode<IntPtrT> int_string_length = LoadStringLengthAsWord(string);
@@ -544,7 +544,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
{
Label next(this);
GotoIfNot(TaggedIsSmi(var_code.value()), &next);
-CSA_ASSERT(this, SmiEqual(CAST(var_code.value()),
+CSA_DCHECK(this, SmiEqual(CAST(var_code.value()),
SmiConstant(JSRegExp::kUninitializedValue)));
Goto(&next);
BIND(&next);
@@ -650,7 +650,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
IntPtrConstant(RegExp::kInternalRegExpFallbackToExperimental)),
&retry_experimental);
-CSA_ASSERT(this, IntPtrEqual(int_result,
+CSA_DCHECK(this, IntPtrEqual(int_result,
IntPtrConstant(RegExp::kInternalRegExpRetry)));
Goto(&runtime);
}
@@ -727,7 +727,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
TNode<ExternalReference> pending_exception_address =
ExternalConstant(ExternalReference::Create(
IsolateAddressId::kPendingExceptionAddress, isolate()));
-CSA_ASSERT(this, IsTheHole(Load<Object>(pending_exception_address)));
+CSA_DCHECK(this, IsTheHole(Load<Object>(pending_exception_address)));
#endif // DEBUG
CallRuntime(Runtime::kThrowStackOverflow, context);
Unreachable();
@@ -800,7 +800,7 @@ TNode<BoolT> RegExpBuiltinsAssembler::IsFastRegExpNoPrototype(
TNode<BoolT> RegExpBuiltinsAssembler::IsFastRegExpNoPrototype(
TNode<Context> context, TNode<Object> object) {
-CSA_ASSERT(this, TaggedIsNotSmi(object));
+CSA_DCHECK(this, TaggedIsNotSmi(object));
return IsFastRegExpNoPrototype(context, object, LoadMap(CAST(object)));
}
@@ -809,7 +809,7 @@ void RegExpBuiltinsAssembler::BranchIfFastRegExp(
PrototypeCheckAssembler::Flags prototype_check_flags,
base::Optional<DescriptorIndexNameValue> additional_property_to_check,
Label* if_isunmodified, Label* if_ismodified) {
-CSA_ASSERT(this, TaggedEqual(LoadMap(object), map));
+CSA_DCHECK(this, TaggedEqual(LoadMap(object), map));
GotoIfForceSlowPath(if_ismodified);
@@ -931,16 +931,16 @@ TF_BUILTIN(RegExpExecAtom, RegExpBuiltinsAssembler) {
auto match_info = Parameter<FixedArray>(Descriptor::kMatchInfo);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, TaggedIsPositiveSmi(last_index));
+CSA_DCHECK(this, TaggedIsPositiveSmi(last_index));
TNode<FixedArray> data = CAST(LoadObjectField(regexp, JSRegExp::kDataOffset));
-CSA_ASSERT(
+CSA_DCHECK(
this,
SmiEqual(CAST(UnsafeLoadFixedArrayElement(data, JSRegExp::kTagIndex)),
SmiConstant(JSRegExp::ATOM)));
// Callers ensure that last_index is in-bounds.
-CSA_ASSERT(this,
+CSA_DCHECK(this,
UintPtrLessThanOrEqual(SmiUntag(last_index),
LoadStringLengthAsWord(subject_string)));
@ -952,7 +952,7 @@ TF_BUILTIN(RegExpExecAtom, RegExpBuiltinsAssembler) {
//
// This is especially relevant for crbug.com/1075514: atom patterns are
// non-empty and thus guaranteed not to match at the end of the string.
-CSA_ASSERT(this, IntPtrGreaterThan(LoadStringLengthAsWord(needle_string),
+CSA_DCHECK(this, IntPtrGreaterThan(LoadStringLengthAsWord(needle_string),
IntPtrConstant(0)));
const TNode<Smi> match_from =
@@ -964,8 +964,8 @@ TF_BUILTIN(RegExpExecAtom, RegExpBuiltinsAssembler) {
BIND(&if_success);
{
-CSA_ASSERT(this, TaggedIsPositiveSmi(match_from));
-CSA_ASSERT(this, UintPtrLessThan(SmiUntag(match_from),
+CSA_DCHECK(this, TaggedIsPositiveSmi(match_from));
+CSA_DCHECK(this, UintPtrLessThan(SmiUntag(match_from),
LoadStringLengthAsWord(subject_string)));
const int kNumRegisters = 2;
@@ -1000,8 +1000,8 @@ TF_BUILTIN(RegExpExecInternal, RegExpBuiltinsAssembler) {
auto match_info = Parameter<RegExpMatchInfo>(Descriptor::kMatchInfo);
auto context = Parameter<Context>(Descriptor::kContext);
-CSA_ASSERT(this, IsNumberNormalized(last_index));
-CSA_ASSERT(this, IsNumberPositive(last_index));
+CSA_DCHECK(this, IsNumberNormalized(last_index));
+CSA_DCHECK(this, IsNumberPositive(last_index));
Return(RegExpExecInternal(context, regexp, string, last_index, match_info));
}
@@ -1026,7 +1026,7 @@ TNode<String> RegExpBuiltinsAssembler::FlagsGetter(TNode<Context> context,
if (is_fastpath) {
// Refer to JSRegExp's flag property on the fast-path.
-CSA_ASSERT(this, IsJSRegExp(CAST(regexp)));
+CSA_DCHECK(this, IsJSRegExp(CAST(regexp)));
const TNode<Smi> flags_smi =
CAST(LoadObjectField(CAST(regexp), JSRegExp::kFlagsOffset));
var_flags = SmiUntag(flags_smi);
@ -1401,8 +1401,8 @@ TNode<BoolT> RegExpBuiltinsAssembler::FlagGetter(TNode<Context> context,
TNode<Number> RegExpBuiltinsAssembler::AdvanceStringIndex(
TNode<String> string, TNode<Number> index, TNode<BoolT> is_unicode,
bool is_fastpath) {
CSA_ASSERT(this, IsNumberNormalized(index));
if (is_fastpath) CSA_ASSERT(this, TaggedIsPositiveSmi(index));
CSA_DCHECK(this, IsNumberNormalized(index));
if (is_fastpath) CSA_DCHECK(this, TaggedIsPositiveSmi(index));
// Default to last_index + 1.
// TODO(pwong): Consider using TrySmiAdd for the fast path to reduce generated
@ -1426,7 +1426,7 @@ TNode<Number> RegExpBuiltinsAssembler::AdvanceStringIndex(
// Must be in Smi range on the fast path. We control the value of {index}
// on all call-sites and can never exceed the length of the string.
STATIC_ASSERT(String::kMaxLength + 2 < Smi::kMaxValue);
CSA_ASSERT(this, TaggedIsPositiveSmi(index_plus_one));
CSA_DCHECK(this, TaggedIsPositiveSmi(index_plus_one));
}
Label if_isunicode(this), out(this);
@ -1513,8 +1513,8 @@ TNode<Object> RegExpMatchAllAssembler::CreateRegExpStringIterator(
TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
TNode<Context> context, TNode<JSRegExp> regexp, TNode<String> string,
const TNode<Smi> limit) {
CSA_ASSERT(this, IsFastRegExpPermissive(context, regexp));
CSA_ASSERT(this, Word32BinaryNot(FastFlagGetter(regexp, JSRegExp::kSticky)));
CSA_DCHECK(this, IsFastRegExpPermissive(context, regexp));
CSA_DCHECK(this, Word32BinaryNot(FastFlagGetter(regexp, JSRegExp::kSticky)));
const TNode<IntPtrT> int_limit = SmiUntag(limit);
@ -1619,7 +1619,7 @@ TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
match_indices, RegExpMatchInfo::kFirstCaptureIndex));
const TNode<Smi> match_to = CAST(UnsafeLoadFixedArrayElement(
match_indices, RegExpMatchInfo::kFirstCaptureIndex + 1));
CSA_ASSERT(this, SmiNotEqual(match_from, string_length));
CSA_DCHECK(this, SmiNotEqual(match_from, string_length));
// Advance index and continue if the match is empty.
{


@ -139,9 +139,9 @@ void SharedArrayBufferBuiltinsAssembler::DebugCheckAtomicIndex(
//
// This function must always be called after ValidateIntegerTypedArray, which
// will ensure that LoadJSArrayBufferViewBuffer will not be null.
CSA_ASSERT(this, Word32BinaryNot(
CSA_DCHECK(this, Word32BinaryNot(
IsDetachedBuffer(LoadJSArrayBufferViewBuffer(array))));
CSA_ASSERT(this, UintPtrLessThan(index, LoadJSTypedArrayLength(array)));
CSA_DCHECK(this, UintPtrLessThan(index, LoadJSTypedArrayLength(array)));
}
TNode<BigInt> SharedArrayBufferBuiltinsAssembler::BigIntFromSigned64(


@ -167,8 +167,8 @@ void StringBuiltinsAssembler::StringEqual_Core(
TNode<String> lhs, TNode<Word32T> lhs_instance_type, TNode<String> rhs,
TNode<Word32T> rhs_instance_type, TNode<IntPtrT> length, Label* if_equal,
Label* if_not_equal, Label* if_indirect) {
CSA_ASSERT(this, WordEqual(LoadStringLengthAsWord(lhs), length));
CSA_ASSERT(this, WordEqual(LoadStringLengthAsWord(rhs), length));
CSA_DCHECK(this, WordEqual(LoadStringLengthAsWord(lhs), length));
CSA_DCHECK(this, WordEqual(LoadStringLengthAsWord(rhs), length));
// Fast check to see if {lhs} and {rhs} refer to the same String object.
GotoIf(TaggedEqual(lhs, rhs), if_equal);
@ -244,8 +244,8 @@ void StringBuiltinsAssembler::StringEqual_Loop(
TNode<String> lhs, TNode<Word32T> lhs_instance_type, MachineType lhs_type,
TNode<String> rhs, TNode<Word32T> rhs_instance_type, MachineType rhs_type,
TNode<IntPtrT> length, Label* if_equal, Label* if_not_equal) {
CSA_ASSERT(this, WordEqual(LoadStringLengthAsWord(lhs), length));
CSA_ASSERT(this, WordEqual(LoadStringLengthAsWord(rhs), length));
CSA_DCHECK(this, WordEqual(LoadStringLengthAsWord(lhs), length));
CSA_DCHECK(this, WordEqual(LoadStringLengthAsWord(rhs), length));
// Compute the effective offset of the first character.
TNode<RawPtrT> lhs_data = DirectStringData(lhs, lhs_instance_type);
@ -341,7 +341,7 @@ TNode<String> StringBuiltinsAssembler::AllocateConsString(TNode<Uint32T> length,
TNode<String> StringBuiltinsAssembler::StringAdd(
TNode<ContextOrEmptyContext> context, TNode<String> left,
TNode<String> right) {
CSA_ASSERT(this, IsZeroOrContext(context));
CSA_DCHECK(this, IsZeroOrContext(context));
TVARIABLE(String, result);
Label check_right(this), runtime(this, Label::kDeferred), cons(this),
@ -540,7 +540,7 @@ TF_BUILTIN(StringAdd_CheckNone, StringBuiltinsAssembler) {
auto right = Parameter<String>(Descriptor::kRight);
TNode<ContextOrEmptyContext> context =
UncheckedParameter<ContextOrEmptyContext>(Descriptor::kContext);
CSA_ASSERT(this, IsZeroOrContext(context));
CSA_DCHECK(this, IsZeroOrContext(context));
Return(StringAdd(context, left, right));
}
@ -965,8 +965,8 @@ TNode<String> StringBuiltinsAssembler::GetSubstitution(
TNode<Context> context, TNode<String> subject_string,
TNode<Smi> match_start_index, TNode<Smi> match_end_index,
TNode<String> replace_string) {
CSA_ASSERT(this, TaggedIsPositiveSmi(match_start_index));
CSA_ASSERT(this, TaggedIsPositiveSmi(match_end_index));
CSA_DCHECK(this, TaggedIsPositiveSmi(match_start_index));
CSA_DCHECK(this, TaggedIsPositiveSmi(match_end_index));
TVARIABLE(String, var_result, replace_string);
Label runtime(this), out(this);
@ -984,7 +984,7 @@ TNode<String> StringBuiltinsAssembler::GetSubstitution(
BIND(&runtime);
{
CSA_ASSERT(this, TaggedIsPositiveSmi(dollar_index));
CSA_DCHECK(this, TaggedIsPositiveSmi(dollar_index));
const TNode<Object> matched =
CallBuiltin(Builtin::kStringSubstring, context, subject_string,
@ -1260,7 +1260,7 @@ TF_BUILTIN(StringPrototypeMatchAll, StringBuiltinsAssembler) {
TNode<JSArray> StringBuiltinsAssembler::StringToArray(
TNode<NativeContext> context, TNode<String> subject_string,
TNode<Smi> subject_length, TNode<Number> limit_number) {
CSA_ASSERT(this, SmiGreaterThan(subject_length, SmiConstant(0)));
CSA_DCHECK(this, SmiGreaterThan(subject_length, SmiConstant(0)));
Label done(this), call_runtime(this, Label::kDeferred),
fill_thehole_and_call_runtime(this, Label::kDeferred);
@ -1299,7 +1299,7 @@ TNode<JSArray> StringBuiltinsAssembler::StringToArray(
// TODO(jkummerow): Implement a CSA version of
// DisallowGarbageCollection and use that to guard
// ToDirectStringAssembler.PointerToData().
CSA_ASSERT(this, WordEqual(to_direct.PointerToData(&call_runtime),
CSA_DCHECK(this, WordEqual(to_direct.PointerToData(&call_runtime),
string_data));
TNode<Int32T> char_code =
UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data,
@ -1479,12 +1479,12 @@ TNode<Int32T> StringBuiltinsAssembler::LoadSurrogatePairAt(
TNode<Int32T> trail = var_trail.value();
// Check that this path is only taken if a surrogate pair is found
CSA_SLOW_ASSERT(this,
CSA_SLOW_DCHECK(this,
Uint32GreaterThanOrEqual(lead, Int32Constant(0xD800)));
CSA_SLOW_ASSERT(this, Uint32LessThan(lead, Int32Constant(0xDC00)));
CSA_SLOW_ASSERT(this,
CSA_SLOW_DCHECK(this, Uint32LessThan(lead, Int32Constant(0xDC00)));
CSA_SLOW_DCHECK(this,
Uint32GreaterThanOrEqual(trail, Int32Constant(0xDC00)));
CSA_SLOW_ASSERT(this, Uint32LessThan(trail, Int32Constant(0xE000)));
CSA_SLOW_DCHECK(this, Uint32LessThan(trail, Int32Constant(0xE000)));
switch (encoding) {
case UnicodeEncoding::UTF16:
@ -1758,7 +1758,7 @@ TNode<String> StringBuiltinsAssembler::SubString(TNode<String> string,
BIND(&original_string_or_invalid_length);
{
CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
CSA_DCHECK(this, IntPtrEqual(substr_length, string_length));
// Equal length - check if {from, to} == {0, str.length}.
GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);


@ -90,8 +90,8 @@ transitioning builtin StringToList(implicit context: Context)(string: String):
i = i + value.length_intptr;
arrayLength++;
}
assert(arrayLength >= 0);
assert(SmiTag(stringLength) >= arrayLength);
dcheck(arrayLength >= 0);
dcheck(SmiTag(stringLength) >= arrayLength);
array.length = arrayLength;
return array;
@ -121,7 +121,7 @@ IfInBounds(String, uintptr, uintptr), IfOutOfBounds {
goto IfInBounds(string, index, length);
}
case (indexHeapNumber: HeapNumber): {
assert(IsNumberNormalized(indexHeapNumber));
dcheck(IsNumberNormalized(indexHeapNumber));
// Valid string indices fit into Smi range, so HeapNumber index is
// definitely an out of bounds case.
goto IfOutOfBounds;


@ -338,7 +338,7 @@ void TypedArrayBuiltinsAssembler::
CallCCopyFastNumberJSArrayElementsToTypedArray(
TNode<Context> context, TNode<JSArray> source, TNode<JSTypedArray> dest,
TNode<UintPtrT> source_length, TNode<UintPtrT> offset) {
CSA_ASSERT(this,
CSA_DCHECK(this,
Word32BinaryNot(IsBigInt64ElementsKind(LoadElementsKind(dest))));
TNode<ExternalReference> f = ExternalConstant(
ExternalReference::copy_fast_number_jsarray_elements_to_typed_array());


@ -793,7 +793,7 @@ macro Is<A : type extends Object, B : type extends Object>(
macro UnsafeCast<A : type extends Object>(implicit context: Context)(o: Object):
A {
assert(Is<A>(o));
dcheck(Is<A>(o));
return %RawDownCast<A>(o);
}
@ -803,12 +803,12 @@ macro UnsafeConstCast<T: type>(r: const &T):&T {
UnsafeCast<RegExpMatchInfo>(implicit context: Context)(o: Object):
RegExpMatchInfo {
assert(Is<FixedArray>(o));
dcheck(Is<FixedArray>(o));
return %RawDownCast<RegExpMatchInfo>(o);
}
macro UnsafeCast<A : type extends WeakHeapObject>(o: A|Object): A {
assert(IsWeakOrCleared(o));
dcheck(IsWeakOrCleared(o));
return %RawDownCast<A>(o);
}


@ -29,7 +29,7 @@ FromConstexpr<Smi, constexpr int31>(i: constexpr int31): Smi {
return %FromConstexpr<Smi>(i);
}
FromConstexpr<PositiveSmi, constexpr int31>(i: constexpr int31): PositiveSmi {
assert(i >= 0);
dcheck(i >= 0);
return %FromConstexpr<PositiveSmi>(i);
}
FromConstexpr<String, constexpr string>(s: constexpr string): String {
@ -232,11 +232,11 @@ Convert<TaggedIndex, intptr>(i: intptr): TaggedIndex {
}
Convert<intptr, uintptr>(ui: uintptr): intptr {
const i = Signed(ui);
assert(i >= 0);
dcheck(i >= 0);
return i;
}
Convert<PositiveSmi, intptr>(i: intptr): PositiveSmi {
assert(IsValidPositiveSmi(i));
dcheck(IsValidPositiveSmi(i));
return %RawDownCast<PositiveSmi>(SmiTag(i));
}
Convert<PositiveSmi, uintptr>(ui: uintptr): PositiveSmi labels IfOverflow {


@ -22,7 +22,7 @@ macro SplitOffTail(weakCell: WeakCell): WeakCell|Undefined {
case (Undefined): {
}
case (tailIsNowAHead: WeakCell): {
assert(tailIsNowAHead.prev == weakCell);
dcheck(tailIsNowAHead.prev == weakCell);
tailIsNowAHead.prev = Undefined;
}
}
@ -37,7 +37,7 @@ PopClearedCell(finalizationRegistry: JSFinalizationRegistry): WeakCell|
return Undefined;
}
case (weakCell: WeakCell): {
assert(weakCell.prev == Undefined);
dcheck(weakCell.prev == Undefined);
finalizationRegistry.cleared_cells = SplitOffTail(weakCell);
// If the WeakCell has an unregister token, remove the cell from the
@ -118,9 +118,9 @@ FinalizationRegistryConstructor(
finalizationRegistry.flags =
SmiTag(FinalizationRegistryFlags{scheduled_for_cleanup: false});
// 7. Set finalizationRegistry.[[Cells]] to be an empty List.
assert(finalizationRegistry.active_cells == Undefined);
assert(finalizationRegistry.cleared_cells == Undefined);
assert(finalizationRegistry.key_map == Undefined);
dcheck(finalizationRegistry.active_cells == Undefined);
dcheck(finalizationRegistry.cleared_cells == Undefined);
dcheck(finalizationRegistry.key_map == Undefined);
// 8. Return finalizationRegistry.
return finalizationRegistry;
}


@ -39,7 +39,7 @@ struct FrameWithArgumentsInfo {
// This macro should only be used in builtins that can be called from
// interpreted or JITted code, not from CSA/Torque builtins (the number of
// returned formal parameters would be wrong).
// It is difficult to actually check/assert this, since interpreted or JITted
// It is difficult to actually check/dcheck this, since interpreted or JITted
// frames are StandardFrames, but so are hand-written builtins. Doing that
// more refined check would be prohibitively expensive.
macro GetFrameWithArgumentsInfo(implicit context: Context)():


@ -21,7 +21,7 @@ FromConstexpr<FrameType, constexpr FrameType>(t: constexpr FrameType):
Cast<FrameType>(o: Object): FrameType
labels CastError {
if (TaggedIsNotSmi(o)) goto CastError;
assert(
dcheck(
Convert<int32>(BitcastTaggedToWordForTagAndSmiBits(o)) <
Convert<int32>(kFrameTypeCount << kSmiTagSize));
return %RawDownCast<FrameType>(o);


@ -67,7 +67,7 @@ TNode<JSArray> GrowableFixedArray::ToJSArray(const TNode<Context> context) {
TNode<IntPtrT> GrowableFixedArray::NewCapacity(
TNode<IntPtrT> current_capacity) {
CSA_ASSERT(this,
CSA_DCHECK(this,
IntPtrGreaterThanOrEqual(current_capacity, IntPtrConstant(0)));
// Growth rate is analogous to JSObject::NewElementsCapacity:
@ -82,9 +82,9 @@ TNode<IntPtrT> GrowableFixedArray::NewCapacity(
TNode<FixedArray> GrowableFixedArray::ResizeFixedArray(
const TNode<IntPtrT> element_count, const TNode<IntPtrT> new_capacity) {
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(element_count, IntPtrConstant(0)));
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(new_capacity, IntPtrConstant(0)));
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(new_capacity, element_count));
CSA_DCHECK(this, IntPtrGreaterThanOrEqual(element_count, IntPtrConstant(0)));
CSA_DCHECK(this, IntPtrGreaterThanOrEqual(new_capacity, IntPtrConstant(0)));
CSA_DCHECK(this, IntPtrGreaterThanOrEqual(new_capacity, element_count));
const TNode<FixedArray> from_array = var_array_.value();


@ -10,14 +10,14 @@ struct GrowableFixedArray {
this.array.objects[this.length++] = obj;
}
macro ResizeFixedArray(newCapacity: intptr): FixedArray {
assert(this.length >= 0);
assert(newCapacity >= 0);
assert(newCapacity >= this.length);
dcheck(this.length >= 0);
dcheck(newCapacity >= 0);
dcheck(newCapacity >= this.length);
const first: intptr = 0;
return ExtractFixedArray(this.array, first, this.length, newCapacity);
}
macro EnsureCapacity() {
assert(this.length <= this.capacity);
dcheck(this.length <= this.capacity);
if (this.capacity == this.length) {
// Growth rate is analogous to JSObject::NewElementsCapacity:
// new_capacity = (current_capacity + (current_capacity >> 1)) + 16.


@ -21,7 +21,7 @@ macro InSameNativeContext(lhs: Context, rhs: Context): bool {
macro MaybeObjectToStrong(maybeObject: MaybeObject):
HeapObject labels IfCleared {
assert(IsWeakOrCleared(maybeObject));
dcheck(IsWeakOrCleared(maybeObject));
const weakObject = %RawDownCast<Weak<HeapObject>>(maybeObject);
return WeakToStrong(weakObject) otherwise IfCleared;
}
@ -91,10 +91,10 @@ macro SetCallFeedbackContent(implicit context: Context)(
macro CollectCallFeedback(
maybeTarget: JSAny, maybeReceiver: Lazy<JSAny>, context: Context,
maybeFeedbackVector: Undefined|FeedbackVector, slotId: uintptr): void {
// TODO(v8:9891): Remove this assert once all callers are ported to Torque.
// This assert ensures correctness of maybeFeedbackVector's type which can
// TODO(v8:9891): Remove this dcheck once all callers are ported to Torque.
// This dcheck ensures correctness of maybeFeedbackVector's type which can
// be easily broken for calls from CSA.
assert(
dcheck(
IsUndefined(maybeFeedbackVector) ||
Is<FeedbackVector>(maybeFeedbackVector));
const feedbackVector =
@ -158,7 +158,7 @@ macro CollectCallFeedback(
SetCallFeedbackContent(
feedbackVector, slotId, CallFeedbackContent::kReceiver);
} else {
assert(!FeedbackValueIsReceiver(feedbackVector, slotId));
dcheck(!FeedbackValueIsReceiver(feedbackVector, slotId));
}
TryInitializeAsMonomorphic(recordedFunction, feedbackVector, slotId)
otherwise TransitionToMegamorphic;
@ -170,10 +170,10 @@ macro CollectCallFeedback(
macro CollectInstanceOfFeedback(
maybeTarget: JSAny, context: Context,
maybeFeedbackVector: Undefined|FeedbackVector, slotId: uintptr): void {
// TODO(v8:9891): Remove this assert once all callers are ported to Torque.
// This assert ensures correctness of maybeFeedbackVector's type which can
// TODO(v8:9891): Remove this dcheck once all callers are ported to Torque.
// This dcheck ensures correctness of maybeFeedbackVector's type which can
// be easily broken for calls from CSA.
assert(
dcheck(
IsUndefined(maybeFeedbackVector) ||
Is<FeedbackVector>(maybeFeedbackVector));
const feedbackVector =
@ -228,10 +228,10 @@ macro CollectConstructFeedback(implicit context: Context)(
updateFeedbackMode: constexpr UpdateFeedbackMode):
never labels ConstructGeneric,
ConstructArray(AllocationSite) {
// TODO(v8:9891): Remove this assert once all callers are ported to Torque.
// This assert ensures correctness of maybeFeedbackVector's type which can
// TODO(v8:9891): Remove this dcheck once all callers are ported to Torque.
// This dcheck ensures correctness of maybeFeedbackVector's type which can
// be easily broken for calls from CSA.
assert(
dcheck(
IsUndefined(maybeFeedbackVector) ||
Is<FeedbackVector>(maybeFeedbackVector));


@ -22,7 +22,7 @@ macro PerformPolymorphicCheck(
const polymorphicArray = UnsafeCast<WeakFixedArray>(expectedPolymorphicArray);
const weakActualMap = MakeWeak(actualMap);
const length = polymorphicArray.length_intptr;
assert(length > 0);
dcheck(length > 0);
for (let mapIndex: intptr = 0; mapIndex < length;
mapIndex += FeedbackIteratorEntrySize()) {
@ -30,7 +30,7 @@ macro PerformPolymorphicCheck(
UnsafeCast<WeakHeapObject>(polymorphicArray[mapIndex]);
if (maybeCachedMap == weakActualMap) {
const handlerIndex = mapIndex + FeedbackIteratorHandlerOffset();
assert(handlerIndex < length);
dcheck(handlerIndex < length);
const maybeHandler =
Cast<Object>(polymorphicArray[handlerIndex]) otherwise unreachable;
if (TaggedEqual(maybeHandler, actualHandler)) {
@ -49,7 +49,7 @@ macro PerformMonomorphicCheck(
actualMap: Map, actualHandler: Smi|DataHandler): int32 {
if (TaggedEqual(expectedMap, actualMap)) {
const handlerIndex = slotIndex + 1;
assert(handlerIndex < feedbackVector.length_intptr);
dcheck(handlerIndex < feedbackVector.length_intptr);
const maybeHandler =
Cast<Object>(feedbackVector[handlerIndex]) otherwise unreachable;
if (TaggedEqual(actualHandler, maybeHandler)) {


@ -18,7 +18,7 @@ macro GetCoverageInfo(implicit context: Context)(function: JSFunction):
macro IncrementBlockCount(implicit context: Context)(
coverageInfo: CoverageInfo, slot: Smi) {
assert(Convert<int32>(slot) < coverageInfo.slot_count);
dcheck(Convert<int32>(slot) < coverageInfo.slot_count);
++coverageInfo.slots[slot].block_count;
}


@ -19,10 +19,10 @@ builtin GetTemplateObject(
// handler; the current advantage of the split implementation is that the
// bytecode can skip most work if feedback exists.
// TODO(v8:9891): Remove this assert once all callers are ported to Torque.
// This assert ensures correctness of maybeFeedbackVector's type which can
// TODO(v8:9891): Remove this dcheck once all callers are ported to Torque.
// This dcheck ensures correctness of maybeFeedbackVector's type which can
// be easily broken for calls from CSA.
assert(
dcheck(
IsUndefined(maybeFeedbackVector) ||
Is<FeedbackVector>(maybeFeedbackVector));
try {
@ -59,7 +59,7 @@ transitioning macro ForInNextSlow(
context: Context, slot: uintptr, receiver: JSAnyNotSmi, key: JSAny,
cacheType: Object, maybeFeedbackVector: Undefined|FeedbackVector,
guaranteedFeedback: constexpr UpdateFeedbackMode): JSAny {
assert(receiver.map != cacheType); // Handled on the fast path.
dcheck(receiver.map != cacheType); // Handled on the fast path.
UpdateFeedback(
SmiTag<ForInFeedback>(ForInFeedback::kAny), maybeFeedbackVector, slot,
guaranteedFeedback);


@ -52,10 +52,10 @@ transitioning builtin GetIteratorWithFeedback(
context: Context, receiver: JSAny, loadSlot: TaggedIndex,
callSlot: TaggedIndex,
maybeFeedbackVector: Undefined|FeedbackVector): JSAny {
// TODO(v8:9891): Remove this assert once all callers are ported to Torque.
// This assert ensures correctness of maybeFeedbackVector's type which can
// TODO(v8:9891): Remove this dcheck once all callers are ported to Torque.
// This dcheck ensures correctness of maybeFeedbackVector's type which can
// be easily broken for calls from CSA.
assert(
dcheck(
IsUndefined(maybeFeedbackVector) ||
Is<FeedbackVector>(maybeFeedbackVector));
let iteratorMethod: JSAny;


@ -416,7 +416,7 @@ MathHypot(
} else if (max == 0) {
return 0;
}
assert(max > 0);
dcheck(max > 0);
// Kahan summation to avoid rounding errors.
// Normalize the numbers to the largest one to avoid overflow.


@ -62,7 +62,7 @@ transitioning macro ThisNumberValue(implicit context: Context)(
}
macro ToCharCode(input: int32): char8 {
assert(0 <= input && input < 36);
dcheck(0 <= input && input < 36);
return input < 10 ?
%RawDownCast<char8>(Unsigned(input + kAsciiZero)) :
%RawDownCast<char8>(Unsigned(input - 10 + kAsciiLowerCaseA));
@ -78,7 +78,7 @@ macro NumberToStringSmi(x: int32, radix: int32): String labels Slow {
return StringFromSingleCharCode(ToCharCode(n));
}
} else {
assert(isNegative);
dcheck(isNegative);
if (n == kMinInt32) {
goto Slow;
}
@ -92,7 +92,7 @@ macro NumberToStringSmi(x: int32, radix: int32): String labels Slow {
temp = temp / radix;
length = length + 1;
}
assert(length > 0);
dcheck(length > 0);
const strSeq = AllocateNonEmptySeqOneByteString(Unsigned(length));
let cursor: intptr = Convert<intptr>(length) - 1;
while (n > 0) {
@ -102,15 +102,15 @@ macro NumberToStringSmi(x: int32, radix: int32): String labels Slow {
cursor = cursor - 1;
}
if (isNegative) {
assert(cursor == 0);
dcheck(cursor == 0);
// Insert '-' to result.
*UnsafeConstCast(&strSeq.chars[0]) = 45;
} else {
assert(cursor == -1);
dcheck(cursor == -1);
// In sync with Factory::SmiToString: If radix = 10 and positive number,
// update hash for string.
if (radix == 10) {
assert(strSeq.raw_hash_field == kNameEmptyHashField);
dcheck(strSeq.raw_hash_field == kNameEmptyHashField);
strSeq.raw_hash_field = MakeArrayIndexHash(Unsigned(x), Unsigned(length));
}
}


@ -58,7 +58,7 @@ ObjectFromEntries(
const fastIteratorResultMap: Map = GetIteratorResultMap();
let i: iterator::IteratorRecord = iterator::GetIterator(iterable);
try {
assert(!IsNullOrUndefined(i.object));
dcheck(!IsNullOrUndefined(i.object));
while (true) {
const step: JSReceiver =
iterator::IteratorStep(i, fastIteratorResultMap)


@ -194,7 +194,7 @@ transitioning builtin
FulfillPromise(implicit context: Context)(
promise: JSPromise, value: JSAny): Undefined {
// Assert: The value of promise.[[PromiseState]] is "pending".
assert(promise.Status() == PromiseState::kPending);
dcheck(promise.Status() == PromiseState::kPending);
RunContextPromiseHookResolve(promise);
@ -469,7 +469,7 @@ transitioning macro PerformPromiseThenImpl(implicit context: Context)(
resultPromiseOrCapability);
} else
deferred {
assert(promise.Status() == PromiseState::kRejected);
dcheck(promise.Status() == PromiseState::kRejected);
handlerContext = ExtractHandlerContext(onRejected, onFulfilled);
microtask = NewPromiseRejectReactionJobTask(
handlerContext, reactionsOrResult, onRejected,


@ -103,7 +103,7 @@ transitioning macro PromiseAllResolveElementClosure<F: type>(
}
}
assert(
dcheck(
promiseContext.length ==
SmiTag(PromiseAllResolveElementContextSlots::
kPromiseAllResolveElementLength));
@ -111,10 +111,10 @@ transitioning macro PromiseAllResolveElementClosure<F: type>(
function.context = nativeContext;
// Determine the index from the {function}.
assert(kPropertyArrayNoHashSentinel == 0);
dcheck(kPropertyArrayNoHashSentinel == 0);
const identityHash =
LoadJSReceiverIdentityHash(function) otherwise unreachable;
assert(identityHash > 0);
dcheck(identityHash > 0);
const index = identityHash - 1;
let remainingElementsCount = *ContextSlot(


@ -44,15 +44,15 @@ macro CreatePromiseAllResolveElementFunction(implicit context: Context)(
resolveElementContext: PromiseAllResolveElementContext, index: Smi,
nativeContext: NativeContext,
resolveFunction: SharedFunctionInfo): JSFunction {
assert(index > 0);
assert(index < kPropertyArrayHashFieldMax);
dcheck(index > 0);
dcheck(index < kPropertyArrayHashFieldMax);
const map = *ContextSlot(
nativeContext, ContextSlot::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
const resolve = AllocateFunctionWithMapAndContext(
map, resolveFunction, resolveElementContext);
assert(kPropertyArrayNoHashSentinel == 0);
dcheck(kPropertyArrayNoHashSentinel == 0);
resolve.properties_or_hash = index;
return resolve;
}
@ -332,7 +332,7 @@ transitioning macro GeneratePromiseAll<F1: type, F2: type>(
const capability = NewPromiseCapability(receiver, False);
// NewPromiseCapability guarantees that receiver is Constructor.
assert(Is<Constructor>(receiver));
dcheck(Is<Constructor>(receiver));
const constructor = UnsafeCast<Constructor>(receiver);
try {


@ -57,14 +57,14 @@ transitioning macro CreatePromiseAnyRejectElementContext(
macro CreatePromiseAnyRejectElementFunction(implicit context: Context)(
rejectElementContext: PromiseAnyRejectElementContext, index: Smi,
nativeContext: NativeContext): JSFunction {
assert(index > 0);
assert(index < kPropertyArrayHashFieldMax);
dcheck(index > 0);
dcheck(index < kPropertyArrayHashFieldMax);
const map = *ContextSlot(
nativeContext, ContextSlot::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
const rejectInfo = PromiseAnyRejectElementSharedFunConstant();
const reject =
AllocateFunctionWithMapAndContext(map, rejectInfo, rejectElementContext);
assert(kPropertyArrayNoHashSentinel == 0);
dcheck(kPropertyArrayNoHashSentinel == 0);
reject.properties_or_hash = index;
return reject;
}
@ -89,7 +89,7 @@ PromiseAnyRejectElementClosure(
return Undefined;
}
assert(
dcheck(
context.length ==
SmiTag(
PromiseAnyRejectElementContextSlots::kPromiseAnyRejectElementLength));
@ -100,9 +100,9 @@ PromiseAnyRejectElementClosure(
target.context = nativeContext;
// 5. Let index be F.[[Index]].
assert(kPropertyArrayNoHashSentinel == 0);
dcheck(kPropertyArrayNoHashSentinel == 0);
const identityHash = LoadJSReceiverIdentityHash(target) otherwise unreachable;
assert(identityHash > 0);
dcheck(identityHash > 0);
const index = identityHash - 1;
// 6. Let errors be F.[[Errors]].
@ -328,7 +328,7 @@ PromiseAny(
const capability = NewPromiseCapability(receiver, False);
// NewPromiseCapability guarantees that receiver is Constructor.
assert(Is<Constructor>(receiver));
dcheck(Is<Constructor>(receiver));
const constructor = UnsafeCast<Constructor>(receiver);
try {
@ -365,7 +365,7 @@ PromiseAny(
goto Reject(e);
} label Reject(e: Object) deferred {
// Exception must be bound to a JS value.
assert(e != TheHole);
dcheck(e != TheHole);
Call(
context, UnsafeCast<Callable>(capability.reject), Undefined,
UnsafeCast<JSAny>(e));


@ -70,7 +70,7 @@ PromiseCatchFinally(
*ContextSlot(context, PromiseFinallyContextSlot::kConstructorSlot);
// 5. Assert: IsConstructor(C) is true.
assert(IsConstructor(constructor));
dcheck(IsConstructor(constructor));
// 6. Let promise be ? PromiseResolve(C, result).
const promise = PromiseResolve(constructor, result);
@ -117,7 +117,7 @@ PromiseThenFinally(
*ContextSlot(context, PromiseFinallyContextSlot::kConstructorSlot);
// 5. Assert: IsConstructor(C) is true.
assert(IsConstructor(constructor));
dcheck(IsConstructor(constructor));
// 6. Let promise be ? PromiseResolve(C, result).
const promise = PromiseResolve(constructor, result);
@ -185,7 +185,7 @@ PromisePrototypeFinally(
}
// 4. Assert: IsConstructor(C) is true.
assert(IsConstructor(constructor));
dcheck(IsConstructor(constructor));
// 5. If IsCallable(onFinally) is not true,
// a. Let thenFinally be onFinally.


@ -49,7 +49,7 @@ macro PromiseInit(promise: JSPromise): void {
macro InnerNewJSPromise(implicit context: Context)(): JSPromise {
const promiseFun = *NativeContextSlot(ContextSlot::PROMISE_FUNCTION_INDEX);
assert(IsFunctionWithPrototypeSlotMap(promiseFun.map));
dcheck(IsFunctionWithPrototypeSlotMap(promiseFun.map));
const promiseMap = UnsafeCast<Map>(promiseFun.prototype_or_initial_map);
const promiseHeapObject = promise_internal::AllocateJSPromise(context);
*UnsafeConstCast(&promiseHeapObject.map) = promiseMap;
@ -230,7 +230,7 @@ transitioning macro NewJSPromise(implicit context: Context)(): JSPromise {
@export
transitioning macro NewJSPromise(implicit context: Context)(
status: constexpr PromiseState, result: JSAny): JSPromise {
assert(status != PromiseState::kPending);
dcheck(status != PromiseState::kPending);
const instance = InnerNewJSPromise();
instance.reactions_or_result = result;


@ -27,7 +27,7 @@ PromiseRace(
const promise = capability.promise;
// NewPromiseCapability guarantees that receiver is Constructor.
assert(Is<Constructor>(receiver));
dcheck(Is<Constructor>(receiver));
const constructor = UnsafeCast<Constructor>(receiver);
// For catch prediction, don't treat the .then calls as handling it;


@ -138,8 +138,8 @@ ResolvePromise(implicit context: Context)(
// ensures that the intrinsic %ObjectPrototype% doesn't contain any
// "then" property. This helps to avoid negative lookups on iterator
// results from async generators.
assert(IsJSReceiverMap(resolutionMap));
assert(!IsPromiseThenProtectorCellInvalid());
dcheck(IsJSReceiverMap(resolutionMap));
dcheck(!IsPromiseThenProtectorCellInvalid());
if (resolutionMap ==
*NativeContextSlot(
nativeContext, ContextSlot::ITERATOR_RESULT_MAP_INDEX)) {


@ -15,15 +15,15 @@ ProxyDeleteProperty(implicit context: Context)(
// Handle deeply nested proxy.
PerformStackCheck();
// 1. Assert: IsPropertyKey(P) is true.
assert(TaggedIsNotSmi(name));
assert(Is<Name>(name));
assert(!IsPrivateSymbol(name));
dcheck(TaggedIsNotSmi(name));
dcheck(Is<Name>(name));
dcheck(!IsPrivateSymbol(name));
try {
// 2. Let handler be O.[[ProxyHandler]].
// 3. If handler is null, throw a TypeError exception.
// 4. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;


@ -17,9 +17,9 @@ ProxyGetProperty(implicit context: Context)(
onNonExistent: Smi): JSAny {
PerformStackCheck();
// 1. Assert: IsPropertyKey(P) is true.
assert(TaggedIsNotSmi(name));
assert(Is<Name>(name));
assert(!IsPrivateSymbol(name));
dcheck(TaggedIsNotSmi(name));
dcheck(Is<Name>(name));
dcheck(!IsPrivateSymbol(name));
// 2. Let handler be O.[[ProxyHandler]].
// 3. If handler is null, throw a TypeError exception.


@ -16,7 +16,7 @@ ProxyGetPrototypeOf(implicit context: Context)(proxy: JSProxy): JSAny {
// 1. Let handler be O.[[ProxyHandler]].
// 2. If handler is null, throw a TypeError exception.
// 3. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;
@ -40,7 +40,7 @@ ProxyGetPrototypeOf(implicit context: Context)(proxy: JSProxy): JSAny {
// 9. Let extensibleTarget be ? IsExtensible(target).
// 10. If extensibleTarget is true, return handlerProto.
const extensibleTarget: JSAny = object::ObjectIsExtensibleImpl(target);
assert(extensibleTarget == True || extensibleTarget == False);
dcheck(extensibleTarget == True || extensibleTarget == False);
if (extensibleTarget == True) {
return handlerProto;
}


@ -10,19 +10,19 @@ namespace proxy {
// https://tc39.github.io/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-hasproperty-p
transitioning builtin ProxyHasProperty(implicit context: Context)(
proxy: JSProxy, name: PropertyKey): JSAny {
assert(Is<JSProxy>(proxy));
dcheck(Is<JSProxy>(proxy));
PerformStackCheck();
// 1. Assert: IsPropertyKey(P) is true.
assert(Is<Name>(name));
assert(!IsPrivateSymbol(name));
dcheck(Is<Name>(name));
dcheck(!IsPrivateSymbol(name));
try {
// 2. Let handler be O.[[ProxyHandler]].
// 3. If handler is null, throw a TypeError exception.
// 4. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;


@ -16,7 +16,7 @@ transitioning builtin ProxyIsExtensible(implicit context: Context)(
// 1. Let handler be O.[[ProxyHandler]].
// 2. If handler is null, throw a TypeError exception.
// 3. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;


@ -17,7 +17,7 @@ ProxyPreventExtensions(implicit context: Context)(
// 1. Let handler be O.[[ProxyHandler]].
// 2. If handler is null, throw a TypeError exception.
// 3. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;
@ -38,7 +38,7 @@ ProxyPreventExtensions(implicit context: Context)(
// 8.b If extensibleTarget is true, throw a TypeError exception.
if (ToBoolean(trapResult)) {
const extensibleTarget: JSAny = object::ObjectIsExtensibleImpl(target);
assert(extensibleTarget == True || extensibleTarget == False);
dcheck(extensibleTarget == True || extensibleTarget == False);
if (extensibleTarget == True) {
ThrowTypeError(MessageTemplate::kProxyPreventExtensionsExtensible);
}


@ -26,7 +26,7 @@ ProxyRevoke(js-implicit context: Context)(): Undefined {
*proxySlot = Null;
// 4. Assert: p is a Proxy object.
assert(Is<JSProxy>(proxy));
dcheck(Is<JSProxy>(proxy));
// 5. Set p.[[ProxyTarget]] to null.
proxy.target = Null;


@ -22,8 +22,8 @@ ProxySetProperty(implicit context: Context)(
proxy: JSProxy, name: PropertyKey|PrivateSymbol, value: JSAny,
receiverValue: JSAny): JSAny {
// 1. Assert: IsPropertyKey(P) is true.
assert(TaggedIsNotSmi(name));
assert(Is<Name>(name));
dcheck(TaggedIsNotSmi(name));
dcheck(Is<Name>(name));
let key: PropertyKey;
typeswitch (name) {
@ -40,7 +40,7 @@ ProxySetProperty(implicit context: Context)(
// 2. Let handler be O.[[ProxyHandler]].
// 3. If handler is null, throw a TypeError exception.
// 4. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;


@ -15,12 +15,12 @@ ProxySetPrototypeOf(implicit context: Context)(
const kTrapName: constexpr string = 'setPrototypeOf';
try {
// 1. Assert: Either Type(V) is Object or Type(V) is Null.
assert(proto == Null || Is<JSReceiver>(proto));
dcheck(proto == Null || Is<JSReceiver>(proto));
// 2. Let handler be O.[[ProxyHandler]].
// 3. If handler is null, throw a TypeError exception.
// 4. Assert: Type(handler) is Object.
assert(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
dcheck(proxy.handler == Null || Is<JSReceiver>(proxy.handler));
const handler =
Cast<JSReceiver>(proxy.handler) otherwise ThrowProxyHandlerRevoked;
@ -48,7 +48,7 @@ ProxySetPrototypeOf(implicit context: Context)(
// 10. Let extensibleTarget be ? IsExtensible(target).
// 11. If extensibleTarget is true, return true.
const extensibleTarget: Object = object::ObjectIsExtensibleImpl(target);
assert(extensibleTarget == True || extensibleTarget == False);
dcheck(extensibleTarget == True || extensibleTarget == False);
if (extensibleTarget == True) {
return True;
}


@ -41,7 +41,7 @@ transitioning macro RegExpPrototypeMatchAllImpl(implicit context: Context)(
const flags: String = FastFlagsGetter(fastRegExp);
matcher = RegExpCreate(nativeContext, source, flags);
const matcherRegExp = UnsafeCast<JSRegExp>(matcher);
assert(IsFastRegExpPermissive(matcherRegExp));
dcheck(IsFastRegExpPermissive(matcherRegExp));
// 7. Let lastIndex be ? ToLength(? Get(R, "lastIndex")).
// 8. Perform ? Set(matcher, "lastIndex", lastIndex, true).
@ -159,7 +159,7 @@ transitioning javascript builtin RegExpStringIteratorPrototypeNext(
return AllocateJSIteratorResult(UnsafeCast<JSAny>(match), False);
}
// a. If global is true,
assert(flags.global);
dcheck(flags.global);
if (isFastRegExp) {
// i. Let matchStr be ? ToString(? Get(match, "0")).
const match = UnsafeCast<JSRegExpResult>(match);
@ -168,7 +168,7 @@ transitioning javascript builtin RegExpStringIteratorPrototypeNext(
// When iterating_regexp is fast, we assume it stays fast even after
// accessing the first match from the RegExp result.
assert(IsFastRegExpPermissive(iteratingRegExp));
dcheck(IsFastRegExpPermissive(iteratingRegExp));
const iteratingRegExp = UnsafeCast<JSRegExp>(iteratingRegExp);
if (matchStr == kEmptyString) {
// 1. Let thisIndex be ? ToLength(? Get(R, "lastIndex")).
@ -186,7 +186,7 @@ transitioning javascript builtin RegExpStringIteratorPrototypeNext(
// iii. Return ! CreateIterResultObject(match, false).
return AllocateJSIteratorResult(match, False);
}
assert(!isFastRegExp);
dcheck(!isFastRegExp);
// i. Let matchStr be ? ToString(? Get(match, "0")).
const match = UnsafeCast<JSAny>(match);
const matchStr = ToString_Inline(GetProperty(match, SmiConstant(0)));


@ -22,7 +22,7 @@ extern macro UnsafeLoadFixedArrayElement(
transitioning macro RegExpPrototypeMatchBody(implicit context: Context)(
regexp: JSReceiver, string: String, isFastPath: constexpr bool): JSAny {
if constexpr (isFastPath) {
assert(Is<FastJSRegExp>(regexp));
dcheck(Is<FastJSRegExp>(regexp));
}
const isGlobal: bool = FlagGetter(regexp, Flag::kGlobal, isFastPath);
@ -32,7 +32,7 @@ transitioning macro RegExpPrototypeMatchBody(implicit context: Context)(
RegExpExec(regexp, string);
}
assert(isGlobal);
dcheck(isGlobal);
const isUnicode: bool = FlagGetter(regexp, Flag::kUnicode, isFastPath);
StoreLastIndex(regexp, 0, isFastPath);
@ -74,7 +74,7 @@ transitioning macro RegExpPrototypeMatchBody(implicit context: Context)(
string, UnsafeCast<Smi>(matchFrom), UnsafeCast<Smi>(matchTo));
}
} else {
assert(!isFastPath);
dcheck(!isFastPath);
const resultTemp = RegExpExec(regexp, string);
if (resultTemp == Null) {
goto IfDidNotMatch;
@ -96,7 +96,7 @@ transitioning macro RegExpPrototypeMatchBody(implicit context: Context)(
}
let lastIndex = LoadLastIndex(regexp, isFastPath);
if constexpr (isFastPath) {
assert(TaggedIsPositiveSmi(lastIndex));
dcheck(TaggedIsPositiveSmi(lastIndex));
} else {
lastIndex = ToLength_Inline(lastIndex);
}
@ -109,7 +109,7 @@ transitioning macro RegExpPrototypeMatchBody(implicit context: Context)(
// incremented to overflow the Smi range since the maximal string
// length is less than the maximal Smi value.
StaticAssertStringLengthFitsSmi();
assert(TaggedIsPositiveSmi(newLastIndex));
dcheck(TaggedIsPositiveSmi(newLastIndex));
}
StoreLastIndex(regexp, newLastIndex, isFastPath);


@ -175,9 +175,9 @@ transitioning macro RegExpReplaceFastString(implicit context: Context)(
transitioning builtin RegExpReplace(implicit context: Context)(
regexp: FastJSRegExp, string: String, replaceValue: JSAny): String {
// TODO(pwong): Remove assert when all callers (StringPrototypeReplace) are
// TODO(pwong): Remove dcheck when all callers (StringPrototypeReplace) are
// from Torque.
assert(Is<FastJSRegExp>(regexp));
dcheck(Is<FastJSRegExp>(regexp));
// 2. Is {replace_value} callable?
typeswitch (replaceValue) {


@ -9,7 +9,7 @@ namespace regexp {
transitioning macro
RegExpPrototypeSearchBodyFast(implicit context: Context)(
regexp: JSRegExp, string: String): JSAny {
assert(IsFastRegExpPermissive(regexp));
dcheck(IsFastRegExpPermissive(regexp));
// Grab the initial value of last index.
const previousLastIndex: Smi = FastLoadLastIndex(regexp);


@ -86,7 +86,7 @@ transitioning macro RegExpPrototypeExecBodyWithoutResult(
regexp: JSRegExp, string: String, regexpLastIndex: Number,
isFastPath: constexpr bool): RegExpMatchInfo labels IfDidNotMatch {
if (isFastPath) {
assert(HasInitialRegExpMap(regexp));
dcheck(HasInitialRegExpMap(regexp));
} else {
IncrementUseCounter(context, SmiConstant(kRegExpExecCalledOnSlowRegExp));
}
@ -397,7 +397,7 @@ transitioning macro IsRegExp(implicit context: Context)(obj: JSAny): bool {
return Is<JSRegExp>(receiver);
}
assert(value != Undefined);
dcheck(value != Undefined);
// The common path. Symbol.match exists, equals the RegExpPrototypeMatch
// function (and is thus trueish), and the receiver is a JSRegExp.
if (ToBoolean(value)) {
@ -408,7 +408,7 @@ transitioning macro IsRegExp(implicit context: Context)(obj: JSAny): bool {
return true;
}
assert(!ToBoolean(value));
dcheck(!ToBoolean(value));
if (Is<JSRegExp>(receiver)) {
IncrementUseCounter(context, SmiConstant(kRegExpMatchIsFalseishOnJSRegExp));
}


@ -22,7 +22,7 @@ transitioning macro StringPad(implicit context: Context)(
return receiverString;
}
const maxLength: Number = ToLength_Inline(arguments[0]);
assert(IsNumberNormalized(maxLength));
dcheck(IsNumberNormalized(maxLength));
typeswitch (maxLength) {
case (smiMaxLength: Smi): {
@ -49,7 +49,7 @@ transitioning macro StringPad(implicit context: Context)(
}
// Pad.
assert(fillLength > 0);
dcheck(fillLength > 0);
// Throw if max_length is greater than String::kMaxLength.
if (!TaggedIsSmi(maxLength)) {
ThrowInvalidStringLength(context);
@ -59,7 +59,7 @@ transitioning macro StringPad(implicit context: Context)(
if (smiMaxLength > SmiConstant(kStringMaxLength)) {
ThrowInvalidStringLength(context);
}
assert(smiMaxLength > stringLength);
dcheck(smiMaxLength > stringLength);
const padLength: Smi = smiMaxLength - stringLength;
let padding: String;
@ -85,11 +85,11 @@ transitioning macro StringPad(implicit context: Context)(
}
// Return result.
assert(padLength == padding.length_smi);
dcheck(padLength == padding.length_smi);
if (variant == kStringPadStart) {
return padding + receiverString;
}
assert(variant == kStringPadEnd);
dcheck(variant == kStringPadEnd);
return receiverString + padding;
}


@ -7,8 +7,8 @@ const kBuiltinName: constexpr string = 'String.prototype.repeat';
builtin StringRepeat(implicit context: Context)(
string: String, count: Smi): String {
assert(count >= 0);
assert(string != kEmptyString);
dcheck(count >= 0);
dcheck(string != kEmptyString);
let result: String = kEmptyString;
let powerOfTwoRepeats: String = string;
@ -50,7 +50,7 @@ transitioning javascript builtin StringPrototypeRepeat(
return StringRepeat(s, n);
}
case (heapNum: HeapNumber): deferred {
assert(IsNumberNormalized(heapNum));
dcheck(IsNumberNormalized(heapNum));
const n = LoadHeapNumberValue(heapNum);
// 4. If n < 0, throw a RangeError exception.


@ -27,7 +27,7 @@ transitioning javascript builtin StringPrototypeSubstr(
// 7. Let resultLength be min(max(end, 0), size - intStart).
const length = arguments[1];
const lengthLimit = size - initStart;
assert(lengthLimit <= size);
dcheck(lengthLimit <= size);
const resultLength: uintptr = length != Undefined ?
ClampToIndexRange(length, lengthLimit) :
lengthLimit;


@ -231,7 +231,7 @@ const kAllocateBaseFlags: constexpr AllocationFlag =
AllocationFlag::kAllowLargeObjectAllocation;
macro AllocateFromNew(
sizeInBytes: intptr, map: Map, pretenured: bool): UninitializedHeapObject {
assert(ValidAllocationSize(sizeInBytes, map));
dcheck(ValidAllocationSize(sizeInBytes, map));
if (pretenured) {
return Allocate(
sizeInBytes,
@ -321,7 +321,7 @@ struct UninitializedIterator {}
// %RawDownCast should *never* be used anywhere in Torque code except for
// in Torque-based UnsafeCast operators preceded by an appropriate
// type assert()
// type dcheck()
intrinsic %RawDownCast<To: type, From: type>(x: From): To;
intrinsic %RawConstexprCast<To: type, From: type>(f: From): To;


@ -28,8 +28,8 @@ transitioning macro AllocateTypedArray(implicit context: Context)(
isLengthTracking: bool): JSTypedArray {
let elements: ByteArray;
if constexpr (isOnHeap) {
assert(!IsResizableArrayBuffer(buffer));
assert(!isLengthTracking);
dcheck(!IsResizableArrayBuffer(buffer));
dcheck(!isLengthTracking);
elements = AllocateByteArray(byteLength);
} else {
elements = kEmptyByteArray;
@ -44,7 +44,7 @@ transitioning macro AllocateTypedArray(implicit context: Context)(
// allocator is NOT used. When the mock array buffer is used, impossibly
// large allocations are allowed that would erroneously cause an overflow
// and this assertion to fail.
assert(
dcheck(
IsMockArrayBufferAllocatorFlag() ||
(backingStore + byteOffset) >= backingStore);
}
@ -67,7 +67,7 @@ transitioning macro AllocateTypedArray(implicit context: Context)(
} else {
typed_array::SetJSTypedArrayOffHeapDataPtr(
typedArray, buffer.backing_store_ptr, byteOffset);
assert(
dcheck(
typedArray.data_ptr ==
(buffer.backing_store_ptr + Convert<intptr>(byteOffset)));
}
@ -164,7 +164,7 @@ transitioning macro ConstructByArrayLike(implicit context: Context)(
} else if (length > 0) {
const byteLength = typedArray.byte_length;
assert(byteLength <= kArrayBufferMaxByteLength);
dcheck(byteLength <= kArrayBufferMaxByteLength);
if (IsSharedArrayBuffer(src.buffer)) {
typed_array::CallCRelaxedMemcpy(
typedArray.data_ptr, src.data_ptr, byteLength);
@ -326,7 +326,7 @@ transitioning macro ConstructByArrayBuffer(implicit context: Context)(
transitioning macro TypedArrayCreateByLength(implicit context: Context)(
constructor: Constructor, length: Number, methodName: constexpr string):
JSTypedArray {
assert(IsSafeInteger(length));
dcheck(IsSafeInteger(length));
// 1. Let newTypedArray be ? Construct(constructor, argumentList).
const newTypedArrayObj = Construct(constructor, length);
@ -384,7 +384,7 @@ transitioning macro ConstructByJSReceiver(implicit context: Context)(
transitioning builtin CreateTypedArray(
context: Context, target: JSFunction, newTarget: JSReceiver, arg1: JSAny,
arg2: JSAny, arg3: JSAny): JSTypedArray {
assert(IsConstructor(target));
dcheck(IsConstructor(target));
// 4. Let O be ? AllocateTypedArray(constructorName, NewTarget,
// "%TypedArrayPrototype%").
try {
@ -441,7 +441,7 @@ transitioning macro TypedArraySpeciesCreate(implicit context: Context)(
// It is assumed that the CreateTypedArray builtin does not produce a
// typed array that fails ValidateTypedArray
assert(!IsDetachedBuffer(typedArray.buffer));
dcheck(!IsDetachedBuffer(typedArray.buffer));
return typedArray;
} label IfSlow deferred {
@ -455,7 +455,7 @@ transitioning macro TypedArraySpeciesCreate(implicit context: Context)(
if constexpr (numArgs == 1) {
newObj = Construct(constructor, arg0);
} else {
assert(numArgs == 3);
dcheck(numArgs == 3);
newObj = Construct(constructor, arg0, arg1, arg2);
}


@ -255,8 +255,8 @@ TypedArrayPrototypeSetTypedArray(implicit context: Context, receiver: JSAny)(
otherwise unreachable;
const dstPtr: RawPtr = target.data_ptr + Convert<intptr>(startOffset);
assert(countBytes <= target.byte_length - startOffset);
assert(countBytes <= typedArray.byte_length);
dcheck(countBytes <= target.byte_length - startOffset);
dcheck(countBytes <= typedArray.byte_length);
// 29. If srcType is the same as targetType, then
// a. NOTE: If srcType and targetType are the same, the transfer must


@ -36,8 +36,8 @@ macro FastCopy(
otherwise unreachable;
const srcPtr: RawPtr = src.data_ptr + Convert<intptr>(startOffset);
assert(countBytes <= dest.byte_length);
assert(countBytes <= src.byte_length - startOffset);
dcheck(countBytes <= dest.byte_length);
dcheck(countBytes <= src.byte_length - startOffset);
if (IsSharedArrayBuffer(src.buffer)) {
// SABs need a relaxed memmove to preserve atomicity.


@ -56,7 +56,7 @@ TypedArrayMerge(
} else {
// No elements on the left, but the right does, so we take
// from the right.
assert(left == middle);
dcheck(left == middle);
target.objects[targetIndex] = source.objects[right++];
}
}
@ -66,7 +66,7 @@ transitioning builtin
TypedArrayMergeSort(implicit context: Context)(
source: FixedArray, from: uintptr, to: uintptr, target: FixedArray,
array: JSTypedArray, comparefn: Callable): JSAny {
assert(to - from > 1);
dcheck(to - from > 1);
const middle: uintptr = from + ((to - from) >>> 1);
// On the next recursion step source becomes target and vice versa.


@ -108,7 +108,7 @@ struct TypedArrayAccessor {
context: Context, array: JSTypedArray, index: uintptr, value: Numeric) {
const storefn: StoreNumericFn = this.storeNumericFn;
const result = storefn(context, array, index, value);
assert(result == kStoreSucceded);
dcheck(result == kStoreSucceded);
}
macro StoreJSAny(
@ -119,7 +119,7 @@ struct TypedArrayAccessor {
if (result == kStoreFailureArrayDetached) {
goto IfDetached;
}
assert(result == kStoreSucceded);
dcheck(result == kStoreSucceded);
}
loadNumericFn: LoadNumericFn;


@ -160,7 +160,7 @@ builtin WasmTableGet(tableIndex: intptr, index: int32): Object {
const instance: WasmInstanceObject = LoadInstanceFromFrame();
const entryIndex: intptr = ChangeInt32ToIntPtr(index);
try {
assert(IsValidPositiveSmi(tableIndex));
dcheck(IsValidPositiveSmi(tableIndex));
if (!IsValidPositiveSmi(entryIndex)) goto IndexOutOfRange;
const tables: FixedArray = LoadTablesFromInstance(instance);
@ -193,7 +193,7 @@ builtin WasmTableSet(tableIndex: intptr, index: int32, value: Object): Object {
const instance: WasmInstanceObject = LoadInstanceFromFrame();
const entryIndex: intptr = ChangeInt32ToIntPtr(index);
try {
assert(IsValidPositiveSmi(tableIndex));
dcheck(IsValidPositiveSmi(tableIndex));
if (!IsValidPositiveSmi(entryIndex)) goto IndexOutOfRange;
const tables: FixedArray = LoadTablesFromInstance(instance);
@ -412,7 +412,7 @@ builtin UintPtr53ToNumber(value: uintptr): Number {
const valueFloat = ChangeUintPtrToFloat64(value);
// Values need to be within [0..2^53], such that they can be represented as
// float64.
assert(ChangeFloat64ToUintPtr(valueFloat) == value);
dcheck(ChangeFloat64ToUintPtr(valueFloat) == value);
return AllocateHeapNumberWithValue(valueFloat);
}

File diff suppressed because it is too large.


@ -233,36 +233,36 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
#endif
#ifdef DEBUG
// CSA_ASSERT_ARGS generates an
// CSA_DCHECK_ARGS generates an
// std::initializer_list<CodeStubAssembler::ExtraNode> from __VA_ARGS__. It
// currently supports between 0 and 2 arguments.
// clang-format off
#define CSA_ASSERT_0_ARGS(...) {}
#define CSA_ASSERT_1_ARG(a, ...) {{a, #a}}
#define CSA_ASSERT_2_ARGS(a, b, ...) {{a, #a}, {b, #b}}
#define CSA_DCHECK_0_ARGS(...) {}
#define CSA_DCHECK_1_ARG(a, ...) {{a, #a}}
#define CSA_DCHECK_2_ARGS(a, b, ...) {{a, #a}, {b, #b}}
// clang-format on
#define SWITCH_CSA_ASSERT_ARGS(dummy, a, b, FUNC, ...) FUNC(a, b)
#define CSA_ASSERT_ARGS(...) \
CALL(SWITCH_CSA_ASSERT_ARGS, (, ##__VA_ARGS__, CSA_ASSERT_2_ARGS, \
CSA_ASSERT_1_ARG, CSA_ASSERT_0_ARGS))
#define SWITCH_CSA_DCHECK_ARGS(dummy, a, b, FUNC, ...) FUNC(a, b)
#define CSA_DCHECK_ARGS(...) \
CALL(SWITCH_CSA_DCHECK_ARGS, (, ##__VA_ARGS__, CSA_DCHECK_2_ARGS, \
CSA_DCHECK_1_ARG, CSA_DCHECK_0_ARGS))
// Workaround for MSVC to skip comma in empty __VA_ARGS__.
#define CALL(x, y) x y
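For reference, a worked expansion of the argument-counting machinery above (derived from these definitions, shown as comments):
// CSA_DCHECK_ARGS()     -> {}
// CSA_DCHECK_ARGS(a)    -> {{a, "a"}}
// CSA_DCHECK_ARGS(a, b) -> {{a, "a"}, {b, "b"}}
// i.e. an std::initializer_list<CodeStubAssembler::ExtraNode>, pairing each
// extra value with its stringified expression for the failure message.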
// CSA_ASSERT(csa, <condition>, <extra values to print...>)
// CSA_DCHECK(csa, <condition>, <extra values to print...>)
#define CSA_ASSERT(csa, condition_node, ...) \
(csa)->Assert(condition_node, #condition_node, __FILE__, __LINE__, \
CSA_ASSERT_ARGS(__VA_ARGS__))
#define CSA_DCHECK(csa, condition_node, ...) \
(csa)->Dcheck(condition_node, #condition_node, __FILE__, __LINE__, \
CSA_DCHECK_ARGS(__VA_ARGS__))
// CSA_ASSERT_BRANCH(csa, [](Label* ok, Label* not_ok) {...},
// CSA_DCHECK_BRANCH(csa, [](Label* ok, Label* not_ok) {...},
// <extra values to print...>)
#define CSA_ASSERT_BRANCH(csa, gen, ...) \
(csa)->Assert(gen, #gen, __FILE__, __LINE__, CSA_ASSERT_ARGS(__VA_ARGS__))
#define CSA_DCHECK_BRANCH(csa, gen, ...) \
(csa)->Dcheck(gen, #gen, __FILE__, __LINE__, CSA_DCHECK_ARGS(__VA_ARGS__))
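A usage sketch of the branch form (assumed names, not from this change: an assembler method with array and context in scope; BranchIfFastJSArray is an existing CodeStubAssembler helper). The generator branches to ok or not_ok, and the dcheck fails if not_ok is reached:

CSA_DCHECK_BRANCH(this, [=](Label* ok, Label* not_ok) {
  BranchIfFastJSArray(array, context, ok, not_ok);
});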
#define CSA_ASSERT_JS_ARGC_OP(csa, Op, op, expected) \
(csa)->Assert( \
#define CSA_DCHECK_JS_ARGC_OP(csa, Op, op, expected) \
(csa)->Dcheck( \
[&]() -> TNode<BoolT> { \
const TNode<Word32T> argc = (csa)->UncheckedParameter<Word32T>( \
Descriptor::kJSActualArgumentsCount); \
@ -274,8 +274,8 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
Descriptor::kJSActualArgumentsCount)), \
"argc"}})
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) \
CSA_ASSERT_JS_ARGC_OP(csa, Word32Equal, ==, expected)
#define CSA_DCHECK_JS_ARGC_EQ(csa, expected) \
CSA_DCHECK_JS_ARGC_OP(csa, Word32Equal, ==, expected)
#define CSA_DEBUG_INFO(name) \
{ #name, __FILE__, __LINE__ }
@ -285,9 +285,9 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
#define TYPED_VARIABLE_CONSTRUCTOR(name, ...) \
name(CSA_DEBUG_INFO(name), __VA_ARGS__)
#else // DEBUG
#define CSA_ASSERT(csa, ...) ((void)0)
#define CSA_ASSERT_BRANCH(csa, ...) ((void)0)
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) ((void)0)
#define CSA_DCHECK(csa, ...) ((void)0)
#define CSA_DCHECK_BRANCH(csa, ...) ((void)0)
#define CSA_DCHECK_JS_ARGC_EQ(csa, expected) ((void)0)
#define BIND(label) Bind(label)
#define TYPED_VARIABLE_DEF(type, name, ...) TVariable<type> name(__VA_ARGS__)
#define TYPED_VARIABLE_CONSTRUCTOR(name, ...) name(__VA_ARGS__)
@ -298,12 +298,12 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
EXPAND(TYPED_VARIABLE_CONSTRUCTOR(__VA_ARGS__, this))
#ifdef ENABLE_SLOW_DCHECKS
#define CSA_SLOW_ASSERT(csa, ...) \
#define CSA_SLOW_DCHECK(csa, ...) \
if (FLAG_enable_slow_asserts) { \
CSA_ASSERT(csa, __VA_ARGS__); \
CSA_DCHECK(csa, __VA_ARGS__); \
}
#else
#define CSA_SLOW_ASSERT(csa, ...) ((void)0)
#define CSA_SLOW_DCHECK(csa, ...) ((void)0)
#endif
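A minimal sketch of how the renamed macros read in a builtin (illustrative only: ExampleAssembler, index, and length are assumed names, not code from this change). CSA_CHECK is unchanged and fires in release builds as well, whereas CSA_DCHECK compiles to ((void)0) unless DEBUG is defined:

void ExampleAssembler::DcheckIndex(TNode<Smi> index, TNode<Smi> length) {
  CSA_CHECK(this, SmiBelow(index, length));      // verified in all builds
  CSA_DCHECK(this, TaggedIsPositiveSmi(index));  // debug builds only
  // Up to two extra values can be attached to the failure message:
  CSA_DCHECK(this, SmiBelow(index, length), index, length);
}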
// Provides JavaScript-specific "macro-assembler" functionality on top of the
@ -768,13 +768,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
using NodeGenerator = std::function<TNode<T>()>;
using ExtraNode = std::pair<TNode<Object>, const char*>;
void Assert(const BranchGenerator& branch, const char* message,
void Dcheck(const BranchGenerator& branch, const char* message,
const char* file, int line,
std::initializer_list<ExtraNode> extra_nodes = {});
void Assert(const NodeGenerator<BoolT>& condition_body, const char* message,
void Dcheck(const NodeGenerator<BoolT>& condition_body, const char* message,
const char* file, int line,
std::initializer_list<ExtraNode> extra_nodes = {});
void Assert(TNode<Word32T> condition_node, const char* message,
void Dcheck(TNode<Word32T> condition_node, const char* message,
const char* file, int line,
std::initializer_list<ExtraNode> extra_nodes = {});
void Check(const BranchGenerator& branch, const char* message,
@ -1097,7 +1097,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<ExternalString> object) {
// This is only valid for ExternalStrings where the resource data
// pointer is cached (i.e. no uncached external strings).
CSA_ASSERT(this, Word32NotEqual(
CSA_DCHECK(this, Word32NotEqual(
Word32And(LoadInstanceType(object),
Int32Constant(kUncachedExternalStringMask)),
Int32Constant(kUncachedExternalStringTag)));
@ -1236,7 +1236,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<IntPtrT> offset =
IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
CSA_ASSERT(this, TaggedIsNotSmi(reference.object));
CSA_DCHECK(this, TaggedIsNotSmi(reference.object));
return CAST(
LoadFromObject(MachineTypeOf<T>::value, reference.object, offset));
}
@ -1270,7 +1270,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
}
TNode<IntPtrT> offset =
IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
CSA_ASSERT(this, TaggedIsNotSmi(reference.object));
CSA_DCHECK(this, TaggedIsNotSmi(reference.object));
StoreToObject(rep, reference.object, offset, value, write_barrier);
}
template <class T, typename std::enable_if<
@ -4068,7 +4068,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TVariable<Number>* var_result,
Label* if_bailout);
void AssertHasValidMap(TNode<HeapObject> object);
void DcheckHasValidMap(TNode<HeapObject> object);
template <typename TValue>
void EmitElementStoreTypedArray(TNode<JSTypedArray> typed_array,


@ -853,13 +853,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
AssembleArchTableSwitch(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == r1);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@ -498,9 +498,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
ArmOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r1));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), r1));
}
void InstructionSelector::VisitStoreLane(Node* node) {
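Each backend pins the abort builtin's argument to one architecture-specific register: r1 here on arm, x1 on arm64, edx on ia32, and a0 on the MIPS-family ports below. A toy model of what a UseFixed-style constraint expresses (a hypothetical mini-API, not V8's OperandGenerator):

```cpp
// Toy model of a fixed-register operand constraint; names are hypothetical.
#include <cstdio>
#include <string>

struct InstructionOperand {
  int vreg;               // virtual register holding the value
  std::string fixed_reg;  // empty: allocator chooses; otherwise: pinned
};

InstructionOperand UseFixed(int vreg, const std::string& reg) {
  return {vreg, reg};  // the register allocator must place vreg in `reg`
}

int main() {
  // Analogous to Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(input, r1)):
  InstructionOperand op = UseFixed(42, "r1");
  std::printf("v%d pinned to %s\n", op.vreg, op.fixed_reg.c_str());
}
```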


@@ -871,16 +871,16 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchBinarySearchSwitch:
AssembleArchBinarySearchSwitch(instr);
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK_EQ(i.InputRegister(0), x1);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ Debug("kArchAbortCSAAssert", 0, BREAK);
__ Debug("kArchAbortCSADcheck", 0, BREAK);
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchDebugBreak:


@@ -579,9 +579,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
Arm64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), x1));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), x1));
}
void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,


@@ -887,13 +887,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchComment:
__ RecordComment(reinterpret_cast<const char*>(i.InputInt32(0)));
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == edx);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ int3();


@@ -458,9 +458,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
IA32OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), edx));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), edx));
}
void InstructionSelector::VisitLoadLane(Node* node) {


@@ -92,7 +92,7 @@ inline RecordWriteMode WriteBarrierKindToRecordWriteMode(
V(ArchBinarySearchSwitch) \
V(ArchTableSwitch) \
V(ArchNop) \
V(ArchAbortCSAAssert) \
V(ArchAbortCSADcheck) \
V(ArchDebugBreak) \
V(ArchComment) \
V(ArchThrowTerminator) \


@@ -308,7 +308,7 @@ int InstructionScheduler::GetInstructionFlags(const Instruction* instr) const {
#if V8_ENABLE_WEBASSEMBLY
case kArchTailCallWasm:
#endif // V8_ENABLE_WEBASSEMBLY
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
return kHasSideEffect;
case kArchDebugBreak:
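Classifying kArchAbortCSADcheck as kHasSideEffect keeps the instruction scheduler from reordering it past other effectful instructions, so the abort fires at the program point where the check failed. A compact sketch of this style of classification (a simplified opcode and flag set, not V8's full tables):

```cpp
// Simplified sketch of side-effect classification in an instruction
// scheduler; the opcode list and flag values are illustrative only.
#include <cassert>

enum ArchOpcode { kArchNop, kArchAbortCSADcheck, kArchJmp };
enum Flags { kNoOpcodeFlags = 0, kHasSideEffect = 1, kIsBlockTerminator = 2 };

int GetInstructionFlags(ArchOpcode opcode) {
  switch (opcode) {
    case kArchAbortCSADcheck:
      return kHasSideEffect;  // must keep its order among effectful ops
    case kArchJmp:
      return kIsBlockTerminator;
    default:
      return kNoOpcodeFlags;  // freely schedulable
  }
}

int main() {
  assert(GetInstructionFlags(kArchAbortCSADcheck) == kHasSideEffect);
}
```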


@@ -1452,8 +1452,8 @@ void InstructionSelector::VisitNode(Node* node) {
case IrOpcode::kStateValues:
case IrOpcode::kObjectState:
return;
case IrOpcode::kAbortCSAAssert:
VisitAbortCSAAssert(node);
case IrOpcode::kAbortCSADcheck:
VisitAbortCSADcheck(node);
return;
case IrOpcode::kDebugBreak:
VisitDebugBreak(node);


@@ -748,13 +748,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == a0);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@@ -345,9 +345,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
Loong64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,


@@ -809,13 +809,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == a0);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@@ -1427,7 +1427,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
2);
case kArchTableSwitch:
return AssembleArchTableSwitchLatency();
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
return CallLatency() + 1;
case kArchComment:
case kArchDeoptimize:


@@ -278,9 +278,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
MipsOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void InstructionSelector::VisitLoadTransform(Node* node) {


@@ -770,13 +770,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == a0);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@@ -1301,7 +1301,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
return AssembleArchJumpLatency();
case kArchTableSwitch:
return AssembleArchTableSwitchLatency();
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
return CallLatency() + 1;
case kArchDebugBreak:
return 1;


@@ -349,9 +349,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
Mips64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,


@@ -992,13 +992,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
AssembleArchTableSwitch(instr);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == r4);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@@ -162,9 +162,9 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
void InstructionSelector::VisitAbortCSADcheck(Node* node) {
PPCOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
Emit(kArchAbortCSADcheck, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
}
static void VisitLoadCommon(InstructionSelector* selector, Node* node,


@@ -743,13 +743,13 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
DCHECK(i.InputRegister(0) == a0);
{
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NO_FRAME_TYPE);
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSAAssert),
__ Call(isolate()->builtins()->code_handle(Builtin::kAbortCSADcheck),
RelocInfo::CODE_TARGET);
}
__ stop();


@@ -1117,7 +1117,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
return AssembleArchJumpLatency();
case kArchTableSwitch:
return AssembleArchTableSwitchLatency();
case kArchAbortCSAAssert:
case kArchAbortCSADcheck:
return CallLatency() + 1;
case kArchDebugBreak:
return 1;

Some files were not shown because too many files have changed in this diff.