[csa] Add assertions to CSA

This adds a bunch of assertions to CSA, mostly about documenting and checking
parameter types.

Drive-by-change: Removed unused function.

BUG=v8:6325

Review-Url: https://codereview.chromium.org/2847923003
Cr-Commit-Position: refs/heads/master@{#45398}
This commit is contained in:
jgruber 2017-05-18 08:46:39 -07:00 committed by Commit bot
parent 0c0ab3dce0
commit b14a981496
3 changed files with 261 additions and 86 deletions

View File

@ -74,9 +74,8 @@ void CodeStubAssembler::Check(const NodeGenerator& condition_body,
} else {
SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
}
CallRuntime(
Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
CallRuntime(Runtime::kGlobalPrint, SmiConstant(0),
HeapConstant(factory()->InternalizeUtf8String(&(buffer[0]))));
}
DebugBreak();
Goto(&ok);
@ -198,6 +197,10 @@ Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
return IntPtrAdd(value, IntPtrConstant(1));
}
// Returns a condition verifying that |value| is consistent with |mode|:
// in SMI_PARAMETERS mode the value must be a tagged Smi; in intptr mode any
// raw word is acceptable, so the check is trivially true.
Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
  if (mode == SMI_PARAMETERS) {
    return TaggedIsSmi(value);
  }
  // Nothing further can be validated for raw word values.
  return Int32Constant(1);
}
Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
// value && !(value & (value - 1))
return WordEqual(
@ -441,15 +444,18 @@ Node* CodeStubAssembler::SmiTag(Node* value) {
}
// Decodes a tagged Smi into the raw word it encodes by shifting out the tag
// bits; the arithmetic shift preserves the sign of the value.
Node* CodeStubAssembler::SmiUntag(Node* value) {
  CSA_SLOW_ASSERT(this, TaggedIsSmi(value));
  Node* raw_bits = BitcastTaggedToWord(value);
  return WordSar(raw_bits, SmiShiftBitsConstant());
}
// Untags a Smi and truncates the resulting word to 32 bits.
Node* CodeStubAssembler::SmiToWord32(Node* value) {
  CSA_SLOW_ASSERT(this, TaggedIsSmi(value));
  return TruncateWordToWord32(SmiUntag(value));
}
// Converts a tagged Smi to a float64 by untagging to a 32-bit integer and
// then widening to double.
Node* CodeStubAssembler::SmiToFloat64(Node* value) {
CSA_SLOW_ASSERT(this, TaggedIsSmi(value));
return ChangeInt32ToFloat64(SmiToWord32(value));
}
@ -462,6 +468,8 @@ Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
}
Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
CSA_SLOW_ASSERT(this, TaggedIsSmi(a));
CSA_SLOW_ASSERT(this, TaggedIsSmi(b));
VARIABLE(var_result, MachineRepresentation::kTagged);
Label return_result(this, &var_result),
return_minuszero(this, Label::kDeferred),
@ -521,6 +529,7 @@ Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
Goto(&return_result);
BIND(&return_result);
CSA_SLOW_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -582,11 +591,15 @@ Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
}
BIND(&return_result);
CSA_SLOW_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor,
Label* bailout) {
CSA_SLOW_ASSERT(this, TaggedIsSmi(dividend));
CSA_SLOW_ASSERT(this, TaggedIsSmi(divisor));
// Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
// is zero.
GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
@ -664,6 +677,7 @@ Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
Node* receiver_map, Label* definitely_no_elements,
Label* possibly_elements) {
CSA_SLOW_ASSERT(this, IsMap(receiver_map));
VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
Label loop_body(this, &var_map);
Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
@ -986,6 +1000,7 @@ Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset,
MachineType rep) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
}
@ -1062,11 +1077,13 @@ Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
}
// Loads the float64 payload of |object|, which must be a HeapNumber or a
// mutable HeapNumber (see the IsAnyHeapNumber assertion).
Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
CSA_SLOW_ASSERT(this, IsAnyHeapNumber(object));
return LoadObjectField(object, HeapNumber::kValueOffset,
MachineType::Float64());
}
// Loads the map of a heap object. |object| must not be a Smi, since Smis
// carry no map field.
Node* CodeStubAssembler::LoadMap(Node* object) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
return LoadObjectField(object, HeapObject::kMapOffset);
}
@ -1121,6 +1138,7 @@ Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
}
// Reads the instance-type byte out of |map|.
Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
CSA_SLOW_ASSERT(this, IsMap(map));
return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
}
@ -1204,6 +1222,7 @@ Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
Node* shared, int offset, ParameterMode mode) {
CSA_SLOW_ASSERT(this, HasInstanceType(shared, SHARED_FUNCTION_INFO_TYPE));
if (Is64()) {
Node* result = LoadObjectField(shared, offset, MachineType::Int32());
if (mode == SMI_PARAMETERS) {
@ -1348,6 +1367,8 @@ Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
Node* object, Node* index_node, int additional_offset,
ParameterMode parameter_mode) {
CSA_SLOW_ASSERT(this, IsFixedArray(object));
CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
int32_t header_size =
FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
#if V8_TARGET_LITTLE_ENDIAN
@ -1367,6 +1388,8 @@ Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
Node* object, Node* index_node, MachineType machine_type,
int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
CSA_SLOW_ASSERT(this, IsFixedDoubleArray(object));
CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
CSA_ASSERT(this, IsFixedDoubleArray(object));
int32_t header_size =
FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
@ -1401,11 +1424,13 @@ Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
}
// Loads the context slot at compile-time-constant |slot_index|. Contexts are
// FixedArray-shaped, hence the assertion.
Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) {
  CSA_SLOW_ASSERT(this, IsFixedArray(context));
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}
Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
CSA_SLOW_ASSERT(this, IsFixedArray(context));
Node* offset =
IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
@ -1414,12 +1439,14 @@ Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
// Stores |value| into the context slot at compile-time-constant |slot_index|
// (with the default write barrier of Store).
Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index,
                                             Node* value) {
  CSA_SLOW_ASSERT(this, IsFixedArray(context));
  return Store(context, IntPtrConstant(Context::SlotOffset(slot_index)),
               value);
}
Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
Node* value) {
CSA_SLOW_ASSERT(this, IsFixedArray(context));
Node* offset =
IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
@ -1429,12 +1456,14 @@ Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
// Barrier-free store into the context slot at compile-time-constant
// |slot_index|; callers are responsible for ensuring no write barrier is
// needed for |value|.
Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context,
int slot_index,
Node* value) {
CSA_SLOW_ASSERT(this, IsFixedArray(context));
int offset = Context::SlotOffset(slot_index);
return StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
IntPtrConstant(offset), value);
}
// Returns the native context reachable from |context| via the
// NATIVE_CONTEXT_INDEX slot. Contexts are FixedArray-shaped, hence the
// assertion.
Node* CodeStubAssembler::LoadNativeContext(Node* context) {
CSA_SLOW_ASSERT(this, IsFixedArray(context));
return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX);
}
@ -1466,18 +1495,21 @@ Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
}
// Stores |value| as the float64 payload of HeapNumber |object|. No write
// barrier: the payload is raw float data, not a tagged pointer.
Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
CSA_SLOW_ASSERT(this, IsAnyHeapNumber(object));
return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
MachineRepresentation::kFloat64);
}
// Stores |value| into the field at compile-time-constant |offset| of
// |object|, with a write barrier. The map field must never be written through
// this helper (see the DCHECK).
Node* CodeStubAssembler::StoreObjectField(
Node* object, int offset, Node* value) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
}
Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
Node* value) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
int const_offset;
if (ToInt32Constant(offset, const_offset)) {
return StoreObjectField(object, const_offset, value);
@ -1488,12 +1520,14 @@ Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
// Barrier-free variant of StoreObjectField with an explicit machine
// representation; callers are responsible for ensuring no write barrier is
// required for |value|.
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
Node* object, int offset, Node* value, MachineRepresentation rep) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
return StoreNoWriteBarrier(rep, object,
IntPtrConstant(offset - kHeapObjectTag), value);
}
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
Node* object, Node* offset, Node* value, MachineRepresentation rep) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
int const_offset;
if (ToInt32Constant(offset, const_offset)) {
return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
@ -1503,6 +1537,7 @@ Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
}
Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
CSA_SLOW_ASSERT(this, IsMap(map));
return StoreWithMapWriteBarrier(
object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
@ -1514,6 +1549,7 @@ Node* CodeStubAssembler::StoreMapNoWriteBarrier(
}
Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
CSA_SLOW_ASSERT(this, IsMap(map));
return StoreNoWriteBarrier(
MachineRepresentation::kTagged, object,
@ -1534,6 +1570,8 @@ Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
WriteBarrierMode barrier_mode,
int additional_offset,
ParameterMode parameter_mode) {
CSA_SLOW_ASSERT(this, IsFixedArray(object));
CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
barrier_mode == UPDATE_WRITE_BARRIER);
int header_size =
@ -1551,6 +1589,7 @@ Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
CSA_ASSERT(this, IsFixedDoubleArray(object));
CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
Node* offset =
ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
FixedArray::kHeaderSize - kHeapObjectTag);
@ -1612,6 +1651,7 @@ Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
CodeStubArguments& args,
Variable& arg_index,
Label* bailout) {
CSA_SLOW_ASSERT(this, IsJSArray(array));
Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
Label pre_bailout(this);
Label success(this);
@ -1682,6 +1722,7 @@ void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
Node* value, Label* bailout) {
CSA_SLOW_ASSERT(this, IsJSArray(array));
Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
ParameterMode mode = OptimalParameterMode();
VARIABLE(var_length, OptimalParameterRepresentation(),
@ -1741,6 +1782,8 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
ParameterMode mode,
AllocationFlags flags) {
Comment("AllocateSeqOneByteString");
CSA_SLOW_ASSERT(this, IsFixedArray(context));
CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
VARIABLE(var_result, MachineRepresentation::kTagged);
// Compute the SeqOneByteString size and check if it fits into new space.
@ -1811,6 +1854,8 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
ParameterMode mode,
AllocationFlags flags) {
CSA_SLOW_ASSERT(this, IsFixedArray(context));
CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode));
Comment("AllocateSeqTwoByteString");
VARIABLE(var_result, MachineRepresentation::kTagged);
@ -1866,7 +1911,9 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
Node* CodeStubAssembler::AllocateSlicedString(
Heap::RootListIndex map_root_index, Node* length, Node* parent,
Node* offset) {
CSA_ASSERT(this, IsString(parent));
CSA_ASSERT(this, TaggedIsSmi(length));
CSA_ASSERT(this, TaggedIsSmi(offset));
Node* result = Allocate(SlicedString::kSize);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
StoreMapNoWriteBarrier(result, map_root_index);
@ -1899,6 +1946,8 @@ Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
Node* length, Node* first,
Node* second,
AllocationFlags flags) {
CSA_ASSERT(this, IsString(first));
CSA_ASSERT(this, IsString(second));
CSA_ASSERT(this, TaggedIsSmi(length));
Node* result = Allocate(ConsString::kSize, flags);
DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
@ -1938,6 +1987,9 @@ Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Node* right, AllocationFlags flags) {
CSA_ASSERT(this, IsFixedArray(context));
CSA_ASSERT(this, IsString(left));
CSA_ASSERT(this, IsString(right));
CSA_ASSERT(this, TaggedIsSmi(length));
// Added string can be a cons string.
Comment("Allocating ConsString");
@ -1993,10 +2045,16 @@ Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
Node* index, Node* input) {
CSA_ASSERT(this, IsFixedArray(context));
CSA_ASSERT(this, TaggedIsSmi(index));
CSA_ASSERT(this, TaggedIsSmi(length));
CSA_ASSERT(this, IsString(input));
#ifdef DEBUG
Node* const max_length =
SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
USE(max_length);
#endif // DEBUG
// Allocate the JSRegExpResult.
// TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
@ -2119,6 +2177,7 @@ Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
Node* size, Node* properties,
Node* elements) {
CSA_SLOW_ASSERT(this, IsMap(map));
// This helper assumes that the object is in new-space, as guarded by the
// check in AllocatedJSObjectFromMap.
if (properties == nullptr) {
@ -2126,6 +2185,7 @@ void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
Heap::kEmptyFixedArrayRootIndex);
} else {
CSA_ASSERT(this, IsFixedArray(properties));
StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
properties);
}
@ -2133,6 +2193,7 @@ void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
StoreObjectFieldRoot(object, JSObject::kElementsOffset,
Heap::kEmptyFixedArrayRootIndex);
} else {
CSA_ASSERT(this, IsFixedArray(elements));
StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
}
InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
@ -2140,6 +2201,7 @@ void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
Node* size, int start_offset) {
CSA_SLOW_ASSERT(this, IsMap(map));
// TODO(cbruni): activate in-object slack tracking machinery.
Comment("InitializeJSObjectBody");
Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
@ -2169,6 +2231,8 @@ void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
Comment("begin allocation of JSArray without elements");
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
CSA_SLOW_ASSERT(this, IsMap(array_map));
int base_size = JSArray::kSize;
if (allocation_site != nullptr) {
base_size += AllocationMemento::kSize;
@ -2185,6 +2249,8 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
Node* capacity, ParameterMode capacity_mode) {
Comment("begin allocation of JSArray with elements");
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
CSA_SLOW_ASSERT(this, IsMap(array_map));
int base_size = JSArray::kSize;
if (allocation_site != nullptr) {
@ -2211,13 +2277,15 @@ Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
Node* length,
Node* allocation_site,
Node* size_in_bytes) {
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
CSA_SLOW_ASSERT(this, IsMap(array_map));
// Allocate space for the JSArray and the elements FixedArray in one go.
Node* array = AllocateInNewSpace(size_in_bytes);
Comment("write JSArray headers");
StoreMapNoWriteBarrier(array, array_map);
CSA_ASSERT(this, TaggedIsSmi(length));
StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
@ -2233,6 +2301,10 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
Node* capacity, Node* length,
Node* allocation_site,
ParameterMode capacity_mode) {
CSA_SLOW_ASSERT(this, IsMap(array_map));
CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
Node *array = nullptr, *elements = nullptr;
if (IsIntPtrOrSmiConstantZero(capacity)) {
// Array is empty. Use the shared empty fixed array instead of allocating a
@ -2266,6 +2338,7 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
Node* capacity_node,
ParameterMode mode,
AllocationFlags flags) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
IntPtrOrSmiConstant(0, mode), mode));
Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
@ -2285,6 +2358,9 @@ Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
void CodeStubAssembler::FillFixedArrayWithValue(
ElementsKind kind, Node* array, Node* from_node, Node* to_node,
Heap::RootListIndex value_root_index, ParameterMode mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
bool is_double = IsFastDoubleElementsKind(kind);
DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
value_root_index == Heap::kUndefinedValueRootIndex);
@ -2328,6 +2404,10 @@ void CodeStubAssembler::CopyFixedArrayElements(
ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
Node* to_array, Node* element_count, Node* capacity,
WriteBarrierMode barrier_mode, ParameterMode mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
Comment("[ CopyFixedArrayElements");
@ -2464,6 +2544,12 @@ void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
String::Encoding from_encoding,
String::Encoding to_encoding,
ParameterMode mode) {
// Cannot assert IsString(from_string) and IsString(to_string) here because
// CSA::SubString can pass in faked sequential strings when handling external
// subject strings.
CSA_SLOW_ASSERT(this, MatchesParameterMode(character_count, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(from_index, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(to_index, mode));
bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
DCHECK_IMPLIES(to_one_byte, from_one_byte);
@ -2522,6 +2608,7 @@ Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
ElementsKind from_kind,
ElementsKind to_kind,
Label* if_hole) {
CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
if (IsFastDoubleElementsKind(from_kind)) {
Node* value =
LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
@ -2548,6 +2635,7 @@ Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
ParameterMode mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
Node* padding = IntPtrOrSmiConstant(16, mode);
@ -2557,6 +2645,9 @@ Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
ElementsKind kind, Node* key,
Label* bailout) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
Node* capacity = LoadFixedArrayBaseLength(elements);
ParameterMode mode = OptimalParameterMode();
@ -2573,6 +2664,10 @@ Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
ParameterMode mode,
Label* bailout) {
Comment("TryGrowElementsCapacity");
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
// If the gap growth is too big, fall back to the runtime.
Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
@ -2590,6 +2685,11 @@ Node* CodeStubAssembler::GrowElementsCapacity(
Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
Comment("[ GrowElementsCapacity");
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
// If size of the allocation for the new capacity doesn't fit in a page
// that we can bump-pointer allocate from, fall back to the runtime.
int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
@ -2798,6 +2898,7 @@ Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
Goto(&if_join);
}
BIND(&if_join);
CSA_SLOW_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -2825,6 +2926,7 @@ Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
}
Goto(&if_join);
BIND(&if_join);
CSA_SLOW_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -2861,6 +2963,7 @@ Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
Goto(&if_join);
BIND(&if_join);
CSA_SLOW_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -2925,6 +3028,7 @@ Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
}
Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) {
CSA_SLOW_ASSERT(this, IsNumber(value));
VARIABLE(result, MachineRepresentation::kFloat64);
Label smi(this);
Label done(this, &result);
@ -2944,6 +3048,7 @@ Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) {
}
Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) {
CSA_SLOW_ASSERT(this, IsNumber(value));
VARIABLE(result, MachineType::PointerRepresentation());
Label smi(this), done(this, &result);
GotoIf(TaggedIsSmi(value), &smi);
@ -3081,6 +3186,7 @@ Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
}
Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
CSA_SLOW_ASSERT(this, IsMap(map));
Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
uint32_t mask =
1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
@ -3207,9 +3313,7 @@ Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
Int32Constant(JS_GLOBAL_PROXY_TYPE));
}
// Pre-change implementation (removed by this CL): classified maps via the
// instance type rather than via the meta map.
Node* CodeStubAssembler::IsMap(Node* map) {
return HasInstanceType(map, MAP_TYPE);
}
// Replacement introduced by this CL: an object is a map iff its own map is
// the meta map, avoiding the instance-type load.
Node* CodeStubAssembler::IsMap(Node* map) { return IsMetaMap(LoadMap(map)); }
Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) {
return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE));
@ -3235,6 +3339,45 @@ Node* CodeStubAssembler::IsJSArrayMap(Node* map) {
return IsJSArrayInstanceType(LoadMapInstanceType(map));
}
// True iff |object|'s instance type is FIXED_ARRAY_TYPE.
Node* CodeStubAssembler::IsFixedArray(Node* object) {
return HasInstanceType(object, FIXED_ARRAY_TYPE);
}
// This complicated check is due to elements oddities. If a smi array is empty
// after Array.p.shift, it is replaced by the empty array constant. If it is
// later filled with a double element, we try to grow it but pass in a double
// elements kind. Usually this would cause a size mismatch (since the source
// fixed array has FAST_HOLEY_ELEMENTS and destination has
// FAST_HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
// source array is empty.
// TODO(jgruber): It might be worth creating an empty_double_array constant to
// simplify this case.
// Accepts a fixed array whose concrete type matches |kind|, or any fixed
// array of length zero (see the explanatory comment preceding this function
// for why the empty case must be accepted).
Node* CodeStubAssembler::IsFixedArrayWithKindOrEmpty(Node* object,
ElementsKind kind) {
Label out(this);
// Default to true; only the fall-through path below flips it to false.
VARIABLE(var_result, MachineRepresentation::kWord32, Int32Constant(1));
GotoIf(IsFixedArrayWithKind(object, kind), &out);
// Length zero covers the empty-array case regardless of kind.
Node* const length = LoadFixedArrayBaseLength(object);
GotoIf(SmiEqual(length, SmiConstant(0)), &out);
var_result.Bind(Int32Constant(0));
Goto(&out);
BIND(&out);
return var_result.value();
}
// Maps an elements kind to the matching fixed-array type check: double kinds
// are backed by FixedDoubleArray, Smi/object kinds by FixedArray.
Node* CodeStubAssembler::IsFixedArrayWithKind(Node* object, ElementsKind kind) {
  if (IsFastDoubleElementsKind(kind)) return IsFixedDoubleArray(object);
  DCHECK(IsFastSmiOrObjectElementsKind(kind));
  return IsFixedArray(object);
}
// True iff |object|'s map is the weak-cell map.
Node* CodeStubAssembler::IsWeakCell(Node* object) {
return IsWeakCellMap(LoadMap(object));
}
@ -3255,6 +3398,14 @@ Node* CodeStubAssembler::IsAccessorPair(Node* object) {
return IsAccessorPairMap(LoadMap(object));
}
// True for both mutable and immutable heap numbers.
Node* CodeStubAssembler::IsAnyHeapNumber(Node* object) {
return Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object));
}
// True iff |object|'s map is the mutable-heap-number map.
Node* CodeStubAssembler::IsMutableHeapNumber(Node* object) {
return IsMutableHeapNumberMap(LoadMap(object));
}
// True iff |object|'s map is the (immutable) heap-number map.
Node* CodeStubAssembler::IsHeapNumber(Node* object) {
return IsHeapNumberMap(LoadMap(object));
}
@ -3386,7 +3537,7 @@ Node* CodeStubAssembler::IsNumberPositive(Node* number) {
Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
ParameterMode parameter_mode) {
if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index));
CSA_ASSERT(this, MatchesParameterMode(index, parameter_mode));
CSA_ASSERT(this, IsString(string));
// Translate the {index} into a Word.
@ -3498,6 +3649,7 @@ Node* CodeStubAssembler::StringFromCharCode(Node* code) {
}
BIND(&if_done);
CSA_ASSERT(this, IsString(var_result.value()));
return var_result.value();
}
@ -3708,6 +3860,7 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
}
BIND(&end);
CSA_ASSERT(this, IsString(var_result.value()));
return var_result.value();
}
@ -3855,26 +4008,6 @@ Node* ToDirectStringAssembler::TryToSequential(StringPointerKind ptr_kind,
return var_result.value();
}
// Deleted by this CL as unused ("Drive-by-change: Removed unused function").
// Produced a pointer into an external string's resource data, offset so the
// result can be indexed as if it were a sequential string; bails out for
// short external strings.
// NOTE(review): |out| was declared but never used in this function.
Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
Node* const instance_type,
Label* if_bailout) {
Label out(this);
CSA_ASSERT(this, IsExternalStringInstanceType(instance_type));
GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout);
// Move the pointer so that offset-wise, it looks like a sequential string.
STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
Node* resource_data = LoadObjectField(
string, ExternalString::kResourceDataOffset, MachineType::Pointer());
Node* const fake_sequential_string =
IntPtrSub(resource_data,
IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
return fake_sequential_string;
}
void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
Node* instance_type,
Variable* var_did_something) {
@ -4080,10 +4213,12 @@ Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
}
BIND(&return_result);
CSA_ASSERT(this, IsString(var_result.value()));
return var_result.value();
}
Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
CSA_SLOW_ASSERT(this, IsString(input));
Label runtime(this, Label::kDeferred);
Label end(this);
@ -4138,7 +4273,7 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
Node* hash = Word32Xor(low, high);
hash = ChangeInt32ToIntPtr(hash);
hash = WordShl(hash, one);
Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
Node* index = WordAnd(hash, WordSar(mask, SmiShiftBitsConstant()));
// Cache entry's key must be a heap number
Node* number_key = LoadFixedArrayElement(number_string_cache, index);
@ -4183,6 +4318,7 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
}
BIND(&done);
CSA_ASSERT(this, IsString(result.value()));
return result.value();
}
@ -4229,6 +4365,7 @@ Node* CodeStubAssembler::ToName(Node* context, Node* value) {
}
BIND(&end);
CSA_ASSERT(this, IsName(var_result.value()));
return var_result.value();
}
@ -4317,6 +4454,7 @@ Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
}
BIND(&end);
CSA_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -4346,6 +4484,7 @@ Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
}
BIND(&end);
CSA_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -4448,6 +4587,7 @@ Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
}
BIND(&out);
CSA_ASSERT(this, IsNumber(var_result.value()));
return var_result.value();
}
@ -4487,6 +4627,7 @@ Node* CodeStubAssembler::ToString(Node* context, Node* input) {
}
BIND(&done);
CSA_ASSERT(this, IsString(result.value()));
return result.value();
}
@ -4556,6 +4697,7 @@ Node* CodeStubAssembler::ToSmiIndex(Node* const input, Node* const context,
Goto(&done);
BIND(&done);
CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
return result.value();
}
@ -4580,6 +4722,7 @@ Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context,
Goto(&done);
BIND(&done);
CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value()));
return result.value();
}
@ -4649,6 +4792,7 @@ Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
}
BIND(&out);
CSA_SLOW_ASSERT(this, IsNumber(var_arg.value()));
return var_arg.value();
}
@ -4765,6 +4909,7 @@ void CodeStubAssembler::TryInternalizeString(
Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
DCHECK(var_index->rep() == MachineType::PointerRepresentation());
DCHECK(var_internalized->rep() == MachineRepresentation::kTagged);
CSA_SLOW_ASSERT(this, IsString(string));
Node* function = ExternalConstant(
ExternalReference::try_internalize_string_function(isolate()));
Node* result = CallCFunction1(MachineType::AnyTagged(),
@ -5012,6 +5157,8 @@ void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
Node* name, Node* value,
Node* index,
Node* enum_index) {
CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
// Store name and value.
StoreFixedArrayElement(dictionary, index, name);
StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
@ -5053,6 +5200,7 @@ void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
template <class Dictionary>
void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
Label* bailout) {
CSA_SLOW_ASSERT(this, IsDictionary(dictionary));
Node* capacity = GetCapacity<Dictionary>(dictionary);
Node* nof = GetNumberOfElements<Dictionary>(dictionary);
Node* new_nof = SmiAdd(nof, SmiConstant(1));
@ -6771,6 +6919,8 @@ Node* CodeStubAssembler::BuildFastLoop(
const CodeStubAssembler::VariableList& vars, Node* start_index,
Node* end_index, const FastLoopBody& body, int increment,
ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
? MachineType::PointerRepresentation()
: MachineRepresentation::kTaggedSigned;
@ -6808,6 +6958,9 @@ void CodeStubAssembler::BuildFastFixedArrayForEach(
Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
ParameterMode mode, ForEachDirection direction) {
STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(fixed_array, kind));
int32_t first_val;
bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
int32_t last_val;
@ -6868,6 +7021,7 @@ void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
void CodeStubAssembler::InitializeFieldsWithRoot(
Node* object, Node* start_offset, Node* end_offset,
Heap::RootListIndex root_index) {
CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
Node* root_value = LoadRoot(root_index);
@ -6883,6 +7037,9 @@ void CodeStubAssembler::InitializeFieldsWithRoot(
void CodeStubAssembler::BranchIfNumericRelationalComparison(
RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
Label* if_false) {
CSA_SLOW_ASSERT(this, IsNumber(lhs));
CSA_SLOW_ASSERT(this, IsNumber(rhs));
Label end(this);
VARIABLE(result, MachineRepresentation::kTagged);
@ -6995,6 +7152,9 @@ Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
Node* lhs, Node* rhs,
Node* context,
Variable* var_type_feedback) {
CSA_SLOW_ASSERT(this, IsNumber(lhs));
CSA_SLOW_ASSERT(this, IsNumber(rhs));
Label return_true(this), return_false(this), end(this);
VARIABLE(result, MachineRepresentation::kTagged);
@ -8653,6 +8813,8 @@ Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
}
Node* CodeStubAssembler::NumberInc(Node* value) {
CSA_SLOW_ASSERT(this, IsNumber(value));
VARIABLE(var_result, MachineRepresentation::kTagged);
VARIABLE(var_finc_value, MachineRepresentation::kFloat64);
Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
@ -8705,6 +8867,8 @@ Node* CodeStubAssembler::NumberInc(Node* value) {
}
Node* CodeStubAssembler::NumberDec(Node* value) {
CSA_SLOW_ASSERT(this, IsNumber(value));
VARIABLE(var_result, MachineRepresentation::kTagged);
VARIABLE(var_fdec_value, MachineRepresentation::kFloat64);
Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
@ -9130,6 +9294,8 @@ Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
Node* shared_info,
Node* context) {
CSA_SLOW_ASSERT(this, IsMap(map));
Node* const code = BitcastTaggedToWord(
LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
Node* const code_entry =

View File

@ -19,6 +19,44 @@ class CodeStubArguments;
class StatsCounter;
class StubCache;
// CSA_CHECK is enabled in all build modes: it emits code that prints the
// failed condition and breaks into the debugger when |x| is false.
#define CSA_CHECK(csa, x) \
(csa)->Check([&] { return (x); }, #x, __FILE__, __LINE__)
#ifdef DEBUG
// Debug-only variant of CSA_CHECK; compiles to ((void)0) in release builds.
#define CSA_ASSERT(csa, x) \
(csa)->Assert([&] { return (x); }, #x, __FILE__, __LINE__)
// Asserts a relation between the builtin's actual argument count and
// |expected|. |Op| is the CSA comparison to emit (e.g. Word32Equal) and
// |op| is its textual form (e.g. ==) used in the failure message.
#define CSA_ASSERT_JS_ARGC_OP(csa, Op, op, expected) \
(csa)->Assert( \
[&] { \
compiler::Node* const argc = \
(csa)->Parameter(Descriptor::kActualArgumentsCount); \
return (csa)->Op(argc, (csa)->Int32Constant(expected)); \
}, \
"argc " #op " " #expected, __FILE__, __LINE__)
// Convenience form: assert that argc == expected.
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) \
CSA_ASSERT_JS_ARGC_OP(csa, Word32Equal, ==, expected)
// In debug builds BIND/VARIABLE attach the label/variable name and source
// position; the release versions below drop that metadata.
#define BIND(label) Bind(label, {#label, __FILE__, __LINE__})
#define VARIABLE(name, ...) \
Variable name(this, {#name, __FILE__, __LINE__}, __VA_ARGS__);
#else // DEBUG
#define CSA_ASSERT(csa, x) ((void)0)
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) ((void)0)
#define BIND(label) Bind(label);
#define VARIABLE(name, ...) Variable name(this, __VA_ARGS__);
#endif // DEBUG
// CSA_SLOW_ASSERT additionally gates on the FLAG_enable_slow_asserts runtime
// flag; intended for checks too expensive for ordinary debug builds.
#ifdef ENABLE_SLOW_DCHECKS
#define CSA_SLOW_ASSERT(csa, x) \
if (FLAG_enable_slow_asserts) { \
CSA_ASSERT(csa, x); \
}
#else
#define CSA_SLOW_ASSERT(csa, x) ((void)0)
#endif
enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
#define HEAP_CONSTANT_LIST(V) \
@ -27,10 +65,8 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
V(AllocationSiteMap, AllocationSiteMap) \
V(BooleanMap, BooleanMap) \
V(CodeMap, CodeMap) \
V(empty_string, EmptyString) \
V(length_string, LengthString) \
V(prototype_string, PrototypeString) \
V(EmptyFixedArray, EmptyFixedArray) \
V(empty_string, EmptyString) \
V(EmptyWeakCell, EmptyWeakCell) \
V(FalseValue, False) \
V(FeedbackVectorMap, FeedbackVectorMap) \
@ -38,23 +74,27 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
V(FixedCOWArrayMap, FixedCOWArrayMap) \
V(FixedDoubleArrayMap, FixedDoubleArrayMap) \
V(FunctionTemplateInfoMap, FunctionTemplateInfoMap) \
V(GlobalPropertyCellMap, PropertyCellMap) \
V(has_instance_symbol, HasInstanceSymbol) \
V(HeapNumberMap, HeapNumberMap) \
V(NoClosuresCellMap, NoClosuresCellMap) \
V(OneClosureCellMap, OneClosureCellMap) \
V(length_string, LengthString) \
V(ManyClosuresCellMap, ManyClosuresCellMap) \
V(MetaMap, MetaMap) \
V(MinusZeroValue, MinusZero) \
V(MutableHeapNumberMap, MutableHeapNumberMap) \
V(NanValue, Nan) \
V(NoClosuresCellMap, NoClosuresCellMap) \
V(NullValue, Null) \
V(GlobalPropertyCellMap, PropertyCellMap) \
V(OneClosureCellMap, OneClosureCellMap) \
V(prototype_string, PrototypeString) \
V(SpeciesProtector, SpeciesProtector) \
V(SymbolMap, SymbolMap) \
V(TheHoleValue, TheHole) \
V(TrueValue, True) \
V(Tuple2Map, Tuple2Map) \
V(Tuple3Map, Tuple3Map) \
V(UndefinedValue, Undefined) \
V(WeakCellMap, WeakCellMap) \
V(SpeciesProtector, SpeciesProtector)
V(WeakCellMap, WeakCellMap)
// Provides JavaScript-specific "macro-assembler" functionality on top of the
// CodeAssembler. By factoring the JavaScript-isms out of the CodeAssembler,
@ -117,6 +157,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return value;
}
Node* MatchesParameterMode(Node* value, ParameterMode mode);
#define PARAMETER_BINOP(OpName, IntPtrOpName, SmiOpName) \
Node* OpName(Node* a, Node* b, ParameterMode mode) { \
if (mode == SMI_PARAMETERS) { \
@ -185,6 +227,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Smi operations.
// Expands to an arithmetic helper named |SmiOpName| that untags both Smi
// operands to words, applies |IntPtrOpName|, and retags the result. Slow
// asserts document/check that both inputs are indeed Smis.
#define SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName) \
Node* SmiOpName(Node* a, Node* b) { \
CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); \
CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); \
return BitcastWordToTaggedSigned( \
IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b))); \
}
@ -195,10 +239,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
#undef SMI_ARITHMETIC_BINOP
// Left-shifts the Smi |a| by |shift| bits, operating on its raw word form
// and retagging the shifted word as a Smi.
Node* SmiShl(Node* a, int shift) {
  CSA_SLOW_ASSERT(this, TaggedIsSmi(a));
  Node* const raw_word = BitcastTaggedToWord(a);
  Node* const shifted = WordShl(raw_word, shift);
  return BitcastWordToTaggedSigned(shifted);
}
Node* SmiShr(Node* a, int shift) {
CSA_SLOW_ASSERT(this, TaggedIsSmi(a));
return BitcastWordToTaggedSigned(
WordAnd(WordShr(BitcastTaggedToWord(a), shift),
BitcastTaggedToWord(SmiConstant(-1))));
@ -224,6 +270,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
// Expands to a comparison helper named |SmiOpName| that compares two Smis by
// applying |IntPtrOpName| to their untagged word representations. Slow
// asserts document/check that both inputs are indeed Smis.
#define SMI_COMPARISON_OP(SmiOpName, IntPtrOpName) \
Node* SmiOpName(Node* a, Node* b) { \
CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); \
CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); \
return IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); \
}
SMI_COMPARISON_OP(SmiEqual, WordEqual)
@ -763,6 +811,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* IsPropertyCell(Node* object);
Node* IsAccessorInfo(Node* object);
Node* IsAccessorPair(Node* object);
Node* IsAnyHeapNumber(Node* object);
Node* IsMutableHeapNumber(Node* object);
Node* IsHeapNumber(Node* object);
Node* IsName(Node* object);
Node* IsSymbol(Node* object);
@ -773,6 +823,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* IsJSArrayInstanceType(Node* instance_type);
Node* IsJSArray(Node* object);
Node* IsJSArrayMap(Node* object);
Node* IsFixedArray(Node* object);
Node* IsFixedArrayWithKindOrEmpty(Node* object, ElementsKind kind);
Node* IsFixedArrayWithKind(Node* object, ElementsKind kind);
Node* IsNativeContext(Node* object);
Node* IsWeakCell(Node* object);
Node* IsFixedDoubleArray(Node* object);
@ -823,16 +876,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* StringAdd(Node* context, Node* first, Node* second,
AllocationFlags flags = kNone);
// Unpack the external string, returning a pointer that (offset-wise) looks
// like a sequential string.
// Note that this pointer is not tagged and does not point to a real
// sequential string instance, and may only be used to access the string
// data. The pointer is GC-safe as long as a reference to the container
// ExternalString is live.
// |string| must be an external string. Bailout for short external strings.
Node* TryDerefExternalString(Node* const string, Node* const instance_type,
Label* if_bailout);
// Check if |var_string| has an indirect (thin or flat cons) string type,
// and unpack it if so.
void MaybeDerefIndirectString(Variable* var_string, Node* instance_type,
@ -1602,44 +1645,6 @@ class ToDirectStringAssembler : public CodeStubAssembler {
const Flags flags_;
};
// Assertion helpers for CodeStubAssembler code.
// CSA_CHECK is active in all build modes; when |x| is false the generated
// code prints the condition text and breaks into the debugger.
#define CSA_CHECK(csa, x) \
(csa)->Check([&] { return (x); }, #x, __FILE__, __LINE__)
#ifdef DEBUG
// CSA_ASSERT emits the same runtime check, but only in debug builds.
#define CSA_ASSERT(csa, x) \
(csa)->Assert([&] { return (x); }, #x, __FILE__, __LINE__)
// Checks the builtin's actual argument count against |expected| using the
// CSA comparison |Op|; |op| is the human-readable operator for the message.
#define CSA_ASSERT_JS_ARGC_OP(csa, Op, op, expected) \
(csa)->Assert( \
[&] { \
compiler::Node* const argc = \
(csa)->Parameter(Descriptor::kActualArgumentsCount); \
return (csa)->Op(argc, (csa)->Int32Constant(expected)); \
}, \
"argc " #op " " #expected, __FILE__, __LINE__)
// Shorthand for the common equality form: argc == expected.
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) \
CSA_ASSERT_JS_ARGC_OP(csa, Word32Equal, ==, expected)
// Debug BIND/VARIABLE carry name and source location for diagnostics; the
// release definitions below omit that bookkeeping.
#define BIND(label) Bind(label, {#label, __FILE__, __LINE__})
#define VARIABLE(name, ...) \
Variable name(this, {#name, __FILE__, __LINE__}, __VA_ARGS__);
#else // DEBUG
#define CSA_ASSERT(csa, x) ((void)0)
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) ((void)0)
#define BIND(label) Bind(label);
#define VARIABLE(name, ...) Variable name(this, __VA_ARGS__);
#endif // DEBUG
// CSA_SLOW_ASSERT: extra-expensive checks, further gated at runtime behind
// FLAG_enable_slow_asserts.
#ifdef ENABLE_SLOW_DCHECKS
#define CSA_SLOW_ASSERT(csa, x) \
if (FLAG_enable_slow_asserts) { \
CSA_ASSERT(csa, x); \
}
#else
#define CSA_SLOW_ASSERT(csa, x) ((void)0)
#endif
DEFINE_OPERATORS_FOR_FLAGS(CodeStubAssembler::AllocationFlags);
} // namespace internal

View File

@ -633,6 +633,10 @@
# Slow tests.
'ignition/regress-599001-verifyheap': [SKIP],
'regress/regress-2185-2': [SKIP],
# Crankshaft bug, wrong elements kind: crbug.com/v8/6342
'regress/regress-2671': [SKIP],
'regress/regress-2671-1': [SKIP],
}], # variant == noturbofan_stress
##############################################################################