[stubs] Fixing issues found by machine graph verifier in code stubs.

BUG=

Review-Url: https://codereview.chromium.org/2568713002
Cr-Commit-Position: refs/heads/master@{#41651}
This commit is contained in:
ishell 2016-12-12 07:53:21 -08:00 committed by Commit bot
parent 1bdf908db0
commit 9978f90381
8 changed files with 178 additions and 165 deletions

View File

@ -75,8 +75,8 @@ void Builtins::Generate_CopyFastSmiOrObjectElements(
int max_elements = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind);
Label if_newspace(&assembler), if_oldspace(&assembler);
assembler.Branch(
assembler.UintPtrLessThan(
length, assembler.IntPtrOrSmiConstant(max_elements, mode)),
assembler.UintPtrOrSmiLessThan(
length, assembler.IntPtrOrSmiConstant(max_elements, mode), mode),
&if_newspace, &if_oldspace);
assembler.Bind(&if_newspace);

View File

@ -1228,23 +1228,23 @@ TF_BUILTIN(Divide, CodeStubAssembler) {
Label bailout(this);
// Do floating point division if {divisor} is zero.
GotoIf(WordEqual(divisor, IntPtrConstant(0)), &bailout);
GotoIf(SmiEqual(divisor, SmiConstant(0)), &bailout);
// Do floating point division if {dividend} is zero and {divisor} is
// negative.
Label dividend_is_zero(this), dividend_is_not_zero(this);
Branch(WordEqual(dividend, IntPtrConstant(0)), &dividend_is_zero,
Branch(SmiEqual(dividend, SmiConstant(0)), &dividend_is_zero,
&dividend_is_not_zero);
Bind(&dividend_is_zero);
{
GotoIf(IntPtrLessThan(divisor, IntPtrConstant(0)), &bailout);
GotoIf(SmiLessThan(divisor, SmiConstant(0)), &bailout);
Goto(&dividend_is_not_zero);
}
Bind(&dividend_is_not_zero);
Node* untagged_divisor = SmiUntag(divisor);
Node* untagged_dividend = SmiUntag(dividend);
Node* untagged_divisor = SmiToWord32(divisor);
Node* untagged_dividend = SmiToWord32(dividend);
// Do floating point division if {dividend} is kMinInt (or kMinInt - 1
// if the Smi size is 31) and {divisor} is -1.
@ -1269,7 +1269,7 @@ TF_BUILTIN(Divide, CodeStubAssembler) {
Node* truncated = Int32Mul(untagged_result, untagged_divisor);
// Do floating point division if the remainder is not 0.
GotoIf(Word32NotEqual(untagged_dividend, truncated), &bailout);
var_result.Bind(SmiTag(untagged_result));
var_result.Bind(SmiFromWord32(untagged_result));
Goto(&end);
// Bailout: convert {dividend} and {divisor} to double and do double

View File

@ -108,7 +108,14 @@ Code* BuildWithCodeStubAssemblerCS(Isolate* isolate,
DCHECK_LE(0, descriptor.GetRegisterParameterCount());
compiler::CodeAssemblerState state(isolate, &zone, descriptor, flags, name);
generator(&state);
// TODO(ishell): remove this when code stub assembler graphs verification
// is enabled for all stubs.
bool saved_csa_verify = FLAG_csa_verify;
// Enable verification only in mksnapshot.
FLAG_csa_verify = DEBUG_BOOL && FLAG_startup_blob != nullptr;
Handle<Code> code = compiler::CodeAssembler::GenerateCode(&state);
FLAG_csa_verify = saved_csa_verify;
PostBuildProfileAndTracing(isolate, *code, name);
return *code;
}

View File

@ -438,45 +438,6 @@ Node* CodeStubAssembler::SmiToFloat64(Node* value) {
return ChangeInt32ToFloat64(SmiToWord32(value));
}
// Returns the Smi sum of |a| and |b|. The addition is performed on the
// untagged word representations of the operands; note there is no overflow
// check, so the result is only a valid Smi if the sum fits in Smi range.
Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) {
return BitcastWordToTaggedSigned(
IntPtrAdd(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
}
// Returns the Smi difference |a| - |b|, computed on the untagged word
// representations. No underflow/overflow check is performed.
Node* CodeStubAssembler::SmiSub(Node* a, Node* b) {
return BitcastWordToTaggedSigned(
IntPtrSub(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
}
// Tests |a| == |b| by comparing the tagged word representations directly:
// two Smis are equal iff their tagged bit patterns are equal.
Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) {
return WordEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Unsigned comparison |a| > |b| performed directly on the tagged words;
// Smi tagging is a left shift, which preserves unsigned ordering.
Node* CodeStubAssembler::SmiAbove(Node* a, Node* b) {
return UintPtrGreaterThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Unsigned comparison |a| >= |b| performed directly on the tagged words.
Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
return UintPtrGreaterThanOrEqual(BitcastTaggedToWord(a),
BitcastTaggedToWord(b));
}
// Unsigned comparison |a| < |b| performed directly on the tagged words.
Node* CodeStubAssembler::SmiBelow(Node* a, Node* b) {
return UintPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Signed comparison |a| < |b| performed directly on the tagged words;
// Smi tagging preserves signed ordering, so no untagging is needed.
Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
return IntPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Signed comparison |a| <= |b| performed directly on the tagged words.
Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
return IntPtrLessThanOrEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Signed comparison |a| > |b| performed directly on the tagged words.
Node* CodeStubAssembler::SmiGreaterThan(Node* a, Node* b) {
return IntPtrGreaterThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}
// Returns the larger of the two Smis |a| and |b| (selects |b| when a < b,
// otherwise |a|).
Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
return SelectTaggedConstant(SmiLessThan(a, b), b, a);
}
@ -1783,8 +1744,10 @@ Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Node* right_instance_type = LoadInstanceType(right);
// Compute intersection and difference of instance types.
Node* anded_instance_types = WordAnd(left_instance_type, right_instance_type);
Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
Node* anded_instance_types =
Word32And(left_instance_type, right_instance_type);
Node* xored_instance_types =
Word32Xor(left_instance_type, right_instance_type);
// We create a one-byte cons string if
// 1. both strings are one-byte, or
@ -1801,15 +1764,15 @@ Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
Label two_byte_map(this);
Variable result(this, MachineRepresentation::kTagged);
Label done(this, &result);
GotoIf(WordNotEqual(
WordAnd(anded_instance_types,
IntPtrConstant(kStringEncodingMask | kOneByteDataHintTag)),
IntPtrConstant(0)),
GotoIf(Word32NotEqual(Word32And(anded_instance_types,
Int32Constant(kStringEncodingMask |
kOneByteDataHintTag)),
Int32Constant(0)),
&one_byte_map);
Branch(WordNotEqual(WordAnd(xored_instance_types,
IntPtrConstant(kStringEncodingMask |
kOneByteDataHintMask)),
IntPtrConstant(kOneByteStringTag | kOneByteDataHintTag)),
Branch(Word32NotEqual(Word32And(xored_instance_types,
Int32Constant(kStringEncodingMask |
kOneByteDataHintMask)),
Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
&two_byte_map, &one_byte_map);
Bind(&one_byte_map);
@ -2361,16 +2324,17 @@ Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
ParameterMode mode) {
if (mode == SMI_PARAMETERS) {
old_capacity = BitcastTaggedToWord(old_capacity);
}
Node* half_old_capacity = WordShr(old_capacity, IntPtrConstant(1));
Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity);
Node* unconditioned_result =
IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode));
if (mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS) {
return unconditioned_result;
Node* unconditioned_result = IntPtrAdd(new_capacity, IntPtrConstant(16));
if (mode == SMI_PARAMETERS) {
return SmiAnd(BitcastWordToTaggedSigned(unconditioned_result),
SmiConstant(-1));
} else {
int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
return WordAnd(unconditioned_result,
IntPtrConstant(static_cast<size_t>(-1) << kSmiShiftBits));
return unconditioned_result;
}
}
@ -2396,12 +2360,12 @@ Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
// If the gap growth is too big, fall back to the runtime.
Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
Node* max_capacity = IntPtrAdd(capacity, max_gap);
GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), bailout);
Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
// Calculate the capacity of the new backing store.
Node* new_capacity = CalculateNewElementsCapacity(
IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode);
IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
return GrowElementsCapacity(object, elements, kind, kind, capacity,
new_capacity, mode, bailout);
}
@ -2413,8 +2377,8 @@ Node* CodeStubAssembler::GrowElementsCapacity(
// If size of the allocation for the new capacity doesn't fit in a page
// that we can bump-pointer allocate from, fall back to the runtime.
int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
GotoIf(UintPtrGreaterThanOrEqual(new_capacity,
IntPtrOrSmiConstant(max_size, mode)),
GotoIf(UintPtrOrSmiGreaterThanOrEqual(
new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
bailout);
// Allocate the new backing store.
@ -2606,7 +2570,7 @@ Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
Goto(&if_valueisheapnumber);
Bind(&if_notoverflow);
{
Node* result = Projection(0, pair);
Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
var_result.Bind(result);
Goto(&if_join);
}
@ -3509,12 +3473,10 @@ Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
CSA_ASSERT(this, TaggedIsSmi(left_length));
CSA_ASSERT(this, TaggedIsSmi(right_length));
Node* new_length = SmiAdd(left_length, right_length);
GotoIf(UintPtrGreaterThanOrEqual(
new_length, SmiConstant(Smi::FromInt(String::kMaxLength))),
GotoIf(SmiAboveOrEqual(new_length, SmiConstant(String::kMaxLength)),
&runtime);
GotoIf(IntPtrLessThan(new_length,
SmiConstant(Smi::FromInt(ConsString::kMinLength))),
GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
&non_cons);
result.Bind(NewConsString(context, new_length, left, right, flags));
@ -3527,23 +3489,24 @@ Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
Node* right_instance_type = LoadInstanceType(right);
// Compute intersection and difference of instance types.
Node* ored_instance_types = WordOr(left_instance_type, right_instance_type);
Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
Node* ored_instance_types = Word32Or(left_instance_type, right_instance_type);
Node* xored_instance_types =
Word32Xor(left_instance_type, right_instance_type);
// Check if both strings have the same encoding and both are sequential.
GotoIf(WordNotEqual(
WordAnd(xored_instance_types, IntPtrConstant(kStringEncodingMask)),
IntPtrConstant(0)),
GotoIf(Word32NotEqual(Word32And(xored_instance_types,
Int32Constant(kStringEncodingMask)),
Int32Constant(0)),
&runtime);
GotoIf(WordNotEqual(WordAnd(ored_instance_types,
IntPtrConstant(kStringRepresentationMask)),
IntPtrConstant(0)),
GotoIf(Word32NotEqual(Word32And(ored_instance_types,
Int32Constant(kStringRepresentationMask)),
Int32Constant(0)),
&runtime);
Label two_byte(this);
GotoIf(WordEqual(
WordAnd(ored_instance_types, IntPtrConstant(kStringEncodingMask)),
IntPtrConstant(kTwoByteStringTag)),
GotoIf(Word32Equal(
Word32And(ored_instance_types, Int32Constant(kStringEncodingMask)),
Int32Constant(kTwoByteStringTag)),
&two_byte);
// One-byte sequential string case
Node* new_string =
@ -3744,7 +3707,9 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
// Make the hash mask from the length of the number string cache. It
// contains two elements (number and string) for each cache entry.
Node* mask = LoadFixedArrayBaseLength(number_string_cache);
// TODO(ishell): cleanup mask handling.
Node* mask =
BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
Node* one = IntPtrConstant(1);
mask = IntPtrSub(mask, one);
@ -3760,9 +3725,9 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
MachineType::Int32());
Node* hash = Word32Xor(low, high);
if (Is64()) hash = ChangeInt32ToInt64(hash);
hash = ChangeInt32ToIntPtr(hash);
hash = WordShl(hash, one);
Node* index = WordAnd(hash, SmiToWord(mask));
Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
// Cache entry's key must be a heap number
Node* number_key =
@ -3776,8 +3741,8 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
MachineType::Int32());
Node* high_compare = LoadObjectField(
number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
GotoUnless(WordEqual(low, low_compare), &runtime);
GotoUnless(WordEqual(high, high_compare), &runtime);
GotoUnless(Word32Equal(low, low_compare), &runtime);
GotoUnless(Word32Equal(high, high_compare), &runtime);
// Heap number match, return value from cache entry.
IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
@ -3795,7 +3760,8 @@ Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
Bind(&smi);
{
// Load the smi key, make sure it matches the smi we're looking for.
Node* smi_index = WordAnd(WordShl(argument, one), mask);
Node* smi_index = BitcastWordToTagged(
WordAnd(WordShl(BitcastTaggedToWord(argument), one), mask));
Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
SMI_PARAMETERS);
GotoIf(WordNotEqual(smi_key, argument), &runtime);
@ -4599,9 +4565,9 @@ void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
const int kInitialIndex = 0;
PropertyDetails d(NONE, DATA, kInitialIndex, PropertyCellType::kNoCell);
enum_index =
WordShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
STATIC_ASSERT(kInitialIndex == 0);
var_details.Bind(WordOr(SmiConstant(d.AsSmi()), enum_index));
var_details.Bind(SmiOr(SmiConstant(d.AsSmi()), enum_index));
// Private names must be marked non-enumerable.
Label not_private(this, &var_details);
@ -4610,8 +4576,8 @@ void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
const int kPrivateMask = 1 << Symbol::kPrivateBit;
GotoUnless(IsSetWord32(flags, kPrivateMask), &not_private);
Node* dont_enum =
WordShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
var_details.Bind(WordOr(var_details.value(), dont_enum));
SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
var_details.Bind(SmiOr(var_details.value(), dont_enum));
Goto(&not_private);
Bind(&not_private);
@ -4641,20 +4607,20 @@ void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
// Require 33% to still be free after adding additional_elements.
// Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
// But that's OK here because it's only used for a comparison.
Node* required_capacity_pseudo_smi = SmiAdd(new_nof, WordShr(new_nof, 1));
GotoIf(UintPtrLessThan(capacity, required_capacity_pseudo_smi), bailout);
Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
// Require rehashing if more than 50% of free elements are deleted elements.
Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
CSA_ASSERT(this, UintPtrGreaterThan(capacity, new_nof));
Node* half_of_free_elements = WordShr(SmiSub(capacity, new_nof), 1);
GotoIf(UintPtrGreaterThan(deleted, half_of_free_elements), bailout);
CSA_ASSERT(this, SmiAbove(capacity, new_nof));
Node* half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
Node* enum_index = nullptr;
if (Dictionary::kIsEnumerable) {
enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1));
Node* max_enum_index =
SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
GotoIf(UintPtrGreaterThan(new_enum_index, max_enum_index), bailout);
GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
// No more bailouts after this point.
// Operations from here on can have side effects.
@ -4806,8 +4772,8 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
(DescriptorArray::kDescriptorValue - DescriptorArray::kDescriptorKey) *
kPointerSize;
Node* details = LoadAndUntagToWord32FixedArrayElement(descriptors, name_index,
name_to_details_offset);
Node* details = LoadAndUntagToWord32FixedArrayElement(
descriptors, name_index, name_to_details_offset, INTPTR_PARAMETERS);
var_details->Bind(details);
Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
@ -4891,8 +4857,8 @@ void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
}
Bind(&if_in_descriptor);
{
Node* value =
LoadFixedArrayElement(descriptors, name_index, name_to_value_offset);
Node* value = LoadFixedArrayElement(
descriptors, name_index, name_to_value_offset, INTPTR_PARAMETERS);
var_value->Bind(value);
Goto(&done);
}
@ -4914,8 +4880,8 @@ void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
(NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
kPointerSize;
Node* details = LoadAndUntagToWord32FixedArrayElement(dictionary, name_index,
name_to_details_offset);
Node* details = LoadAndUntagToWord32FixedArrayElement(
dictionary, name_index, name_to_details_offset, INTPTR_PARAMETERS);
var_details->Bind(details);
var_value->Bind(LoadFixedArrayElement(
@ -6088,7 +6054,8 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
// Bail out if the object is not in new space.
Node* object_page = PageFromAddress(object);
Node* object_word = BitcastTaggedToWord(object);
Node* object_page = PageFromAddress(object_word);
{
Node* page_flags = Load(MachineType::IntPtr(), object_page,
IntPtrConstant(Page::kFlagsOffset));
@ -6099,7 +6066,7 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
}
Node* memento_last_word = IntPtrAdd(
object, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
Node* memento_last_word_page = PageFromAddress(memento_last_word);
Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);

View File

@ -95,27 +95,27 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return value;
}
#define PARAMETER_BINARY_OPERATION(OpName, IntPtrOpName, SmiOpName, \
Int32OpName) \
Node* OpName(Node* value1, Node* value2, ParameterMode mode) { \
if (mode == SMI_PARAMETERS) { \
return SmiOpName(value1, value2); \
} else if (mode == INTPTR_PARAMETERS) { \
return IntPtrOpName(value1, value2); \
} else { \
DCHECK_EQ(INTEGER_PARAMETERS, mode); \
return Int32OpName(value1, value2); \
} \
#define PARAMETER_BINOP(OpName, IntPtrOpName, SmiOpName, Int32OpName) \
Node* OpName(Node* a, Node* b, ParameterMode mode) { \
if (mode == SMI_PARAMETERS) { \
return SmiOpName(a, b); \
} else if (mode == INTPTR_PARAMETERS) { \
return IntPtrOpName(a, b); \
} else { \
DCHECK_EQ(INTEGER_PARAMETERS, mode); \
return Int32OpName(a, b); \
} \
}
PARAMETER_BINARY_OPERATION(IntPtrOrSmiAdd, IntPtrAdd, SmiAdd, Int32Add)
PARAMETER_BINARY_OPERATION(IntPtrOrSmiLessThan, IntPtrLessThan, SmiLessThan,
Int32LessThan)
PARAMETER_BINARY_OPERATION(IntPtrOrSmiGreaterThan, IntPtrGreaterThan,
SmiGreaterThan, Int32GreaterThan)
PARAMETER_BINARY_OPERATION(UintPtrOrSmiLessThan, UintPtrLessThan, SmiBelow,
Uint32LessThan)
#undef PARAMETER_BINARY_OPERATION
PARAMETER_BINOP(IntPtrOrSmiAdd, IntPtrAdd, SmiAdd, Int32Add)
PARAMETER_BINOP(IntPtrOrSmiLessThan, IntPtrLessThan, SmiLessThan,
Int32LessThan)
PARAMETER_BINOP(IntPtrOrSmiGreaterThan, IntPtrGreaterThan, SmiGreaterThan,
Int32GreaterThan)
PARAMETER_BINOP(UintPtrOrSmiLessThan, UintPtrLessThan, SmiBelow,
Uint32LessThan)
PARAMETER_BINOP(UintPtrOrSmiGreaterThanOrEqual, UintPtrGreaterThanOrEqual,
SmiAboveOrEqual, Uint32GreaterThanOrEqual)
#undef PARAMETER_BINOP
Node* NoContextConstant();
#define HEAP_CONSTANT_ACCESSOR(rootName, name) Node* name##Constant();
@ -157,25 +157,46 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* SmiToWord32(Node* value);
// Smi operations.
Node* SmiAdd(Node* a, Node* b);
Node* SmiSub(Node* a, Node* b);
Node* SmiEqual(Node* a, Node* b);
Node* SmiAbove(Node* a, Node* b);
Node* SmiAboveOrEqual(Node* a, Node* b);
Node* SmiBelow(Node* a, Node* b);
Node* SmiLessThan(Node* a, Node* b);
Node* SmiLessThanOrEqual(Node* a, Node* b);
Node* SmiGreaterThan(Node* a, Node* b);
#define SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName) \
Node* SmiOpName(Node* a, Node* b) { \
return BitcastWordToTaggedSigned( \
IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b))); \
}
SMI_ARITHMETIC_BINOP(SmiAdd, IntPtrAdd)
SMI_ARITHMETIC_BINOP(SmiSub, IntPtrSub)
SMI_ARITHMETIC_BINOP(SmiAnd, WordAnd)
SMI_ARITHMETIC_BINOP(SmiOr, WordOr)
#define SMI_SHIFT_OP(SmiOpName, IntPtrOpName) \
Node* SmiOpName(Node* a, int shift) { \
return BitcastWordToTaggedSigned( \
IntPtrOpName(BitcastTaggedToWord(a), shift)); \
} \
SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName)
SMI_SHIFT_OP(SmiShl, WordShl)
SMI_SHIFT_OP(SmiShr, WordShr)
#undef SMI_SHIFT_OP
#undef SMI_ARITHMETIC_BINOP
#define SMI_COMPARISON_OP(SmiOpName, IntPtrOpName) \
Node* SmiOpName(Node* a, Node* b) { \
return IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); \
}
SMI_COMPARISON_OP(SmiEqual, WordEqual)
SMI_COMPARISON_OP(SmiAbove, UintPtrGreaterThan)
SMI_COMPARISON_OP(SmiAboveOrEqual, UintPtrGreaterThanOrEqual)
SMI_COMPARISON_OP(SmiBelow, UintPtrLessThan)
SMI_COMPARISON_OP(SmiLessThan, IntPtrLessThan)
SMI_COMPARISON_OP(SmiLessThanOrEqual, IntPtrLessThanOrEqual)
SMI_COMPARISON_OP(SmiGreaterThan, IntPtrGreaterThan)
#undef SMI_COMPARISON_OP
Node* SmiMax(Node* a, Node* b);
Node* SmiMin(Node* a, Node* b);
// Computes a % b for Smi inputs a and b; result is not necessarily a Smi.
Node* SmiMod(Node* a, Node* b);
// Computes a * b for Smi inputs a and b; result is not necessarily a Smi.
Node* SmiMul(Node* a, Node* b);
Node* SmiOr(Node* a, Node* b) {
return BitcastWordToTaggedSigned(
WordOr(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
}
// Smi | HeapNumber operations.
Node* NumberInc(Node* value);
@ -708,6 +729,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
return DecodeWord<T>(ChangeUint32ToWord(word32));
}
// Returns a node that contains a decoded (unsigned!) value of a bit
// field |T| in |word|. Returns result as an uint32 node.
template <typename T>
Node* DecodeWord32FromWord(Node* word) {
return TruncateWordToWord32(DecodeWord<T>(word));
}
// Decodes an unsigned (!) value from |word32| to an uint32 node.
Node* DecodeWord32(Node* word32, uint32_t shift, uint32_t mask);
@ -861,7 +889,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Label* if_not_found, Label* if_bailout);
// This is a type of a lookup in holder generator function. In case of a
// property lookup the {key} is guaranteed to be a unique name and in case of
property lookup the {key} is guaranteed to be a unique name and in case of
// element lookup the key is an Int32 index.
typedef std::function<void(Node* receiver, Node* holder, Node* map,
Node* instance_type, Node* key, Label* next_holder,

View File

@ -437,7 +437,15 @@ Handle<Code> TurboFanCodeStub::GenerateCode() {
compiler::CodeAssemblerState state(isolate(), &zone, descriptor,
GetCodeFlags(), name);
GenerateAssembly(&state);
return compiler::CodeAssembler::GenerateCode(&state);
// TODO(ishell): remove this when code stub assembler graphs verification
// is enabled for all stubs.
bool saved_csa_verify = FLAG_csa_verify;
// Enable verification only in mksnapshot.
FLAG_csa_verify = DEBUG_BOOL && FLAG_startup_blob != nullptr;
Handle<Code> code = compiler::CodeAssembler::GenerateCode(&state);
FLAG_csa_verify = saved_csa_verify;
return code;
}
#define ACCESSOR_ASSEMBLER(Name) \

View File

@ -197,7 +197,7 @@ void AccessorAssemblerImpl::HandleLoadICSmiHandlerCase(
Node* is_jsarray_condition =
IsSetWord<LoadHandler::IsJsArrayBits>(handler_word);
Node* elements_kind =
DecodeWord<LoadHandler::ElementsKindBits>(handler_word);
DecodeWord32FromWord<LoadHandler::ElementsKindBits>(handler_word);
Label if_hole(this), unimplemented_elements_kind(this);
Label* out_of_bounds = miss;
EmitElementLoad(holder, elements, elements_kind, intptr_index,
@ -782,7 +782,7 @@ void AccessorAssemblerImpl::EmitElementLoad(
if_fast_double(this), if_fast_holey_double(this), if_nonfast(this),
if_dictionary(this);
GotoIf(
IntPtrGreaterThan(elements_kind, IntPtrConstant(LAST_FAST_ELEMENTS_KIND)),
Int32GreaterThan(elements_kind, Int32Constant(LAST_FAST_ELEMENTS_KIND)),
&if_nonfast);
EmitFastElementsBoundsCheck(object, elements, intptr_index,
@ -803,8 +803,8 @@ void AccessorAssemblerImpl::EmitElementLoad(
&if_fast_double,
// FAST_HOLEY_DOUBLE_ELEMENTS
&if_fast_holey_double};
Switch(TruncateWordToWord32(elements_kind), unimplemented_elements_kind,
kinds, labels, arraysize(kinds));
Switch(elements_kind, unimplemented_elements_kind, kinds, labels,
arraysize(kinds));
Bind(&if_fast_packed);
{
@ -842,11 +842,11 @@ void AccessorAssemblerImpl::EmitElementLoad(
Bind(&if_nonfast);
{
STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
GotoIf(IntPtrGreaterThanOrEqual(
GotoIf(Int32GreaterThanOrEqual(
elements_kind,
IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
&if_typed_array);
GotoIf(IntPtrEqual(elements_kind, IntPtrConstant(DICTIONARY_ELEMENTS)),
GotoIf(Word32Equal(elements_kind, Int32Constant(DICTIONARY_ELEMENTS)),
&if_dictionary);
Goto(unimplemented_elements_kind);
}
@ -913,8 +913,8 @@ void AccessorAssemblerImpl::EmitElementLoad(
FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND + 1;
DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kinds));
DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kind_labels));
Switch(TruncateWordToWord32(elements_kind), miss, elements_kinds,
elements_kind_labels, kTypedElementsKindCount);
Switch(elements_kind, miss, elements_kinds, elements_kind_labels,
kTypedElementsKindCount);
Bind(&uint8_elements);
{
Comment("UINT8_ELEMENTS"); // Handles UINT8_CLAMPED_ELEMENTS too.
@ -1397,8 +1397,9 @@ void AccessorAssemblerImpl::KeyedLoadICGeneric(const LoadICParameters* p) {
const int32_t kMaxLinear = 210;
Label stub_cache(this);
Node* bitfield3 = LoadMapBitField3(receiver_map);
Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
GotoIf(Uint32LessThan(Int32Constant(kMaxLinear), nof), &stub_cache);
Node* nof =
DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
GotoIf(UintPtrLessThan(IntPtrConstant(kMaxLinear), nof), &stub_cache);
Node* descriptors = LoadMapDescriptors(receiver_map);
Variable var_name_index(this, MachineType::PointerRepresentation());
Label if_descriptor_found(this);

View File

@ -118,7 +118,7 @@ void KeyedStoreGenericAssembler::TryRewriteElements(
TrapAllocationMemento(receiver, bailout);
}
Label perform_transition(this), check_holey_map(this);
Variable var_target_map(this, MachineType::PointerRepresentation());
Variable var_target_map(this, MachineRepresentation::kTagged);
// Check if the receiver has the default |from_kind| map.
{
Node* packed_map =
@ -534,7 +534,7 @@ void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
DescriptorArray::kDescriptorKey) *
kPointerSize;
Node* details = LoadAndUntagToWord32FixedArrayElement(
descriptors, name_index, kNameToDetailsOffset);
descriptors, name_index, kNameToDetailsOffset, INTPTR_PARAMETERS);
JumpIfDataProperty(details, &ok_to_write, readonly);
// Accessor case.
@ -553,15 +553,15 @@ void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
NameDictionary::kEntryKeyIndex) *
kPointerSize;
Node* details = LoadAndUntagToWord32FixedArrayElement(
dictionary, entry, kNameToDetailsOffset);
dictionary, entry, kNameToDetailsOffset, INTPTR_PARAMETERS);
JumpIfDataProperty(details, &ok_to_write, readonly);
// Accessor case.
const int kNameToValueOffset = (NameDictionary::kEntryValueIndex -
NameDictionary::kEntryKeyIndex) *
kPointerSize;
var_accessor_pair->Bind(
LoadFixedArrayElement(dictionary, entry, kNameToValueOffset));
var_accessor_pair->Bind(LoadFixedArrayElement(
dictionary, entry, kNameToValueOffset, INTPTR_PARAMETERS));
var_accessor_holder->Bind(holder);
Goto(accessor);
}
@ -574,8 +574,8 @@ void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
GlobalDictionary::kEntryKeyIndex) *
kPointerSize;
Node* property_cell =
LoadFixedArrayElement(dictionary, entry, kNameToValueOffset);
Node* property_cell = LoadFixedArrayElement(
dictionary, entry, kNameToValueOffset, INTPTR_PARAMETERS);
Node* value =
LoadObjectField(property_cell, PropertyCell::kValueOffset);
@ -641,15 +641,17 @@ void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
NameDictionary::kEntryKeyIndex) *
kPointerSize;
Node* details = LoadAndUntagToWord32FixedArrayElement(
properties, var_name_index.value(), kNameToDetailsOffset);
properties, var_name_index.value(), kNameToDetailsOffset,
INTPTR_PARAMETERS);
JumpIfDataProperty(details, &overwrite, &readonly);
// Accessor case.
const int kNameToValueOffset =
(NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
kPointerSize;
var_accessor_pair.Bind(LoadFixedArrayElement(
properties, var_name_index.value(), kNameToValueOffset));
var_accessor_pair.Bind(
LoadFixedArrayElement(properties, var_name_index.value(),
kNameToValueOffset, INTPTR_PARAMETERS));
var_accessor_holder.Bind(receiver);
Goto(&accessor);