[dict-proto] CSA/Torque implementation of SwissNameDictionary, pt. 1
This CL adds
a) swiss-hash-table-helpers.tq, which contains Torque counterparts for
   the C++ code in swiss-hash-table-helpers.h.
b) various helpers required for that, including adding several CSA
   integer operations to base.tq.

Bug: v8:11330
Change-Id: I6f6faf742334b5d107e84364ed793ad856d1cda1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2757427
Reviewed-by: Santiago Aboy Solanes <solanes@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Frank Emrich <emrich@google.com>
Cr-Commit-Position: refs/heads/master@{#73580}
parent 690636c041
commit 534431aad9

BUILD.gn
@@ -1442,6 +1442,7 @@ torque_files = [
  "src/objects/stack-frame-info.tq",
  "src/objects/string.tq",
  "src/objects/struct.tq",
  "src/objects/swiss-hash-table-helpers.tq",
  "src/objects/swiss-name-dictionary.tq",
  "src/objects/synthetic-module.tq",
  "src/objects/template-objects.tq",
@@ -904,16 +904,21 @@ extern operator '&' macro WordAnd(uintptr, uintptr): uintptr;
extern operator '|' macro WordOr(uintptr, uintptr): uintptr;

extern operator '+' macro Int32Add(int32, int32): int32;
extern operator '+' macro Uint32Add(uint32, uint32): uint32;
extern operator '+' macro ConstexprUint32Add(
    constexpr uint32, constexpr int32): constexpr uint32;
extern operator '+' macro ConstexprInt31Add(
    constexpr int31, constexpr int31): constexpr int31;
extern operator '+' macro ConstexprInt32Add(
    constexpr int32, constexpr int32): constexpr int32;
extern operator '*' macro ConstexprInt31Mul(
    constexpr int31, constexpr int31): constexpr int31;
extern operator '-' macro Int32Sub(int16, int16): int32;
extern operator '-' macro Int32Sub(uint16, uint16): int32;
extern operator '-' macro Int32Sub(int32, int32): int32;
extern operator '-' macro UInt32Sub(uint32, uint32): uint32;
extern operator '*' macro Int32Mul(int32, int32): int32;
extern operator '*' macro Uint32Mul(uint32, uint32): uint32;
extern operator '/' macro Int32Div(int32, int32): int32;
extern operator '%' macro Int32Mod(int32, int32): int32;
extern operator '&' macro Word32And(int32, int32): int32;
@@ -950,6 +955,8 @@ extern operator '==' macro Word32Equal(bool, bool): bool;
extern operator '!=' macro Word32NotEqual(bool, bool): bool;
extern operator '|' macro ConstexprWord32Or(
    constexpr int32, constexpr int32): constexpr int32;
extern operator '^' macro Word32Xor(int32, int32): int32;
extern operator '^' macro Word32Xor(uint32, uint32): uint32;

extern operator '==' macro Word64Equal(int64, int64): bool;
extern operator '==' macro Word64Equal(uint64, uint64): bool;
@@ -962,6 +969,8 @@ extern operator '<<' macro Word64Shl(uint64, uint64): uint64;
extern operator '|' macro Word64Or(int64, int64): int64;
extern operator '|' macro Word64Or(uint64, uint64): uint64;
extern operator '&' macro Word64And(uint64, uint64): uint64;
extern operator '^' macro Word64Xor(int64, int64): int64;
extern operator '^' macro Word64Xor(uint64, uint64): uint64;

extern operator '+' macro Float64Add(float64, float64): float64;
extern operator '-' macro Float64Sub(float64, float64): float64;
@@ -1037,6 +1046,9 @@ operator '==' macro PromiseStateEquals(
  return Word32Equal(s1, s2);
}

extern macro CountLeadingZeros64(uint64): int64;
extern macro CountTrailingZeros64(uint64): int64;

extern macro TaggedIsSmi(Object): bool;
extern macro TaggedIsNotSmi(Object): bool;
extern macro TaggedIsPositiveSmi(Object): bool;
@@ -1091,6 +1103,7 @@ extern macro ChangeTaggedToFloat64(implicit context: Context)(JSAny): float64;
extern macro ChangeFloat64ToTagged(float64): Number;
extern macro ChangeFloat64ToUintPtr(float64): uintptr;
extern macro ChangeFloat64ToIntPtr(float64): intptr;
extern macro ChangeBoolToInt32(bool): int32;
extern macro ChangeInt32ToFloat64(int32): float64;
extern macro ChangeInt32ToIntPtr(int32): intptr;  // Sign-extends.
extern macro ChangeUint32ToWord(uint32): uintptr;  // Doesn't sign-extend.
@@ -69,6 +69,10 @@ FromConstexpr<Smi, constexpr Smi>(s: constexpr Smi): Smi {
FromConstexpr<uint32, constexpr int31>(i: constexpr int31): uint32 {
  return Unsigned(Int32Constant(i));
}
FromConstexpr<uint8, constexpr uint8>(i: constexpr uint8): uint8 {
  const i: uint32 = i;
  return %RawDownCast<uint8>(i);
}
FromConstexpr<uint32, constexpr uint32>(i: constexpr uint32): uint32 {
  return Unsigned(%FromConstexpr<int32>(i));
}
@@ -134,6 +138,9 @@ macro Convert<To: type, From: type>(i: From): To labels Overflow {
Convert<Boolean, bool>(b: bool): Boolean {
  return b ? True : False;
}
Convert<int32, bool>(b: bool): int32 {
  return ChangeBoolToInt32(b);
}
Convert<Number, int32>(i: int32): Number {
  return ChangeInt32ToTagged(i);
}
@@ -606,7 +606,7 @@ TNode<IntPtrT> CodeStubAssembler::PopulationCountFallback(
  return Signed(WordAnd(value, UintPtrConstant(0xff)));
}

TNode<Int64T> CodeStubAssembler::Word64PopulationCount(TNode<Word64T> value) {
TNode<Int64T> CodeStubAssembler::PopulationCount64(TNode<Word64T> value) {
  if (IsWord64PopcntSupported()) {
    return Word64Popcnt(value);
  }
@@ -620,7 +620,7 @@ TNode<Int64T> CodeStubAssembler::Word64PopulationCount(TNode<Word64T> value) {
      PopulationCountFallback(ReinterpretCast<UintPtrT>(value)));
}

TNode<Int32T> CodeStubAssembler::Word32PopulationCount(TNode<Word32T> value) {
TNode<Int32T> CodeStubAssembler::PopulationCount32(TNode<Word32T> value) {
  if (IsWord32PopcntSupported()) {
    return Word32Popcnt(value);
  }
@@ -636,8 +636,7 @@ TNode<Int32T> CodeStubAssembler::Word32PopulationCount(TNode<Word32T> value) {
  }
}

TNode<Int64T> CodeStubAssembler::Word64CountTrailingZeros(
    TNode<Word64T> value) {
TNode<Int64T> CodeStubAssembler::CountTrailingZeros64(TNode<Word64T> value) {
  if (IsWord64CtzSupported()) {
    return Word64Ctz(value);
  }
@@ -653,11 +652,10 @@ TNode<Int64T> CodeStubAssembler::Word64CountTrailingZeros(
  // than doing binary search.
  TNode<Word64T> lhs = Word64Not(value);
  TNode<Word64T> rhs = Uint64Sub(Unsigned(value), Uint64Constant(1));
  return Word64PopulationCount(Word64And(lhs, rhs));
  return PopulationCount64(Word64And(lhs, rhs));
}

TNode<Int32T> CodeStubAssembler::Word32CountTrailingZeros(
    TNode<Word32T> value) {
TNode<Int32T> CodeStubAssembler::CountTrailingZeros32(TNode<Word32T> value) {
  if (IsWord32CtzSupported()) {
    return Word32Ctz(value);
  }
@@ -666,13 +664,21 @@ TNode<Int32T> CodeStubAssembler::Word32CountTrailingZeros(
    // Same fallback as in Word64CountTrailingZeros.
    TNode<Word32T> lhs = Word32BitwiseNot(value);
    TNode<Word32T> rhs = Int32Sub(Signed(value), Int32Constant(1));
    return Word32PopulationCount(Word32And(lhs, rhs));
    return PopulationCount32(Word32And(lhs, rhs));
  } else {
    TNode<Int64T> res64 = Word64CountTrailingZeros(ChangeUint32ToUint64(value));
    TNode<Int64T> res64 = CountTrailingZeros64(ChangeUint32ToUint64(value));
    return TruncateInt64ToInt32(Signed(res64));
  }
}

TNode<Int64T> CodeStubAssembler::CountLeadingZeros64(TNode<Word64T> value) {
  return Word64Clz(value);
}

TNode<Int32T> CodeStubAssembler::CountLeadingZeros32(TNode<Word32T> value) {
  return Word32Clz(value);
}

template <>
TNode<Smi> CodeStubAssembler::TaggedToParameter(TNode<Smi> value) {
  return value;
@@ -5626,6 +5632,10 @@ TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
  return var_result.value();
}

TNode<Int32T> CodeStubAssembler::ChangeBoolToInt32(TNode<BoolT> b) {
  return UncheckedCast<Int32T>(b);
}

TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
                                              TNode<Object> value,
                                              TNode<String> method_name) {
@@ -14309,5 +14319,37 @@ TNode<SwissNameDictionary> CodeStubAssembler::AllocateSwissNameDictionary(
  return AllocateSwissNameDictionary(IntPtrConstant(at_least_space_for));
}

TNode<Uint64T> CodeStubAssembler::LoadSwissNameDictionaryCtrlTableGroup(
    TNode<IntPtrT> address) {
  TNode<RawPtrT> ptr = ReinterpretCast<RawPtrT>(address);
  TNode<Uint64T> data = UnalignedLoad<Uint64T>(ptr, IntPtrConstant(0));

#ifdef V8_TARGET_LITTLE_ENDIAN
  return data;
#else
  // Reverse byte order.
  // TODO(v8:11330) Doing this without using dedicated instructions (which we
  // don't have access to here) will destroy any performance benefit Swiss
  // Tables have. So we just support this so that we don't have to disable the
  // test suite for SwissNameDictionary on big endian platforms.

  TNode<Uint64T> result = Uint64Constant(0);
  constexpr int count = sizeof(uint64_t);
  for (int i = 0; i < count; ++i) {
    int src_offset = i * 8;
    int dest_offset = (count - i - 1) * 8;

    TNode<Uint64T> mask = Uint64Constant(0xffULL << src_offset);
    TNode<Uint64T> src_data = Word64And(data, mask);

    TNode<Uint64T> shifted =
        src_offset < dest_offset
            ? Word64Shl(src_data, Uint64Constant(dest_offset - src_offset))
            : Word64Shr(src_data, Uint64Constant(src_offset - dest_offset));
    result = Unsigned(Word64Or(result, shifted));
  }
  return result;
#endif
}
}  // namespace internal
}  // namespace v8
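The CountTrailingZeros fallback above relies on a classic bit trick rather than a binary search: ~value & (value - 1) keeps exactly the trailing zero bits of value, so one population count gives the answer. A minimal standalone C++ sketch of the same identity (illustration only, not part of this CL; C++20 is assumed for std::popcount, and the function name is hypothetical):

#include <bit>
#include <cassert>
#include <cstdint>

// ctz(x) == popcount(~x & (x - 1)); for x == 0 this yields 64, matching the
// convention of the CSA fallback above.
int CountTrailingZerosViaPopcount(uint64_t x) {
  return std::popcount(~x & (x - 1));
}

int main() {
  assert(CountTrailingZerosViaPopcount(0b1011000u) == 3);
  assert(CountTrailingZerosViaPopcount(1) == 0);
  assert(CountTrailingZerosViaPopcount(0) == 64);
}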
@@ -541,10 +541,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
                              int* value);

  TNode<IntPtrT> PopulationCountFallback(TNode<UintPtrT> value);
  TNode<Int64T> Word64PopulationCount(TNode<Word64T> value);
  TNode<Int32T> Word32PopulationCount(TNode<Word32T> value);
  TNode<Int64T> Word64CountTrailingZeros(TNode<Word64T> value);
  TNode<Int32T> Word32CountTrailingZeros(TNode<Word32T> value);
  TNode<Int64T> PopulationCount64(TNode<Word64T> value);
  TNode<Int32T> PopulationCount32(TNode<Word32T> value);
  TNode<Int64T> CountTrailingZeros64(TNode<Word64T> value);
  TNode<Int32T> CountTrailingZeros32(TNode<Word32T> value);
  TNode<Int64T> CountLeadingZeros64(TNode<Word64T> value);
  TNode<Int32T> CountLeadingZeros32(TNode<Word32T> value);

  // Round the 32bits payload of the provided word up to the next power of two.
  TNode<IntPtrT> IntPtrRoundUpToPowerOfTwo32(TNode<IntPtrT> value);
@@ -2256,6 +2258,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
  TNode<Float64T> ChangeTaggedToFloat64(TNode<Context> context,
                                        TNode<Object> input);

  TNode<Int32T> ChangeBoolToInt32(TNode<BoolT> b);

  void TaggedToNumeric(TNode<Context> context, TNode<Object> value,
                       TVariable<Numeric>* var_numeric);
  void TaggedToNumericWithFeedback(TNode<Context> context, TNode<Object> value,
@@ -3555,6 +3559,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
  bool ConstexprInt32GreaterThanEqual(int32_t a, int32_t b) { return a >= b; }
  uint32_t ConstexprUint32Add(uint32_t a, uint32_t b) { return a + b; }
  int32_t ConstexprUint32Sub(uint32_t a, uint32_t b) { return a - b; }
  int32_t ConstexprInt32Sub(int32_t a, int32_t b) { return a - b; }
  int32_t ConstexprInt32Add(int32_t a, int32_t b) { return a + b; }
  int31_t ConstexprInt31Add(int31_t a, int31_t b) {
    int32_t val;
    CHECK(!base::bits::SignedAddOverflow32(a, b, &val));
@@ -3714,6 +3720,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
  TNode<SwissNameDictionary> AllocateSwissNameDictionary(
      int at_least_space_for);

  TNode<Uint64T> LoadSwissNameDictionaryCtrlTableGroup(TNode<IntPtrT> address);

 private:
  friend class CodeStubArguments;

@@ -733,6 +733,18 @@ TNode<Object> CodeAssembler::LoadRoot(RootIndex root_index) {
      LoadFullTagged(isolate_root, IntPtrConstant(offset)));
}

template <typename T>
TNode<T> CodeAssembler::UnalignedLoad(TNode<RawPtrT> base,
                                      TNode<IntPtrT> offset) {
  MachineType mt =
      MachineType::TypeForRepresentation(MachineRepresentationOf<T>::value);
  return UncheckedCast<T>(
      raw_assembler()->UnalignedLoad(mt, static_cast<Node*>(base), offset));
}

template TNode<Uint64T> CodeAssembler::UnalignedLoad(TNode<RawPtrT> base,
                                                     TNode<IntPtrT> offset);

void CodeAssembler::Store(Node* base, Node* value) {
  raw_assembler()->Store(MachineRepresentation::kTagged, base, value,
                         kFullWriteBarrier);
@@ -789,6 +789,9 @@ class V8_EXPORT_PRIVATE CodeAssembler {
  // Load a value from the root array.
  TNode<Object> LoadRoot(RootIndex root_index);

  template <typename Type>
  TNode<Type> UnalignedLoad(TNode<RawPtrT> base, TNode<IntPtrT> offset);

  // Store value to raw memory location.
  void Store(Node* base, Node* value);
  void Store(Node* base, Node* offset, Node* value);
@@ -293,6 +293,9 @@ struct GroupPortableImpl {
      : ctrl(base::ReadLittleEndianValue<uint64_t>(
            reinterpret_cast<uintptr_t>(const_cast<ctrl_t*>(pos)))) {}

  static constexpr uint64_t kMsbs = 0x8080808080808080ULL;
  static constexpr uint64_t kLsbs = 0x0101010101010101ULL;

  // Returns a bitmask representing the positions of slots that match |hash|.
  BitMask<uint64_t, kWidth, 3> Match(h2_t hash) const {
    // For the technique, see:
@@ -308,22 +311,18 @@ struct GroupPortableImpl {
    // v = 0x1716151413121110
    // hash = 0x12
    // retval = (v - lsbs) & ~v & msbs = 0x0000000080800000
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl ^ (lsbs * hash);
    return BitMask<uint64_t, kWidth, 3>((x - lsbs) & ~x & msbs);
    auto x = ctrl ^ (kLsbs * hash);
    return BitMask<uint64_t, kWidth, 3>((x - kLsbs) & ~x & kMsbs);
  }

  // Returns a bitmask representing the positions of empty slots.
  BitMask<uint64_t, kWidth, 3> MatchEmpty() const {
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 6)) & msbs);
    return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 6)) & kMsbs);
  }

  // Returns a bitmask representing the positions of empty or deleted slots.
  BitMask<uint64_t, kWidth, 3> MatchEmptyOrDeleted() const {
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 7)) & msbs);
    return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 7)) & kMsbs);
  }

  // Returns the number of trailing empty or deleted elements in the group.
@@ -336,10 +335,8 @@ struct GroupPortableImpl {
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl & msbs;
    auto res = (~x + (x >> 7)) & ~lsbs;
    auto x = ctrl & kMsbs;
    auto res = (~x + (x >> 7)) & ~kLsbs;
    base::WriteLittleEndianValue(reinterpret_cast<uint64_t*>(dst), res);
  }

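The comment in Match() above works the technique through v = 0x1716151413121110 and hash = 0x12. A standalone C++ sketch of that SWAR byte match (illustration only, not part of this CL; MatchBytes is a hypothetical name):

#include <cassert>
#include <cstdint>

// Every control byte equal to `hash` becomes 0x00 after the XOR, and
// (x - kLsbs) & ~x then sets the MSB of exactly those zero bytes. Borrow
// propagation can set an extra MSB next to a real match (byte 3 in the
// example below); Swiss tables tolerate such false positives because keys
// are re-checked after a control-byte match.
uint64_t MatchBytes(uint64_t ctrl, uint8_t hash) {
  constexpr uint64_t kMsbs = 0x8080808080808080ULL;
  constexpr uint64_t kLsbs = 0x0101010101010101ULL;
  uint64_t x = ctrl ^ (kLsbs * hash);
  return (x - kLsbs) & ~x & kMsbs;
}

int main() {
  // The worked example from the comment: retval = 0x0000000080800000.
  assert(MatchBytes(0x1716151413121110ULL, 0x12) == 0x0000000080800000ULL);
}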
src/objects/swiss-hash-table-helpers.tq (new file, 128 lines)
@@ -0,0 +1,128 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Note that most structs and macros in this file have 1:1 C++ counterparts in
// the corresponding .h file.

#include 'src/objects/swiss-hash-table-helpers.h'

namespace swiss_table {

const kGroupWidth:
    constexpr int32 generates 'swiss_table::Group::kWidth';

const kUseSIMD:
    constexpr bool generates 'swiss_table::Group::kWidth == 16';

namespace ctrl {
const kEmpty: constexpr uint8
    generates 'static_cast<uint8_t>(swiss_table::Ctrl::kEmpty)';

const kDeleted: constexpr uint8
    generates 'static_cast<uint8_t>(swiss_table::Ctrl::kDeleted)';
}

const kH2Bits: constexpr int32 generates 'swiss_table::kH2Bits';
const kH2Mask:
    constexpr uint32 generates '((1 << swiss_table::kH2Bits) - 1)';

extern macro LoadSwissNameDictionaryCtrlTableGroup(intptr): uint64;

// Counterpart to swiss_table::ProbeSequence in C++ implementation.
struct ProbeSequence {
  macro Next() {
    this.index = this.index + Unsigned(FromConstexpr<int32>(kGroupWidth));
    this.offset = (this.offset + this.index) & this.mask;
  }

  macro Offset(index: int32): uint32 {
    return (this.offset + Unsigned(index)) & this.mask;
  }

  mask: uint32;
  offset: uint32;
  index: uint32;
}

const kByteMaskShift: uint64 = 3;

// Counterpart to swiss_table::BitMask<uint64_t, kWidth, 3>, as used by
// swiss_table::GroupPortableImpl in C++ implementation.
struct ByteMask {
  macro HasBitsSet(): bool {
    return this.mask != FromConstexpr<uint64>(0);
  }

  macro LowestBitSet(): int32 {
    return Convert<int32>(
        CountTrailingZeros64(this.mask) >> Signed(kByteMaskShift));
  }

  // Counterpart to operator++() in C++ version.
  macro ClearLowestSetBit() {
    this.mask = this.mask & (this.mask - FromConstexpr<uint64>(1));
  }

  mask: uint64;
}

macro H1(hash: uint32): uint32 {
  return hash >>> Unsigned(FromConstexpr<int32>(kH2Bits));
}

macro H2(hash: uint32): uint32 {
  return hash & kH2Mask;
}

const kLsbs: constexpr uint64
    generates 'swiss_table::GroupPortableImpl::kLsbs';
const kMsbs: constexpr uint64
    generates 'swiss_table::GroupPortableImpl::kMsbs';

struct GroupPortableImpl {
  macro Match(h2: uint32): ByteMask {
    const x = Word64Xor(this.ctrl, (kLsbs * Convert<uint64>(h2)));
    const result = (x - kLsbs) & ~x & kMsbs;
    return ByteMask{mask: result};
  }

  macro MatchEmpty(): ByteMask {
    const result = ((this.ctrl & (~this.ctrl << 6)) & kMsbs);
    return ByteMask{mask: result};
  }

  const ctrl: uint64;
}

struct GroupPortableLoader {
  macro LoadGroup(ctrlPtr: intptr): GroupPortableImpl {
    return GroupPortableImpl{
      ctrl: LoadSwissNameDictionaryCtrlTableGroup(ctrlPtr)
    };
  }
}

// TODO(v8:11330) Temporary nonsense code that suppresses warnings about unused
// macros.
@export
macro SwissTableWarningSuppresser(): never {
  const group = GroupPortableLoader{}.LoadGroup(0);
  let mask = group.Match(0);
  let seq = ProbeSequence{mask: 0, offset: 0, index: 0};

  const _bla2 = group.MatchEmpty();

  const _bla3 = H1(0);
  const _bla4 = H2(0);

  const _bla5 = seq.Next();
  const _bla6 = seq.Offset(0);

  const _bla7 = mask.HasBitsSet();
  const _bla8 = mask.LowestBitSet();
  const _bla9 = mask.ClearLowestSetBit();

  unreachable;
}
}
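ProbeSequence in the new Torque file uses the same triangular probing as the C++ helpers: every Next() advances by one more group width than the previous step, which visits each group of a power-of-two-sized control table. A standalone C++ sketch of that arithmetic, assuming the portable group width of 8 (illustration only; ProbeSequenceSketch is a hypothetical stand-in, not V8 code):

#include <cassert>
#include <cstdint>

// Mirrors ProbeSequence::Next()/Offset() above for the portable 8-byte group.
struct ProbeSequenceSketch {
  static constexpr uint32_t kGroupWidth = 8;

  uint32_t mask;    // table capacity - 1 (capacity is a power of two)
  uint32_t offset;  // current probe position, derived from H1(hash)
  uint32_t index;   // grows by kGroupWidth on every probe step

  void Next() {
    index += kGroupWidth;
    offset = (offset + index) & mask;
  }
  uint32_t Offset(uint32_t i) const { return (offset + i) & mask; }
};

int main() {
  ProbeSequenceSketch seq{/*mask=*/31, /*offset=*/5, /*index=*/0};
  assert(seq.Offset(3) == 8);   // slot 3 within the first probed group
  seq.Next();
  assert(seq.Offset(0) == 13);  // next group starts at (5 + 8) & 31
}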
@@ -4242,14 +4242,14 @@ TEST(PopCount) {
    int expected_pop32 = test_case.second;
    int expected_pop64 = 2 * expected_pop32;

    TNode<Int32T> pop32 = m.Word32PopulationCount(m.Uint32Constant(value32));
    TNode<Int32T> pop32 = m.PopulationCount32(m.Uint32Constant(value32));
    CSA_CHECK(&m, m.Word32Equal(pop32, m.Int32Constant(expected_pop32)));

    if (m.Is64()) {
      // TODO(emrich): enable once 64-bit operations are supported on 32-bit
      // architectures.

      TNode<Int64T> pop64 = m.Word64PopulationCount(m.Uint64Constant(value64));
      TNode<Int64T> pop64 = m.PopulationCount64(m.Uint64Constant(value64));
      CSA_CHECK(&m, m.Word64Equal(pop64, m.Int64Constant(expected_pop64)));
    }
  }
@@ -4279,7 +4279,7 @@ TEST(CountTrailingZeros) {
    int expected_ctz32 = test_case.second;
    int expected_ctz64 = expected_ctz32 + 32;

    TNode<Int32T> pop32 = m.Word32CountTrailingZeros(m.Uint32Constant(value32));
    TNode<Int32T> pop32 = m.CountTrailingZeros32(m.Uint32Constant(value32));
    CSA_CHECK(&m, m.Word32Equal(pop32, m.Int32Constant(expected_ctz32)));

    if (m.Is64()) {
@@ -4287,9 +4287,8 @@ TEST(CountTrailingZeros) {
      // architectures.

      TNode<Int64T> pop64_ext =
          m.Word64CountTrailingZeros(m.Uint64Constant(value32));
      TNode<Int64T> pop64 =
          m.Word64CountTrailingZeros(m.Uint64Constant(value64));
          m.CountTrailingZeros64(m.Uint64Constant(value32));
      TNode<Int64T> pop64 = m.CountTrailingZeros64(m.Uint64Constant(value64));

      CSA_CHECK(&m, m.Word64Equal(pop64_ext, m.Int64Constant(expected_ctz32)));
      CSA_CHECK(&m, m.Word64Equal(pop64, m.Int64Constant(expected_ctz64)));