uintptr -> fallback_uintptr to avoid confusion with uintptr_t

Victor Zverovich 2019-06-14 18:47:36 -07:00
parent f03a6c5325
commit e29708ee57
3 changed files with 6 additions and 6 deletions

View File

@@ -229,7 +229,7 @@ FMT_FUNC void system_error::init(int err_code, string_view format_str,
 namespace internal {
-template <> FMT_FUNC int count_digits<4>(internal::uintptr n) {
+template <> FMT_FUNC int count_digits<4>(internal::fallback_uintptr n) {
   // Assume little endian; pointer formatting is implementation-defined anyway.
   int i = static_cast<int>(sizeof(void*)) - 1;
   while (i > 0 && n.value[i] == 0) --i;
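The hunk above is cut off after the byte scan. As a rough, self-contained sketch of the digit-counting idea (assuming a little-endian layout and 8-bit bytes; the names are illustrative, not fmt's exact code):

// Illustrative stand-in for internal::fallback_uintptr: the raw pointer
// bytes stored in an unsigned char array.
struct fallback_uintptr_sketch {
  unsigned char value[sizeof(void*)];
};

// Count the hexadecimal digits needed to print the pointer value, assuming
// the least significant byte is stored first (little endian).
inline int count_hex_digits(const fallback_uintptr_sketch& n) {
  int i = static_cast<int>(sizeof(void*)) - 1;
  while (i > 0 && n.value[i] == 0) --i;  // skip leading zero bytes
  // Every byte below the top nonzero one contributes two hex digits; the top
  // byte contributes one or two depending on whether its high nibble is set.
  return i * 2 + (n.value[i] > 0xf ? 2 : 1);
}

For the 0xface pointer used by the test below, value[0] == 0xce and value[1] == 0xfa, so the scan stops at i == 1 and the function returns 4.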

View File

@@ -232,13 +232,13 @@ namespace internal {
 #endif
 // A fallback implementation of uintptr_t for systems that lack it.
-struct uintptr {
+struct fallback_uintptr {
   unsigned char value[sizeof(void*)];
 };
 #ifdef UINTPTR_MAX
 using uintptr_t = ::uintptr_t;
 #else
-using uintptr_t = uintptr;
+using uintptr_t = fallback_uintptr;
 #endif
 template <typename T> inline bool use_grisu() {
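Because of the alias above, a pointer value can always be copied into an unsigned-integer-like object even when the platform lacks uintptr_t. A minimal sketch of that conversion, using std::memcpy in place of fmt's internal bit_cast (to_fallback is a name invented for this example):

#include <cstring>

struct fallback_uintptr {
  unsigned char value[sizeof(void*)];
};

// Copy the object representation of a pointer into the fallback type; this is
// the same trick the test below performs with internal::bit_cast.
inline fallback_uintptr to_fallback(const void* p) {
  fallback_uintptr result;
  std::memcpy(result.value, &p, sizeof(p));
  return result;
}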
@@ -700,7 +700,7 @@ template <unsigned BITS, typename UInt> inline int count_digits(UInt n) {
   return num_digits;
 }
-template <> int count_digits<4>(internal::uintptr n);
+template <> int count_digits<4>(internal::fallback_uintptr n);
 template <typename Char>
 inline size_t count_code_points(basic_string_view<Char> s) {
@@ -929,7 +929,7 @@ inline Char* format_uint(Char* buffer, UInt value, int num_digits,
 }
 template <unsigned BASE_BITS, typename Char>
-Char* format_uint(Char* buffer, internal::uintptr n, int num_digits,
+Char* format_uint(Char* buffer, internal::fallback_uintptr n, int num_digits,
                   bool = false) {
   auto char_digits = std::numeric_limits<unsigned char>::digits / 4;
   int start = (num_digits + char_digits - 1) / char_digits - 1;
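This hunk is also truncated after the index computation. A self-contained sketch of what a hex formatter over the byte array can look like (little endian and 8-bit bytes assumed; an illustration, not fmt's exact implementation):

// Write num_digits hexadecimal digits of the pointer bytes into buffer,
// most significant digit first, and return a pointer past the last one.
inline char* format_hex(char* buffer,
                        const unsigned char (&value)[sizeof(void*)],
                        int num_digits) {
  const char* hex = "0123456789abcdef";
  for (int d = num_digits - 1; d >= 0; --d) {
    unsigned char byte = value[d / 2];                // little endian
    unsigned nibble = (d % 2 != 0) ? byte >> 4 : byte & 0xfu;
    *buffer++ = hex[nibble];
  }
  return buffer;
}

With the 0xface value from the test below and num_digits == 4 this writes "face"; the real format_uint is additionally templated on the character type.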

View File

@@ -259,7 +259,7 @@ TEST(UtilTest, CountDigits) {
 TEST(UtilTest, WriteUIntPtr) {
   fmt::memory_buffer buf;
   fmt::writer writer(buf);
-  writer.write_pointer(fmt::internal::bit_cast<fmt::internal::uintptr>(
+  writer.write_pointer(fmt::internal::bit_cast<fmt::internal::fallback_uintptr>(
                            reinterpret_cast<void*>(0xface)),
                        nullptr);
   EXPECT_EQ("0xface", to_string(buf));
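For reference, the user-visible behavior this internal machinery backs is plain pointer formatting; a hedged usage sketch, relying only on the 0xface result that the test above pins down:

#include <fmt/format.h>
#include <cassert>

int main() {
  // Pointers are rendered in hexadecimal with a 0x prefix; the count_digits<4>
  // and format_uint overloads touched by this commit do the work internally.
  void* p = reinterpret_cast<void*>(0xface);
  assert(fmt::format("{}", fmt::ptr(p)) == "0xface");
}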