Use basic_string<char> for the UTF-8 test string

This is an attempt to fix the unit tests on DeveloperStudio 2013.
Currently, the size of the earth_africa string is reported as 2
on Windows.  But that is likely because it is being measured as 2 16-bit characters.
This commit is contained in:
David Neto 2015-10-27 11:10:29 -04:00
parent 1780fc4fcf
commit b6a43383e7

View File

@ -196,8 +196,13 @@ class EnumCase {
// each of which has a 4-byte UTF-8 encoding.
inline std::string MakeLongUTF8String(size_t num_4_byte_chars) {
// An example of a longest valid UTF-8 character.
const std::string earth_africa("\xF0\x9F\x8C\x8D");
// Be explicit about the character type because Microsoft compilers can
// otherwise interpret the character string as being over-wide (16-bit)
// characters. Ideally, we would just use a C++11 UTF-8 string literal,
// but we want to support older Microsoft compilers.
const std::basic_string<char> earth_africa("\xF0\x9F\x8C\x8D");
EXPECT_EQ(4, earth_africa.size());
std::string result;
result.reserve(num_4_byte_chars * 4);
for (size_t i = 0; i < num_4_byte_chars; i++ ) {