SkTestTypeface: correct encoding

Change-Id: I6ef875fc44278b873989d323dd26f6ed26b68f28
Reviewed-on: https://skia-review.googlesource.com/6889
Commit-Queue: Hal Canary <halcanary@google.com>
Reviewed-by: Cary Clark <caryclark@google.com>
This commit is contained in:
Hal Canary 2017-01-11 13:26:43 -05:00 committed by Skia Commit-Bot
parent 95e3c058ef
commit 58d2a72afa

View File

@@ -171,8 +171,8 @@ void SkTestTypeface::onGetFontDescriptor(SkFontDescriptor* desc, bool* isLocal)
 }
 int SkTestTypeface::onCharsToGlyphs(const void* chars, Encoding encoding,
-                                    uint16_t glyphs[], int glyphCount) const {
-    SkASSERT(encoding == kUTF16_Encoding);
+                                    uint16_t glyphs[], int glyphCount) const {
+    SkASSERT(encoding == kUTF32_Encoding);
     for (int index = 0; index < glyphCount; ++index) {
         SkUnichar ch = ((SkUnichar*) chars)[index];
         glyphs[index] = fTestFont->codeToIndex(ch);
@@ -214,7 +214,7 @@ protected:
     uint16_t generateCharToGlyph(SkUnichar uni) override {
         uint16_t glyph;
         (void) this->getTestTypeface()->onCharsToGlyphs((const void *) &uni,
-                SkTypeface::kUTF16_Encoding, &glyph, 1);
+                SkTypeface::kUTF32_Encoding, &glyph, 1);
         return glyph;
     }