Upgrade to Harfbuzz 8.3.0
Fixes: QTBUG-119150
Change-Id: I80f21f6f27cce14a1e91e822c3681ec491491ff1
Reviewed-by: Qt CI Bot <qt_ci_bot@qt-project.org>
Reviewed-by: Volker Hilsheimer <volker.hilsheimer@qt.io>
(cherry picked from commit 98ca28f7a6)
Reviewed-by: Qt Cherry-pick Bot <cherrypick_bot@qt-project.org>
(cherry picked from commit 67637f2c4056bccf71a880253eaaaa5f09494cf9)
Commit: e2cbce919c
Parent: 49bd0a8190
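Most hunks in this upgrade follow one recurring upstream HarfBuzz 8.3.0 hardening pattern: sanitize() methods now call hb_barrier () between checking a version, format, offset, or length field and acting on the data behind it, so the compiler cannot reorder the use of the data ahead of the check. As orientation only, here is a self-contained sketch of that check-then-barrier-then-use shape. It is not HarfBuzz code: FakeTable, payload_len, and the compiler_barrier () stand-in for HarfBuzz's internal hb_barrier () are invented for illustration.

// Sketch only: models the "validate, then barrier, then use" ordering that the
// hb_barrier () calls in this diff enforce. compiler_barrier () is a stand-in
// for HarfBuzz's internal hb_barrier (); the table layout here is made up.
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>

static inline bool compiler_barrier ()
{
  // Keeps the compiler from moving loads/stores across this point.
  std::atomic_signal_fence (std::memory_order_acq_rel);
  return true;
}

struct FakeTable
{
  uint16_t format;       // must be validated before any other field is trusted
  uint16_t payload_len;

  bool sanitize (size_t available_bytes) const
  {
    // 1. Check the fixed header fits and has a known format.
    if (available_bytes < sizeof (*this) || format != 1) return false;
    // 2. Barrier: the next read must stay ordered after the check above.
    if (!compiler_barrier ()) return false;
    // 3. Only now is the checked field used.
    return payload_len <= 4;
  }
};

int main ()
{
  FakeTable t { 1, 2 };
  std::printf ("%s\n", t.sanitize (sizeof (t)) ? "ok" : "rejected");
  return 0;
}

In the hunks below the same idea appears as a single added hb_barrier () statement, or an added hb_barrier () && term inside an existing return_trace (...) chain, placed right after the corresponding format, version, or offset check.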
src/3rdparty/harfbuzz-ng/README.md (vendored), 6 changed lines:
@@ -13,8 +13,10 @@
 HarfBuzz is a text shaping engine. It primarily supports [OpenType][1], but also
 [Apple Advanced Typography][2]. HarfBuzz is used in Android, Chrome,
 ChromeOS, Firefox, GNOME, GTK+, KDE, Qt, LibreOffice, OpenJDK, XeTeX,
-PlayStation, Microsoft Edge, Photoshop, Illustrator, InDesign,
-and other places.
+PlayStation, Microsoft Edge, Adobe Photoshop, Illustrator, InDesign,
+Godot Engine, and other places.
 
+[![xkcd-derived image](xkcd.png)](https://xkcd.com/2347/)
+
 For bug reports, mailing list, and other information please visit:
 
src/3rdparty/harfbuzz-ng/qt_attribution.json (vendored), 4 changed lines:
@@ -7,8 +7,8 @@
 
 "Description": "HarfBuzz is an OpenType text shaping engine.",
 "Homepage": "http://harfbuzz.org",
-"Version": "8.2.2",
-"DownloadLocation": "https://github.com/harfbuzz/harfbuzz/releases/tag/8.2.2",
+"Version": "8.3.0",
+"DownloadLocation": "https://github.com/harfbuzz/harfbuzz/releases/tag/8.3.0",
 
 "License": "MIT License",
 "LicenseId": "MIT",
@@ -204,6 +204,7 @@ struct IndexSubtable
 {
 TRACE_SANITIZE (this);
 if (!u.header.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.header.indexFormat)
 {
 case 1: return_trace (u.format1.sanitize (c, glyph_count));

@@ -378,6 +379,7 @@ struct IndexSubtableRecord
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 firstGlyphIndex <= lastGlyphIndex &&
 offsetToSubtable.sanitize (c, base, lastGlyphIndex - firstGlyphIndex + 1));
 }

@@ -635,6 +637,7 @@ struct BitmapSizeTable
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 indexSubtableArrayOffset.sanitize (c, base, numberOfIndexSubtables) &&
 horizontal.sanitize (c) &&
 vertical.sanitize (c));

@@ -738,7 +741,9 @@ struct CBLC
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (version.major == 2 || version.major == 3) &&
+hb_barrier () &&
 sizeTables.sanitize (c, this));
 }
 

@@ -975,6 +980,7 @@ struct CBDT
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (version.major == 2 || version.major == 3));
 }
 

@@ -1948,10 +1948,11 @@ struct COLR
 bool has_v0_data () const { return numBaseGlyphs; }
 bool has_v1_data () const
 {
-if (version == 1)
-return (this+baseGlyphList).len > 0;
+if (version != 1)
+return false;
+hb_barrier ();
 
-return false;
+return (this+baseGlyphList).len > 0;
 }
 
 unsigned int get_glyph_layers (hb_codepoint_t glyph,

@@ -2032,6 +2033,8 @@ struct COLR
 hb_set_t *palette_indices) const
 {
 if (version != 1) return;
+hb_barrier ();
+
 hb_set_t visited_glyphs;
 
 hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices);

@@ -2058,10 +2061,12 @@ struct COLR
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 (this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
 (this+layersZ).sanitize (c, numLayers) &&
 (version == 0 ||
-(version == 1 &&
+(hb_barrier () &&
+version == 1 &&
 baseGlyphList.sanitize (c, this) &&
 layerList.sanitize (c, this) &&
 clipList.sanitize (c, this) &&

@@ -2284,6 +2289,8 @@ struct COLR
 {
 if (version == 1)
 {
+hb_barrier ();
+
 const Paint *paint = get_base_glyph_paint (glyph);
 
 return paint != nullptr;

@@ -2313,6 +2320,8 @@ struct COLR
 
 if (version == 1)
 {
+hb_barrier ();
+
 const Paint *paint = get_base_glyph_paint (glyph);
 if (paint)
 {
@@ -214,13 +214,17 @@ struct CPAL
 hb_set_t *nameids_to_retain /* OUT */) const
 {
 if (version == 1)
+{
+hb_barrier ();
 v1 ().collect_name_ids (this, numPalettes, numColors, color_index_map, nameids_to_retain);
+}
 }
 
 private:
 const CPALV1Tail& v1 () const
 {
 if (version == 0) return Null (CPALV1Tail);
+hb_barrier ();
 return StructAfter<CPALV1Tail> (*this);
 }
 

@@ -312,7 +316,10 @@ struct CPAL
 return_trace (false);
 
 if (version == 1)
+{
+hb_barrier ();
 return_trace (v1 ().serialize (c->serializer, numPalettes, numColors, this, color_index_map));
+}
 
 return_trace (true);
 }

@@ -321,6 +328,7 @@ struct CPAL
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 (this+colorRecordsZ).sanitize (c, numColorRecords) &&
 colorRecordIndicesZ.sanitize (c, numPalettes) &&
 (version == 0 || v1 ().sanitize (c, this, numPalettes, numColors)));

@@ -368,6 +368,7 @@ struct sbix
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version >= 1 &&
 strikes.sanitize (c, this)));
 }
@@ -56,6 +56,7 @@ struct SVGDocumentIndexEntry
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 svgDoc.sanitize (c, base, svgDocLength));
 }
 

@@ -64,6 +64,7 @@ struct Coverage
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format)
 {
 case 1: return_trace (u.format1.sanitize (c));

@@ -291,6 +291,7 @@ struct CaretValue
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 case 2: return_trace (u.format2.sanitize (c));
@@ -441,6 +442,20 @@ struct MarkGlyphSetsFormat1
 bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const
 { return (this+coverage[set_index]).get_coverage (glyph_id) != NOT_COVERED; }
 
+void collect_used_mark_sets (const hb_set_t& glyph_set,
+hb_set_t& used_mark_sets /* OUT */) const
+{
+unsigned i = 0;
+for (const auto &offset : coverage)
+{
+const auto &cov = this+offset;
+if (cov.intersects (&glyph_set))
+used_mark_sets.add (i);
+
+i++;
+}
+}
+
 template <typename set_t>
 void collect_coverage (hb_vector_t<set_t> &sets) const
 {

@@ -461,6 +476,7 @@ struct MarkGlyphSetsFormat1
 bool ret = true;
 for (const Offset32To<Coverage>& offset : coverage.iter ())
 {
+auto snap = c->serializer->snapshot ();
 auto *o = out->coverage.serialize_append (c->serializer);
 if (unlikely (!o))
 {

@@ -468,11 +484,17 @@ struct MarkGlyphSetsFormat1
 break;
 }
 
-//not using o->serialize_subset (c, offset, this, out) here because
-//OTS doesn't allow null offset.
-//See issue: https://github.com/khaledhosny/ots/issues/172
+//skip empty coverage
 c->serializer->push ();
-c->dispatch (this+offset);
+bool res = false;
+if (offset) res = c->dispatch (this+offset);
+if (!res)
+{
+c->serializer->pop_discard ();
+c->serializer->revert (snap);
+(out->coverage.len)--;
+continue;
+}
 c->serializer->add_link (*o, c->serializer->pop_pack ());
 }
 

@@ -513,6 +535,15 @@ struct MarkGlyphSets
 }
 }
 
+void collect_used_mark_sets (const hb_set_t& glyph_set,
+hb_set_t& used_mark_sets /* OUT */) const
+{
+switch (u.format) {
+case 1: u.format1.collect_used_mark_sets (glyph_set, used_mark_sets); return;
+default:return;
+}
+}
+
 bool subset (hb_subset_context_t *c) const
 {
 TRACE_SUBSET (this);

@@ -526,6 +557,7 @@ struct MarkGlyphSets
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 default:return_trace (true);
@@ -600,6 +632,7 @@ struct GDEFVersion1_2
 attachList.sanitize (c, this) &&
 ligCaretList.sanitize (c, this) &&
 markAttachClassDef.sanitize (c, this) &&
+hb_barrier () &&
 (version.to_int () < 0x00010002u || markGlyphSetsDef.sanitize (c, this)) &&
 (version.to_int () < 0x00010003u || varStore.sanitize (c, this)));
 }

@@ -627,23 +660,28 @@ struct GDEFVersion1_2
 bool subset (hb_subset_context_t *c) const
 {
 TRACE_SUBSET (this);
-auto *out = c->serializer->embed (*this);
-if (unlikely (!out)) return_trace (false);
+auto *out = c->serializer->start_embed (*this);
+if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+out->version.major = version.major;
+out->version.minor = version.minor;
 bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
 bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
-bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
 bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
 
 bool subset_markglyphsetsdef = false;
+auto snapshot_version0 = c->serializer->snapshot ();
 if (version.to_int () >= 0x00010002u)
 {
+if (unlikely (!c->serializer->embed (markGlyphSetsDef))) return_trace (false);
 subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this);
 }
 
 bool subset_varstore = false;
+auto snapshot_version2 = c->serializer->snapshot ();
 if (version.to_int () >= 0x00010003u)
 {
+if (unlikely (!c->serializer->embed (varStore))) return_trace (false);
 if (c->plan->all_axes_pinned)
 out->varStore = 0;
 else if (c->plan->normalized_coords)

@@ -666,15 +704,21 @@ struct GDEFVersion1_2
 subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
 }
 
+
 if (subset_varstore)
 {
 out->version.minor = 3;
+c->plan->has_gdef_varstore = true;
 } else if (subset_markglyphsetsdef) {
 out->version.minor = 2;
+c->serializer->revert (snapshot_version2);
 } else {
 out->version.minor = 0;
+c->serializer->revert (snapshot_version0);
 }
 
+bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
+
 return_trace (subset_glyphclassdef || subset_attachlist ||
 subset_ligcaretlist || subset_markattachclassdef ||
 (out->version.to_int () >= 0x00010002u && subset_markglyphsetsdef) ||

@@ -709,6 +753,7 @@ struct GDEF
 {
 TRACE_SANITIZE (this);
 if (unlikely (!u.version.sanitize (c))) return_trace (false);
+hb_barrier ();
 switch (u.version.major) {
 case 1: return_trace (u.version1.sanitize (c));
 #ifndef HB_NO_BEYOND_64K
@@ -25,6 +25,7 @@ struct Anchor
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 case 2: return_trace (u.format2.sanitize (c));

@@ -38,9 +38,15 @@ struct AnchorFormat3
 *y = font->em_fscale_y (yCoordinate);
 
 if ((font->x_ppem || font->num_coords) && xDeviceTable.sanitize (&c->sanitizer, this))
+{
+hb_barrier ();
 *x += (this+xDeviceTable).get_x_delta (font, c->var_store, c->var_store_cache);
+}
 if ((font->y_ppem || font->num_coords) && yDeviceTable.sanitize (&c->sanitizer, this))
+{
+hb_barrier ();
 *y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
+}
 }
 
 bool subset (hb_subset_context_t *c) const

@@ -8,7 +8,7 @@ namespace GPOS_impl {
 struct AnchorMatrix
 {
 HBUINT16 rows; /* Number of rows */
-UnsizedArrayOf<Offset16To<Anchor>>
+UnsizedArrayOf<Offset16To<Anchor, AnchorMatrix>>
 matrixZ; /* Matrix of offsets to Anchor tables--
 * from beginning of AnchorMatrix table */
 public:

@@ -18,6 +18,7 @@ struct AnchorMatrix
 {
 TRACE_SANITIZE (this);
 if (!c->check_struct (this)) return_trace (false);
+hb_barrier ();
 if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
 unsigned int count = rows * cols;
 if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);

@@ -25,6 +26,7 @@ struct AnchorMatrix
 if (c->lazy_some_gpos)
 return_trace (true);
 
+hb_barrier ();
 for (unsigned int i = 0; i < count; i++)
 if (!matrixZ[i].sanitize (c, this)) return_trace (false);
 return_trace (true);

@@ -38,6 +40,7 @@ struct AnchorMatrix
 if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
 auto &offset = matrixZ[row * cols + col];
 if (unlikely (!offset.sanitize (&c->sanitizer, this))) return Null (Anchor);
+hb_barrier ();
 *found = !offset.is_null ();
 return this+offset;
 }

@@ -65,15 +68,14 @@ struct AnchorMatrix
 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
 
 out->rows = num_rows;
-bool ret = false;
 for (const unsigned i : index_iter)
 {
 auto *offset = c->serializer->embed (matrixZ[i]);
 if (!offset) return_trace (false);
-ret |= offset->serialize_subset (c, matrixZ[i], this);
+offset->serialize_subset (c, matrixZ[i], this);
 }
 
-return_trace (ret);
+return_trace (true);
 }
 };
 
@@ -23,7 +23,7 @@ static void SinglePos_serialize (hb_serialize_context_t *c,
 const SrcLookup *src,
 Iterator it,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
-bool all_axes_pinned);
+unsigned new_format);
 
 
 }

@@ -11,21 +11,21 @@ struct EntryExitRecord
 {
 friend struct CursivePosFormat1;
 
-bool sanitize (hb_sanitize_context_t *c, const void *base) const
+bool sanitize (hb_sanitize_context_t *c, const struct CursivePosFormat1 *base) const
 {
 TRACE_SANITIZE (this);
 return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
 }
 
 void collect_variation_indices (hb_collect_variation_indices_context_t *c,
-const void *src_base) const
+const struct CursivePosFormat1 *src_base) const
 {
 (src_base+entryAnchor).collect_variation_indices (c);
 (src_base+exitAnchor).collect_variation_indices (c);
 }
 
 bool subset (hb_subset_context_t *c,
-const void *src_base) const
+const struct CursivePosFormat1 *src_base) const
 {
 TRACE_SERIALIZE (this);
 auto *out = c->serializer->embed (this);

@@ -38,11 +38,11 @@ struct EntryExitRecord
 }
 
 protected:
-Offset16To<Anchor>
+Offset16To<Anchor, struct CursivePosFormat1>
 entryAnchor; /* Offset to EntryAnchor table--from
 * beginning of CursivePos
 * subtable--may be NULL */
-Offset16To<Anchor>
+Offset16To<Anchor, struct CursivePosFormat1>
 exitAnchor; /* Offset to ExitAnchor table--from
 * beginning of CursivePos
 * subtable--may be NULL */

@@ -128,6 +128,7 @@ struct CursivePosFormat1
 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
 if (!this_record.entryAnchor ||
 unlikely (!this_record.entryAnchor.sanitize (&c->sanitizer, this))) return_trace (false);
+hb_barrier ();
 
 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
 skippy_iter.reset_fast (buffer->idx);

@@ -145,6 +146,7 @@ struct CursivePosFormat1
 buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
 return_trace (false);
 }
+hb_barrier ();
 
 unsigned int i = skippy_iter.idx;
 unsigned int j = buffer->idx;

@@ -262,7 +264,7 @@ struct CursivePosFormat1
 hb_requires (hb_is_iterator (Iterator))>
 void serialize (hb_subset_context_t *c,
 Iterator it,
-const void *src_base)
+const struct CursivePosFormat1 *src_base)
 {
 if (unlikely (!c->serializer->extend_min ((*this)))) return;
 this->format = 1;
@@ -42,6 +42,7 @@ struct MarkMarkPosFormat1_2
 mark1Coverage.sanitize (c, this) &&
 mark2Coverage.sanitize (c, this) &&
 mark1Array.sanitize (c, this) &&
+hb_barrier () &&
 mark2Array.sanitize (c, this, (unsigned int) classCount));
 }
 

@@ -36,6 +36,7 @@ struct PairPosFormat1_3
 TRACE_SANITIZE (this);
 
 if (!c->check_struct (this)) return_trace (false);
+hb_barrier ();
 
 unsigned int len1 = valueFormat[0].get_len ();
 unsigned int len2 = valueFormat[1].get_len ();

@@ -131,20 +132,33 @@ struct PairPosFormat1_3
 auto *out = c->serializer->start_embed (*this);
 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
 out->format = format;
-out->valueFormat[0] = valueFormat[0];
-out->valueFormat[1] = valueFormat[1];
-if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
-{
-hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
-out->valueFormat[0] = newFormats.first;
-out->valueFormat[1] = newFormats.second;
-}
 
-if (c->plan->all_axes_pinned)
+hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat[0], valueFormat[1]);
+
+if (c->plan->normalized_coords)
 {
-out->valueFormat[0] = out->valueFormat[0].drop_device_table_flags ();
-out->valueFormat[1] = out->valueFormat[1].drop_device_table_flags ();
+/* all device flags will be dropped when full instancing, no need to strip
+* hints, also do not strip emtpy cause we don't compute the new default
+* value during stripping */
+newFormats = compute_effective_value_formats (glyphset, false, false, &c->plan->layout_variation_idx_delta_map);
 }
+/* do not strip hints for VF */
+else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+{
+hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
+bool has_fvar = (blob != hb_blob_get_empty ());
+hb_blob_destroy (blob);
+
+bool strip = !has_fvar;
+/* special case: strip hints when a VF has no GDEF varstore after
+* subsetting*/
+if (has_fvar && !c->plan->has_gdef_varstore)
+strip = true;
+newFormats = compute_effective_value_formats (glyphset, strip, true);
+}
+
+out->valueFormat[0] = newFormats.first;
+out->valueFormat[1] = newFormats.second;
 
 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
 

@@ -175,7 +189,9 @@ struct PairPosFormat1_3
 }
 
 
-hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
+hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset,
+bool strip_hints, bool strip_empty,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map = nullptr) const
 {
 unsigned record_size = PairSet::get_size (valueFormat);
 

@@ -195,8 +211,8 @@ struct PairPosFormat1_3
 {
 if (record->intersects (glyphset))
 {
-format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
-format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
+format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 (), strip_hints, strip_empty, &set, varidx_delta_map);
+format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]), strip_hints, strip_empty, &set, varidx_delta_map);
 }
 record = &StructAtOffset<const PairValueRecord> (record, record_size);
 }
@@ -8,7 +8,7 @@ namespace Layout {
 namespace GPOS_impl {
 
 template <typename Types>
-struct PairPosFormat2_4
+struct PairPosFormat2_4 : ValueBase
 {
 protected:
 HBUINT16 format; /* Format identifier--format = 2 */

@@ -287,18 +287,31 @@ struct PairPosFormat2_4
 unsigned len2 = valueFormat2.get_len ();
 
 hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
-if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
-newFormats = compute_effective_value_formats (klass1_map, klass2_map);
+if (c->plan->normalized_coords)
+{
+/* in case of full instancing, all var device flags will be dropped so no
+* need to strip hints here */
+newFormats = compute_effective_value_formats (klass1_map, klass2_map, false, false, &c->plan->layout_variation_idx_delta_map);
+}
+/* do not strip hints for VF */
+else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+{
+hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
+bool has_fvar = (blob != hb_blob_get_empty ());
+hb_blob_destroy (blob);
+
+bool strip = !has_fvar;
+/* special case: strip hints when a VF has no GDEF varstore after
+* subsetting*/
+if (has_fvar && !c->plan->has_gdef_varstore)
+strip = true;
+newFormats = compute_effective_value_formats (klass1_map, klass2_map, strip, true);
+}
 
 out->valueFormat1 = newFormats.first;
 out->valueFormat2 = newFormats.second;
 
-if (c->plan->all_axes_pinned)
-{
-out->valueFormat1 = out->valueFormat1.drop_device_table_flags ();
-out->valueFormat2 = out->valueFormat2.drop_device_table_flags ();
-}
 
 unsigned total_len = len1 + len2;
 hb_vector_t<unsigned> class2_idxs (+ hb_range ((unsigned) class2Count) | hb_filter (klass2_map));
 for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))

@@ -326,7 +339,9 @@ struct PairPosFormat2_4
 
 
 hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
-const hb_map_t& klass2_map) const
+const hb_map_t& klass2_map,
+bool strip_hints, bool strip_empty,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map = nullptr) const
 {
 unsigned len1 = valueFormat1.get_len ();
 unsigned len2 = valueFormat2.get_len ();

@@ -340,8 +355,8 @@ struct PairPosFormat2_4
 for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
 {
 unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_size;
-format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
-format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
+format1 = format1 | valueFormat1.get_effective_format (&values[idx], strip_hints, strip_empty, this, varidx_delta_map);
+format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1], strip_hints, strip_empty, this, varidx_delta_map);
 }
 
 if (format1 == valueFormat1 && format2 == valueFormat2)
@@ -9,7 +9,7 @@ namespace GPOS_impl {
 
 
 template <typename Types>
-struct PairSet
+struct PairSet : ValueBase
 {
 template <typename Types2>
 friend struct PairPosFormat1_3;

@@ -45,10 +45,12 @@ struct PairSet
 bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
 {
 TRACE_SANITIZE (this);
-if (!(c->check_struct (this)
-&& c->check_range (&firstPairValueRecord,
+if (!(c->check_struct (this) &&
+hb_barrier () &&
+c->check_range (&firstPairValueRecord,
 len,
 closure->stride))) return_trace (false);
+hb_barrier ();
 
 unsigned int count = len;
 const PairValueRecord *record = &firstPairValueRecord;

@@ -29,7 +29,7 @@ struct PairValueRecord
 
 struct context_t
 {
-const void *base;
+const ValueBase *base;
 const ValueFormat *valueFormats;
 const ValueFormat *newFormats;
 unsigned len1; /* valueFormats[0].get_len() */

@@ -62,7 +62,7 @@ struct PairValueRecord
 
 void collect_variation_indices (hb_collect_variation_indices_context_t *c,
 const ValueFormat *valueFormats,
-const void *base) const
+const ValueBase *base) const
 {
 unsigned record1_len = valueFormats[0].get_len ();
 unsigned record2_len = valueFormats[1].get_len ();
@@ -39,14 +39,12 @@ struct SinglePos
 const SrcLookup* src,
 Iterator glyph_val_iter_pairs,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
-bool all_axes_pinned)
+unsigned newFormat)
 {
 if (unlikely (!c->extend_min (u.format))) return;
 unsigned format = 2;
-ValueFormat new_format = src->get_value_format ();
-if (all_axes_pinned)
-new_format = new_format.drop_device_table_flags ();
+ValueFormat new_format;
+new_format = newFormat;
 
 if (glyph_val_iter_pairs)
 format = get_format (glyph_val_iter_pairs);

@@ -89,8 +87,8 @@ SinglePos_serialize (hb_serialize_context_t *c,
 const SrcLookup *src,
 Iterator it,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
-bool all_axes_pinned)
-{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, all_axes_pinned); }
+unsigned new_format)
+{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, new_format); }
 
 
 }
@@ -8,7 +8,7 @@ namespace OT {
 namespace Layout {
 namespace GPOS_impl {
 
-struct SinglePosFormat1
+struct SinglePosFormat1 : ValueBase
 {
 protected:
 HBUINT16 format; /* Format identifier--format = 1 */

@@ -28,6 +28,7 @@ struct SinglePosFormat1
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
 coverage.sanitize (c, this) &&
+hb_barrier () &&
 /* The coverage table may use a range to represent a set
 * of glyphs, which means a small number of bytes can
 * generate a large glyph set. Manually modify the

@@ -146,6 +147,30 @@ struct SinglePosFormat1
 hb_set_t intersection;
 (this+coverage).intersect_set (glyphset, intersection);
 
+unsigned new_format = valueFormat;
+
+if (c->plan->normalized_coords)
+{
+new_format = valueFormat.get_effective_format (values.arrayZ, false, false, this, &c->plan->layout_variation_idx_delta_map);
+}
+/* do not strip hints for VF */
+else if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+{
+hb_blob_t* blob = hb_face_reference_table (c->plan->source, HB_TAG ('f','v','a','r'));
+bool has_fvar = (blob != hb_blob_get_empty ());
+hb_blob_destroy (blob);
+
+bool strip = !has_fvar;
+/* special case: strip hints when a VF has no GDEF varstore after
+* subsetting*/
+if (has_fvar && !c->plan->has_gdef_varstore)
+strip = true;
+new_format = valueFormat.get_effective_format (values.arrayZ,
+strip, /* strip hints */
+true, /* strip empty */
+this, nullptr);
+}
+
 auto it =
 + hb_iter (intersection)
 | hb_map_retains_sorting (glyph_map)

@@ -153,7 +178,7 @@ struct SinglePosFormat1
 ;
 
 bool ret = bool (it);
-SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
+SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, new_format);
 return_trace (ret);
 }
 };
@@ -7,7 +7,7 @@ namespace OT {
 namespace Layout {
 namespace GPOS_impl {
 
-struct SinglePosFormat2
+struct SinglePosFormat2 : ValueBase
 {
 protected:
 HBUINT16 format; /* Format identifier--format = 2 */

@@ -143,6 +143,37 @@ struct SinglePosFormat2
 coverage.serialize_serialize (c, glyphs);
 }
 
+template<typename Iterator,
+hb_requires (hb_is_iterator (Iterator))>
+unsigned compute_effective_format (const hb_face_t *face,
+Iterator it,
+bool is_instancing, bool strip_hints,
+bool has_gdef_varstore,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
+{
+hb_blob_t* blob = hb_face_reference_table (face, HB_TAG ('f','v','a','r'));
+bool has_fvar = (blob != hb_blob_get_empty ());
+hb_blob_destroy (blob);
+
+unsigned new_format = 0;
+if (is_instancing)
+{
+new_format = new_format | valueFormat.get_effective_format (+ it | hb_map (hb_second), false, false, this, varidx_delta_map);
+}
+/* do not strip hints for VF */
+else if (strip_hints)
+{
+bool strip = !has_fvar;
+if (has_fvar && !has_gdef_varstore)
+strip = true;
+new_format = new_format | valueFormat.get_effective_format (+ it | hb_map (hb_second), strip, true, this, nullptr);
+}
+else
+new_format = valueFormat;
+
+return new_format;
+}
+
 bool subset (hb_subset_context_t *c) const
 {
 TRACE_SUBSET (this);

@@ -163,8 +194,13 @@ struct SinglePosFormat2
 })
 ;
 
+unsigned new_format = compute_effective_format (c->plan->source, it,
+bool (c->plan->normalized_coords),
+bool (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING),
+c->plan->has_gdef_varstore,
+&c->plan->layout_variation_idx_delta_map);
 bool ret = bool (it);
-SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
+SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, new_format);
 return_trace (ret);
 }
 };
@@ -9,6 +9,8 @@ namespace GPOS_impl {
 
 typedef HBUINT16 Value;
 
+struct ValueBase {}; // Dummy base class tag for OffsetTo<Value> bases.
+
 typedef UnsizedArrayOf<Value> ValueRecord;
 
 struct ValueFormat : HBUINT16

@@ -78,7 +80,7 @@ struct ValueFormat : HBUINT16
 }
 
 bool apply_value (hb_ot_apply_context_t *c,
-const void *base,
+const ValueBase *base,
 const Value *values,
 hb_glyph_position_t &glyph_pos) const
 {

@@ -142,11 +144,29 @@ struct ValueFormat : HBUINT16
 return ret;
 }
 
-unsigned int get_effective_format (const Value *values) const
+unsigned int get_effective_format (const Value *values, bool strip_hints, bool strip_empty, const ValueBase *base,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
 {
 unsigned int format = *this;
 for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
-if (format & flag) should_drop (*values++, (Flags) flag, &format);
+if (format & flag)
+{
+if (strip_hints && flag >= xPlaDevice)
+{
+format = format & ~flag;
+values++;
+continue;
+}
+if (varidx_delta_map && flag >= xPlaDevice)
+{
+update_var_flag (values++, (Flags) flag, &format, base, varidx_delta_map);
+continue;
+}
+/* do not strip empty when instancing, cause we don't know whether the new
+* default value is 0 or not */
+if (strip_empty) should_drop (*values, (Flags) flag, &format);
+values++;
+}
 }
 
 return format;

@@ -154,18 +174,19 @@ struct ValueFormat : HBUINT16
 
 template<typename Iterator,
 hb_requires (hb_is_iterator (Iterator))>
-unsigned int get_effective_format (Iterator it) const {
+unsigned int get_effective_format (Iterator it, bool strip_hints, bool strip_empty, const ValueBase *base,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const {
 unsigned int new_format = 0;
 
 for (const hb_array_t<const Value>& values : it)
-new_format = new_format | get_effective_format (&values);
+new_format = new_format | get_effective_format (&values, strip_hints, strip_empty, base, varidx_delta_map);
 
 return new_format;
 }
 
 void copy_values (hb_serialize_context_t *c,
 unsigned int new_format,
-const void *base,
+const ValueBase *base,
 const Value *values,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
 {

@@ -217,7 +238,7 @@ struct ValueFormat : HBUINT16
 }
 
 void collect_variation_indices (hb_collect_variation_indices_context_t *c,
-const void *base,
+const ValueBase *base,
 const hb_array_t<const Value>& values) const
 {
 unsigned format = *this;

@@ -251,17 +272,8 @@ struct ValueFormat : HBUINT16
 }
 }
 
-unsigned drop_device_table_flags () const
-{
-unsigned format = *this;
-for (unsigned flag = xPlaDevice; flag <= yAdvDevice; flag = flag << 1)
-format = format & ~flag;
-
-return format;
-}
-
 private:
-bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
+bool sanitize_value_devices (hb_sanitize_context_t *c, const ValueBase *base, const Value *values) const
 {
 unsigned int format = *this;
 

@@ -278,17 +290,17 @@ struct ValueFormat : HBUINT16
 return true;
 }
 
-static inline Offset16To<Device>& get_device (Value* value)
+static inline Offset16To<Device, ValueBase>& get_device (Value* value)
 {
-return *static_cast<Offset16To<Device> *> (value);
+return *static_cast<Offset16To<Device, ValueBase> *> (value);
 }
-static inline const Offset16To<Device>& get_device (const Value* value)
+static inline const Offset16To<Device, ValueBase>& get_device (const Value* value)
 {
-return *static_cast<const Offset16To<Device> *> (value);
+return *static_cast<const Offset16To<Device, ValueBase> *> (value);
 }
 static inline const Device& get_device (const Value* value,
 bool *worked,
-const void *base,
+const ValueBase *base,
 hb_sanitize_context_t &c)
 {
 if (worked) *worked |= bool (*value);

@@ -296,12 +308,13 @@ struct ValueFormat : HBUINT16
 
 if (unlikely (!offset.sanitize (&c, base)))
 return Null(Device);
+hb_barrier ();
 
 return base + offset;
 }
 
 void add_delta_to_value (HBINT16 *value,
-const void *base,
+const ValueBase *base,
 const Value *src_value,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
 {

@@ -313,7 +326,8 @@ struct ValueFormat : HBUINT16
 *value += hb_second (*varidx_delta);
 }
 
-bool copy_device (hb_serialize_context_t *c, const void *base,
+bool copy_device (hb_serialize_context_t *c,
+const ValueBase *base,
 const Value *src_value,
 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
 unsigned int new_format, Flags flag) const

@@ -354,7 +368,7 @@ struct ValueFormat : HBUINT16
 return (format & devices) != 0;
 }
 
-bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
+bool sanitize_value (hb_sanitize_context_t *c, const ValueBase *base, const Value *values) const
 {
 TRACE_SANITIZE (this);
 

@@ -366,7 +380,7 @@ struct ValueFormat : HBUINT16
 return_trace (!has_device () || sanitize_value_devices (c, base, values));
 }
 
-bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
+bool sanitize_values (hb_sanitize_context_t *c, const ValueBase *base, const Value *values, unsigned int count) const
 {
 TRACE_SANITIZE (this);
 unsigned size = get_size ();

@@ -376,11 +390,12 @@ struct ValueFormat : HBUINT16
 if (c->lazy_some_gpos)
 return_trace (true);
 
+hb_barrier ();
 return_trace (sanitize_values_stride_unsafe (c, base, values, count, size));
 }
 
 /* Just sanitize referenced Device tables. Doesn't check the values themselves. */
-bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
+bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const ValueBase *base, const Value *values, unsigned int count, unsigned int stride) const
 {
 TRACE_SANITIZE (this);
 

@@ -403,6 +418,20 @@ struct ValueFormat : HBUINT16
 *format = *format & ~flag;
 }
 
+void update_var_flag (const Value* value, Flags flag,
+unsigned int* format, const ValueBase *base,
+const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map) const
+{
+if (*value)
+{
+unsigned varidx = (base + get_device (value)).get_variation_index ();
+hb_pair_t<unsigned, int> *varidx_delta;
+if (varidx_delta_map->has (varidx, &varidx_delta) &&
+varidx_delta->first != HB_OT_LAYOUT_NO_VARIATIONS_INDEX)
+return;
+}
+*format = *format & ~flag;
+}
 };
 
 }
@ -33,9 +33,11 @@ struct ReverseChainSingleSubstFormat1
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
|
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
|
||||||
return_trace (false);
|
return_trace (false);
|
||||||
|
hb_barrier ();
|
||||||
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
|
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
|
||||||
if (!lookahead.sanitize (c, this))
|
if (!lookahead.sanitize (c, this))
|
||||||
return_trace (false);
|
return_trace (false);
|
||||||
|
hb_barrier ();
|
||||||
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
|
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
|
||||||
return_trace (substitute.sanitize (c));
|
return_trace (substitute.sanitize (c));
|
||||||
}
|
}
|
||||||
@ -109,12 +111,12 @@ struct ReverseChainSingleSubstFormat1
|
|||||||
bool apply (hb_ot_apply_context_t *c) const
|
bool apply (hb_ot_apply_context_t *c) const
|
||||||
{
|
{
|
||||||
TRACE_APPLY (this);
|
TRACE_APPLY (this);
|
||||||
if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
|
|
||||||
return_trace (false); /* No chaining to this type */
|
|
||||||
|
|
||||||
unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
|
unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
|
||||||
if (likely (index == NOT_COVERED)) return_trace (false);
|
if (likely (index == NOT_COVERED)) return_trace (false);
|
||||||
|
|
||||||
|
if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
|
||||||
|
return_trace (false); /* No chaining to this type */
|
||||||
|
|
||||||
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
|
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
|
||||||
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
|
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
|
||||||
|
|
||||||
|
@ -38,8 +38,8 @@ struct SmallTypes {
|
|||||||
using HBUINT = HBUINT16;
|
using HBUINT = HBUINT16;
|
||||||
using HBGlyphID = HBGlyphID16;
|
using HBGlyphID = HBGlyphID16;
|
||||||
using Offset = Offset16;
|
using Offset = Offset16;
|
||||||
template <typename Type, bool has_null=true>
|
template <typename Type, typename BaseType=void, bool has_null=true>
|
||||||
using OffsetTo = OT::Offset16To<Type, has_null>;
|
using OffsetTo = OT::Offset16To<Type, BaseType, has_null>;
|
||||||
template <typename Type>
|
template <typename Type>
|
||||||
using ArrayOf = OT::Array16Of<Type>;
|
using ArrayOf = OT::Array16Of<Type>;
|
||||||
template <typename Type>
|
template <typename Type>
|
||||||
@ -52,8 +52,8 @@ struct MediumTypes {
|
|||||||
using HBUINT = HBUINT24;
|
using HBUINT = HBUINT24;
|
||||||
using HBGlyphID = HBGlyphID24;
|
using HBGlyphID = HBGlyphID24;
|
||||||
using Offset = Offset24;
|
using Offset = Offset24;
|
||||||
template <typename Type, bool has_null=true>
|
template <typename Type, typename BaseType=void, bool has_null=true>
|
||||||
using OffsetTo = OT::Offset24To<Type, has_null>;
|
using OffsetTo = OT::Offset24To<Type, BaseType, has_null>;
|
||||||
template <typename Type>
|
template <typename Type>
|
||||||
using ArrayOf = OT::Array24Of<Type>;
|
using ArrayOf = OT::Array24Of<Type>;
|
||||||
template <typename Type>
|
template <typename Type>
|
||||||
|
5 changes: src/3rdparty/harfbuzz-ng/src/OT/name/name.hh (vendored)
@@ -242,7 +242,9 @@ struct NameRecord
 bool sanitize (hb_sanitize_context_t *c, const void *base) const
 {
 TRACE_SANITIZE (this);
-return_trace (c->check_struct (this) && offset.sanitize (c, base, length));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+offset.sanitize (c, base, length));
 }

 HBUINT16 platformID; /* Platform ID. */
@@ -465,6 +467,7 @@ struct name
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (format == 0 || format == 1) &&
 c->check_array (nameRecordZ.arrayZ, count) &&
 c->check_range (this, stringOffset) &&
@@ -39,6 +39,7 @@ struct ClassDefFormat1 : public OT::ClassDefFormat1_3<SmallTypes>
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 constexpr unsigned min_size = OT::ClassDefFormat1_3<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();
 return vertex_len >= min_size + classValue.get_size () - classValue.len.get_size ();
 }
 };
@@ -50,6 +51,7 @@ struct ClassDefFormat2 : public OT::ClassDefFormat2_4<SmallTypes>
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 constexpr unsigned min_size = OT::ClassDefFormat2_4<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();
 return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
 }
 };
@@ -114,6 +116,7 @@ struct ClassDef : public OT::ClassDef
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < OT::ClassDef::min_size) return false;
+hb_barrier ();
 switch (u.format)
 {
 case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);
@@ -39,6 +39,7 @@ struct CoverageFormat1 : public OT::Layout::Common::CoverageFormat1_3<SmallTypes
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 constexpr unsigned min_size = OT::Layout::Common::CoverageFormat1_3<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();
 return vertex_len >= min_size + glyphArray.get_size () - glyphArray.len.get_size ();
 }
 };
@@ -50,6 +51,7 @@ struct CoverageFormat2 : public OT::Layout::Common::CoverageFormat2_4<SmallTypes
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 constexpr unsigned min_size = OT::Layout::Common::CoverageFormat2_4<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();
 return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
 }
 };
@@ -138,6 +140,7 @@ struct Coverage : public OT::Layout::Common::Coverage
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < OT::Layout::Common::Coverage::min_size) return false;
+hb_barrier ();
 switch (u.format)
 {
 case 1: return ((CoverageFormat1*)this)->sanitize (vertex);
2 changes: src/3rdparty/harfbuzz-ng/src/graph/graph.hh (vendored)
@@ -567,6 +567,7 @@ struct graph_t
 update_distances ();

 hb_priority_queue_t<int64_t> queue;
+queue.alloc (vertices_.length);
 hb_vector_t<vertex_t> &sorted_graph = vertices_scratch_;
 if (unlikely (!check_success (sorted_graph.resize (vertices_.length)))) return;
 hb_vector_t<unsigned> id_map;
@@ -1370,6 +1371,7 @@ struct graph_t
 vertices_.tail ().distance = 0;

 hb_priority_queue_t<int64_t> queue;
+queue.alloc (count);
 queue.insert (0, vertices_.length - 1);

 hb_vector_t<bool> visited;
@@ -76,6 +76,7 @@ struct Lookup : public OT::Lookup
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < OT::Lookup::min_size) return false;
+hb_barrier ();
 return vertex_len >= this->get_size ();
 }

@@ -351,6 +352,7 @@ struct LookupList : public OT::LookupList<T>
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < OT::LookupList<T>::min_size) return false;
+hb_barrier ();
 return vertex_len >= OT::LookupList<T>::item_size * this->len;
 }
 };
@@ -364,6 +366,7 @@ struct GSTAR : public OT::GSUBGPOS
 GSTAR* gstar = (GSTAR*) r.obj.head;
 if (!gstar || !gstar->sanitize (r))
 return nullptr;
+hb_barrier ();

 return gstar;
 }
@@ -383,6 +386,7 @@ struct GSTAR : public OT::GSUBGPOS
 {
 int64_t len = vertex.obj.tail - vertex.obj.head;
 if (len < OT::GSUBGPOS::min_size) return false;
+hb_barrier ();
 return len >= get_size ();
 }

@@ -40,6 +40,7 @@ struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < AnchorMatrix::min_size) return false;
+hb_barrier ();

 return vertex_len >= AnchorMatrix::min_size +
 OT::Offset16::static_size * class_count * this->rows;
@@ -128,6 +129,7 @@ struct MarkArray : public OT::Layout::GPOS_impl::MarkArray
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 unsigned min_size = MarkArray::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();

 return vertex_len >= get_size ();
 }
@@ -495,6 +497,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < u.format.get_size ()) return false;
+hb_barrier ();

 switch (u.format) {
 case 1:
@@ -42,6 +42,7 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();

 return vertex_len >=
 min_size + pairSet.get_size () - pairSet.len.get_size();
@@ -198,6 +199,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
 size_t vertex_len = vertex.table_size ();
 unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
 if (vertex_len < min_size) return false;
+hb_barrier ();

 const unsigned class1_count = class1Count;
 return vertex_len >=
@@ -625,6 +627,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
 {
 int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
 if (vertex_len < u.format.get_size ()) return false;
+hb_barrier ();

 switch (u.format) {
 case 1:
@@ -75,6 +75,7 @@ struct ankr
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version == 0 &&
 c->check_range (this, anchorData) &&
 lookupTable.sanitize (c, this, &(this+anchorData))));
@@ -123,6 +123,7 @@ struct bsln
 TRACE_SANITIZE (this);
 if (unlikely (!(c->check_struct (this) && defaultBaseline < 32)))
 return_trace (false);
+hb_barrier ();

 switch (format)
 {
@@ -191,6 +191,7 @@ struct LookupSegmentArray
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 first <= last &&
 valuesZ.sanitize (c, base, last - first + 1));
 }
@@ -199,6 +200,7 @@ struct LookupSegmentArray
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 first <= last &&
 valuesZ.sanitize (c, base, last - first + 1, std::forward<Ts> (ds)...));
 }
@@ -360,6 +362,7 @@ struct LookupFormat10
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 valueSize <= 4 &&
 valueArrayZ.sanitize (c, glyphCount * valueSize));
 }
@@ -415,6 +418,7 @@ struct Lookup
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 0: return_trace (u.format0.sanitize (c));
 case 2: return_trace (u.format2.sanitize (c));
@@ -429,6 +433,7 @@ struct Lookup
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 0: return_trace (u.format0.sanitize (c, base));
 case 2: return_trace (u.format2.sanitize (c, base));
@@ -558,6 +563,7 @@ struct StateTable
 {
 TRACE_SANITIZE (this);
 if (unlikely (!(c->check_struct (this) &&
+hb_barrier () &&
 nClasses >= 4 /* Ensure pre-defined classes fit. */ &&
 classTable.sanitize (c, this)))) return_trace (false);

@@ -138,6 +138,7 @@ struct FeatureName
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 (base+settingTableZ).sanitize (c, nSettings)));
 }

@@ -200,6 +201,7 @@ struct feat
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 namesZ.sanitize (c, featureNameCount, this)));
 }
@@ -185,6 +185,7 @@ struct ActionSubrecord
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (u.header.actionType)
 {
@@ -220,6 +221,7 @@ struct PostcompensationActionChain
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 unsigned int offset = min_size;
 for (unsigned int i = 0; i < count; i++)
@@ -389,6 +391,7 @@ struct just
 TRACE_SANITIZE (this);

 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 horizData.sanitize (c, this, this) &&
 vertData.sanitize (c, this, this)));
@@ -54,6 +54,7 @@ kerxTupleKern (int value,
 unsigned int offset = value;
 const FWORD *pv = &StructAtOffset<FWORD> (base, offset);
 if (unlikely (!c->sanitizer.check_array (pv, tupleCount))) return 0;
+hb_barrier ();
 return *pv;
 }

@@ -259,6 +260,7 @@ struct KerxSubTableFormat1
 depth = 0;
 return;
 }
+hb_barrier ();

 hb_mask_t kern_mask = c->plan->kern_mask;

@@ -389,6 +391,7 @@ struct KerxSubTableFormat2
 kern_idx = Types::offsetToIndex (kern_idx, this, arrayZ.arrayZ);
 const FWORD *v = &arrayZ[kern_idx];
 if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
+hb_barrier ();

 return kerxTupleKern (*v, header.tuple_count (), this, c);
 }
@@ -429,6 +432,7 @@ struct KerxSubTableFormat2
 return_trace (likely (c->check_struct (this) &&
 leftClassTable.sanitize (c, this) &&
 rightClassTable.sanitize (c, this) &&
+hb_barrier () &&
 c->check_range (this, array)));
 }

@@ -509,6 +513,7 @@ struct KerxSubTableFormat4
 double the ankrActionIndex to get the correct offset here. */
 const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
 if (!c->sanitizer.check_array (data, 2)) return;
+hb_barrier ();
 unsigned int markControlPoint = *data++;
 unsigned int currControlPoint = *data++;
 hb_position_t markX = 0;
@@ -537,6 +542,7 @@ struct KerxSubTableFormat4
 double the ankrActionIndex to get the correct offset here. */
 const HBUINT16 *data = &ankrData[entry.data.ankrActionIndex * 2];
 if (!c->sanitizer.check_array (data, 2)) return;
+hb_barrier ();
 unsigned int markAnchorPoint = *data++;
 unsigned int currAnchorPoint = *data++;
 const Anchor &markAnchor = c->ankr_table->get_anchor (c->buffer->info[mark].codepoint,
@@ -557,6 +563,7 @@ struct KerxSubTableFormat4
 by 4 to get the correct offset for the given action. */
 const FWORD *data = (const FWORD *) &ankrData[entry.data.ankrActionIndex * 4];
 if (!c->sanitizer.check_array (data, 4)) return;
+hb_barrier ();
 int markX = *data++;
 int markY = *data++;
 int currX = *data++;
@@ -639,6 +646,7 @@ struct KerxSubTableFormat6
 if (unlikely (hb_unsigned_mul_overflows (offset, sizeof (FWORD32)))) return 0;
 const FWORD32 *v = &StructAtOffset<FWORD32> (&(this+t.array), offset * sizeof (FWORD32));
 if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
+hb_barrier ();
 return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
 }
 else
@@ -649,6 +657,7 @@ struct KerxSubTableFormat6
 unsigned int offset = l + r;
 const FWORD *v = &StructAtOffset<FWORD> (&(this+t.array), offset * sizeof (FWORD));
 if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
+hb_barrier ();
 return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
 }
 }
@@ -674,6 +683,7 @@ struct KerxSubTableFormat6
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 (is_long () ?
 (
 u.l.rowIndexTable.sanitize (c, this) &&
@@ -787,9 +797,10 @@ struct KerxSubTable
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-if (!u.header.sanitize (c) ||
-u.header.length <= u.header.static_size ||
-!c->check_range (this, u.header.length))
+if (!(u.header.sanitize (c) &&
+hb_barrier () &&
+u.header.length >= u.header.static_size &&
+c->check_range (this, u.header.length)))
 return_trace (false);

 return_trace (dispatch (c));
@@ -936,9 +947,10 @@ struct KerxTable
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-if (unlikely (!thiz()->version.sanitize (c) ||
-(unsigned) thiz()->version < (unsigned) T::minVersion ||
-!thiz()->tableCount.sanitize (c)))
+if (unlikely (!(thiz()->version.sanitize (c) &&
+hb_barrier () &&
+(unsigned) thiz()->version >= (unsigned) T::minVersion &&
+thiz()->tableCount.sanitize (c))))
 return_trace (false);

 typedef typename T::SubTable SubTable;
@@ -949,6 +961,7 @@ struct KerxTable
 {
 if (unlikely (!st->u.header.sanitize (c)))
 return_trace (false);
+hb_barrier ();
 /* OpenType kern table has 2-byte subtable lengths. That's limiting.
 * MS implementation also only supports one subtable, of format 0,
 * anyway. Certain versions of some fonts, like Calibry, contain
@@ -259,7 +259,9 @@ struct ContextualSubtable
 unsigned int offset = entry.data.markIndex + buffer->info[mark].codepoint;
 const UnsizedArrayOf<HBGlyphID16> &subs_old = (const UnsizedArrayOf<HBGlyphID16> &) subs;
 replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
-if (!replacement->sanitize (&c->sanitizer) || !*replacement)
+if (!(replacement->sanitize (&c->sanitizer) &&
+hb_barrier () &&
+*replacement))
 replacement = nullptr;
 }
 if (replacement)
@@ -287,7 +289,9 @@ struct ContextualSubtable
 unsigned int offset = entry.data.currentIndex + buffer->info[idx].codepoint;
 const UnsizedArrayOf<HBGlyphID16> &subs_old = (const UnsizedArrayOf<HBGlyphID16> &) subs;
 replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
-if (!replacement->sanitize (&c->sanitizer) || !*replacement)
+if (!(replacement->sanitize (&c->sanitizer) &&
+hb_barrier () &&
+*replacement))
 replacement = nullptr;
 }
 if (replacement)
@@ -315,7 +319,7 @@ struct ContextualSubtable
 bool has_glyph_classes;
 unsigned int mark;
 const ContextualSubtable *table;
-const UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, false> &subs;
+const UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, void, false> &subs;
 };

 bool apply (hb_aat_apply_context_t *c) const
@@ -336,6 +340,7 @@ struct ContextualSubtable

 unsigned int num_entries = 0;
 if (unlikely (!machine.sanitize (c, &num_entries))) return_trace (false);
+hb_barrier ();

 if (!Types::extended)
 return_trace (substitutionTables.sanitize (c, this, 0));
@@ -359,7 +364,7 @@ struct ContextualSubtable
 protected:
 StateTable<Types, EntryData>
 machine;
-NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, false>, HBUINT>
+NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID16>, HBUINT, void, false>, HBUINT>
 substitutionTables;
 public:
 DEFINE_SIZE_STATIC (20);
@@ -513,6 +518,7 @@ struct LigatureSubtable
 if (unlikely (!buffer->move_to (match_positions[--cursor % ARRAY_LENGTH (match_positions)]))) return;

 if (unlikely (!actionData->sanitize (&c->sanitizer))) break;
+hb_barrier ();
 action = *actionData;

 uint32_t uoffset = action & LigActionOffset;
@@ -523,6 +529,7 @@ struct LigatureSubtable
 component_idx = Types::wordOffsetToIndex (component_idx, table, component.arrayZ);
 const HBUINT16 &componentData = component[component_idx];
 if (unlikely (!componentData.sanitize (&c->sanitizer))) break;
+hb_barrier ();
 ligature_idx += componentData;

 DEBUG_MSG (APPLY, nullptr, "Action store %d last %d",
@@ -533,6 +540,7 @@ struct LigatureSubtable
 ligature_idx = Types::offsetToIndex (ligature_idx, table, ligature.arrayZ);
 const HBGlyphID16 &ligatureData = ligature[ligature_idx];
 if (unlikely (!ligatureData.sanitize (&c->sanitizer))) break;
+hb_barrier ();
 hb_codepoint_t lig = ligatureData;

 DEBUG_MSG (APPLY, nullptr, "Produced ligature %u", lig);
@@ -587,6 +595,7 @@ struct LigatureSubtable
 TRACE_SANITIZE (this);
 /* The rest of array sanitizations are done at run-time. */
 return_trace (c->check_struct (this) && machine.sanitize (c) &&
+hb_barrier () &&
 ligAction && component && ligature);
 }

@@ -765,6 +774,7 @@ struct InsertionSubtable
 unsigned int start = entry.data.markedInsertIndex;
 const HBGlyphID16 *glyphs = &insertionAction[start];
 if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
+hb_barrier ();

 bool before = flags & MarkedInsertBefore;

@@ -793,6 +803,7 @@ struct InsertionSubtable
 unsigned int start = entry.data.currentInsertIndex;
 const HBGlyphID16 *glyphs = &insertionAction[start];
 if (unlikely (!c->sanitizer.check_array (glyphs, count))) count = 0;
+hb_barrier ();

 bool before = flags & CurrentInsertBefore;

@@ -849,6 +860,7 @@ struct InsertionSubtable
 TRACE_SANITIZE (this);
 /* The rest of array sanitizations are done at run-time. */
 return_trace (c->check_struct (this) && machine.sanitize (c) &&
+hb_barrier () &&
 insertionAction);
 }

@@ -944,9 +956,10 @@ struct ChainSubtable
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-if (!length.sanitize (c) ||
-length <= min_size ||
-!c->check_range (this, length))
+if (!(length.sanitize (c) &&
+hb_barrier () &&
+length >= min_size &&
+c->check_range (this, length)))
 return_trace (false);

 hb_sanitize_with_object_t with (c, this);
@@ -1089,9 +1102,10 @@ struct Chain
 bool sanitize (hb_sanitize_context_t *c, unsigned int version HB_UNUSED) const
 {
 TRACE_SANITIZE (this);
-if (!length.sanitize (c) ||
-length < min_size ||
-!c->check_range (this, length))
+if (!(length.sanitize (c) &&
+hb_barrier () &&
+length >= min_size &&
+c->check_range (this, length)))
 return_trace (false);

 if (!c->check_array (featureZ.arrayZ, featureCount))
@@ -1103,6 +1117,7 @@ struct Chain
 {
 if (!subtable->sanitize (c))
 return_trace (false);
+hb_barrier ();
 subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
 }

@@ -1173,7 +1188,10 @@ struct mortmorx
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-if (!version.sanitize (c) || !version || !chainCount.sanitize (c))
+if (!(version.sanitize (c) &&
+hb_barrier () &&
+version &&
+chainCount.sanitize (c)))
 return_trace (false);

 const Chain<Types> *chain = &firstChain;
@@ -1182,6 +1200,7 @@ struct mortmorx
 {
 if (!chain->sanitize (c, version))
 return_trace (false);
+hb_barrier ();
 chain = &StructAfter<Chain<Types>> (*chain);
 }

@@ -144,6 +144,7 @@ struct opbd
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this) || version.major != 1))
 return_trace (false);
+hb_barrier ();

 switch (format)
 {
@@ -134,6 +134,7 @@ struct TrackData
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 sizeTable.sanitize (c, base, nSizes) &&
 trackTable.sanitize (c, nTracks, base, nSizes)));
 }
@@ -203,6 +204,7 @@ struct trak
 TRACE_SANITIZE (this);

 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 horizData.sanitize (c, this, this) &&
 vertData.sanitize (c, this, this)));
4 changes: src/3rdparty/harfbuzz-ng/src/hb-aat-layout.h (vendored)
@@ -40,7 +40,7 @@ HB_BEGIN_DECLS
 * @HB_AAT_LAYOUT_FEATURE_TYPE_INVALID: Initial, unset feature type
 * @HB_AAT_LAYOUT_FEATURE_TYPE_ALL_TYPOGRAPHIC: [All Typographic Features](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type0)
 * @HB_AAT_LAYOUT_FEATURE_TYPE_LIGATURES: [Ligatures](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type1)
-* @HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION: [Cursive Connection](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type2)
+* @HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION: [Cursive Connection](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type2)
 * @HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE: [Letter Case](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type3)
 * @HB_AAT_LAYOUT_FEATURE_TYPE_VERTICAL_SUBSTITUTION: [Vertical Substitution](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type4)
 * @HB_AAT_LAYOUT_FEATURE_TYPE_LINGUISTIC_REARRANGEMENT: [Linguistic Rearrangement](https://developer.apple.com/fonts/TrueType-Reference-Manual/RM09/AppendixF.html#Type5)
@@ -88,7 +88,7 @@ typedef enum

 HB_AAT_LAYOUT_FEATURE_TYPE_ALL_TYPOGRAPHIC = 0,
 HB_AAT_LAYOUT_FEATURE_TYPE_LIGATURES = 1,
-HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION = 2,
+HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION = 2,
 HB_AAT_LAYOUT_FEATURE_TYPE_LETTER_CASE = 3,
 HB_AAT_LAYOUT_FEATURE_TYPE_VERTICAL_SUBSTITUTION = 4,
 HB_AAT_LAYOUT_FEATURE_TYPE_LINGUISTIC_REARRANGEMENT = 5,
@@ -46,7 +46,9 @@ struct FTStringRange
 bool sanitize (hb_sanitize_context_t *c, const void *base) const
 {
 TRACE_SANITIZE (this);
-return_trace (c->check_struct (this) && (base+tag).sanitize (c, length));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+(base+tag).sanitize (c, length));
 }

 protected:
@@ -73,6 +75,7 @@ struct ltag
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version >= 1 &&
 tagRanges.sanitize (c, this)));
 }
2 changes: src/3rdparty/harfbuzz-ng/src/hb-array.hh (vendored)
@@ -47,6 +47,8 @@ enum hb_not_found_t
 template <typename Type>
 struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
 {
+static constexpr bool realloc_move = true;
+
 /*
 * Constructors.
 */
10 changes: src/3rdparty/harfbuzz-ng/src/hb-atomic.hh (vendored)
@@ -118,12 +118,12 @@ _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
 */
 #ifndef _hb_compiler_memory_r_barrier
 #if defined(__ATOMIC_ACQUIRE) // gcc-like
-#define _hb_compiler_memory_r_barrier() asm volatile("": : :"memory")
+static inline void _hb_compiler_memory_r_barrier () { asm volatile("": : :"memory"); }
 #elif !defined(_MSC_VER)
 #include <atomic>
 #define _hb_compiler_memory_r_barrier() std::atomic_signal_fence (std::memory_order_acquire)
 #else
-#define _hb_compiler_memory_r_barrier() do {} while (0)
+static inline void _hb_compiler_memory_r_barrier () {}
 #endif
 #endif

@@ -218,5 +218,11 @@ struct hb_atomic_ptr_t
 T *v = nullptr;
 };

+static inline bool hb_barrier ()
+{
+_hb_compiler_memory_r_barrier ();
+return true;
+}
+

 #endif /* HB_ATOMIC_HH */
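The hunk above is the core of this upgrade: hb_barrier() is a compiler-only acquire fence that the sanitize code calls between checking a byte range and reading from it, so the optimizer cannot hoist the read above the check. A minimal, self-contained C++ sketch of that pattern follows; it is written for illustration only and is not part of the vendored code (the names barrier, FakeHeader and fake_sanitize are hypothetical stand-ins, not HarfBuzz or Qt API).

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Compiler-only acquire fence, in the same spirit as hb_barrier(): it keeps
// the optimizer from hoisting the reads below past the size check above it.
static inline bool barrier ()
{
  std::atomic_signal_fence (std::memory_order_acquire);
  return true;
}

struct FakeHeader { uint8_t format; uint8_t length; };  // hypothetical 2-byte header

static bool fake_sanitize (const uint8_t *data, size_t size)
{
  if (size < sizeof (FakeHeader)) return false;  // bounds check first
  if (!barrier ()) return false;                 // publish the check to the compiler
  FakeHeader h;
  std::memcpy (&h, data, sizeof (h));            // only now interpret the bytes
  return h.length <= size && (h.format == 0 || h.format == 2);
}

int main ()
{
  const uint8_t good[] = {0, 2};
  const uint8_t bad[]  = {7};
  std::printf ("%d %d\n", fake_sanitize (good, sizeof good), fake_sanitize (bad, sizeof bad));
  return 0;
}

Because hb_barrier() returns true, the sanitize methods in the hunks above and below can simply chain it with && between the structural check and the dependent reads.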
@@ -359,8 +359,8 @@ struct hb_bit_set_invertible_t
 typedef hb_codepoint_t __item_t__;
 hb_codepoint_t __item__ () const { return v; }
 bool __more__ () const { return v != INVALID; }
-void __next__ () { s->next (&v); if (l) l--; }
-void __prev__ () { s->previous (&v); }
+void __next__ () { s->next (&v); if (likely (l)) l--; }
+void __prev__ () { s->previous (&v); l++; }
 unsigned __len__ () const { return l; }
 iter_t end () const { return iter_t (*s, false); }
 bool operator != (const iter_t& o) const
11 changes: src/3rdparty/harfbuzz-ng/src/hb-deprecated.h (vendored)
@@ -56,7 +56,7 @@ HB_BEGIN_DECLS
 /**
 * HB_SCRIPT_CANADIAN_ABORIGINAL:
 *
-* Use #HB_SCRIPT_CANADIAN_SYLLABICS instead:
+* Use #HB_SCRIPT_CANADIAN_SYLLABICS instead.
 *
 * Deprecated: 0.9.20
 */
@@ -301,6 +301,15 @@ hb_font_get_glyph_shape (hb_font_t *font,
 hb_draw_funcs_t *dfuncs, void *draw_data);

+
+/**
+* HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION:
+*
+* Use #HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION instead.
+*
+* Deprecated: 8.3.0
+*/
+#define HB_AAT_LAYOUT_FEATURE_TYPE_CURISVE_CONNECTION HB_AAT_LAYOUT_FEATURE_TYPE_CURSIVE_CONNECTION

 #endif

4 changes: src/3rdparty/harfbuzz-ng/src/hb-ft.cc (vendored)
@@ -225,7 +225,7 @@ _hb_ft_hb_font_check_changed (hb_font_t *font,
 * Sets the FT_Load_Glyph load flags for the specified #hb_font_t.
 *
 * For more information, see
-* https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx
+* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
 *
 * This function works with #hb_font_t objects created by
 * hb_ft_font_create() or hb_ft_font_create_referenced().
@@ -253,7 +253,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags)
 * Fetches the FT_Load_Glyph load flags of the specified #hb_font_t.
 *
 * For more information, see
-* https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_load_xxx
+* <https://freetype.org/freetype2/docs/reference/ft2-glyph_retrieval.html#ft_load_xxx>
 *
 * This function works with #hb_font_t objects created by
 * hb_ft_font_create() or hb_ft_font_create_referenced().
31 changes: src/3rdparty/harfbuzz-ng/src/hb-map.hh (vendored)
@@ -42,10 +42,34 @@ template <typename K, typename V,
 bool minus_one = false>
 struct hb_hashmap_t
 {
+static constexpr bool realloc_move = true;
+
 hb_hashmap_t () { init (); }
 ~hb_hashmap_t () { fini (); }

-hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t () { alloc (o.population); hb_copy (o, *this); }
+hb_hashmap_t (const hb_hashmap_t& o) : hb_hashmap_t ()
+{
+if (unlikely (!o.mask)) return;
+
+if (item_t::is_trivial)
+{
+items = (item_t *) hb_malloc (sizeof (item_t) * (o.mask + 1));
+if (unlikely (!items))
+{
+successful = false;
+return;
+}
+population = o.population;
+occupancy = o.occupancy;
+mask = o.mask;
+prime = o.prime;
+max_chain_length = o.max_chain_length;
+memcpy (items, o.items, sizeof (item_t) * (mask + 1));
+return;
+}
+
+alloc (o.population); hb_copy (o, *this);
+}
 hb_hashmap_t (hb_hashmap_t&& o) : hb_hashmap_t () { hb_swap (*this, o); }
 hb_hashmap_t& operator= (const hb_hashmap_t& o) { reset (); alloc (o.population); hb_copy (o, *this); return *this; }
 hb_hashmap_t& operator= (hb_hashmap_t&& o) { hb_swap (*this, o); return *this; }
@@ -209,9 +233,10 @@ struct hb_hashmap_t
 old_items[i].hash,
 std::move (old_items[i].value));
 }
-if (!item_t::is_trivial)
-old_items[i].~item_t ();
 }
+if (!item_t::is_trivial)
+for (unsigned int i = 0; i < old_size; i++)
+old_items[i].~item_t ();

 hb_free (old_items);

6 changes: src/3rdparty/harfbuzz-ng/src/hb-open-file.hh (vendored)
@@ -267,6 +267,7 @@ struct TTCHeader
 {
 TRACE_SANITIZE (this);
 if (unlikely (!u.header.version.sanitize (c))) return_trace (false);
+hb_barrier ();
 switch (u.header.version.major) {
 case 2: /* version 2 is compatible with version 1 */
 case 1: return_trace (u.version1.sanitize (c));
@@ -302,6 +303,7 @@ struct ResourceRecord
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
 offset.sanitize (c, data_base) &&
+hb_barrier () &&
 get_face (data_base).sanitize (c));
 }

@@ -337,6 +339,7 @@ struct ResourceTypeRecord
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 resourcesZ.sanitize (c, type_base,
 get_resource_count (),
 data_base));
@@ -385,6 +388,7 @@ struct ResourceMap
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 typeList.sanitize (c, this,
 &(this+typeList),
 data_base));
@@ -428,6 +432,7 @@ struct ResourceForkHeader
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 data.sanitize (c, this, dataLen) &&
 map.sanitize (c, this, &(this+data)));
 }
@@ -508,6 +513,7 @@ struct OpenTypeFontFile
 {
 TRACE_SANITIZE (this);
 if (unlikely (!u.tag.sanitize (c))) return_trace (false);
+hb_barrier ();
 switch (u.tag) {
 case CFFTag: /* All the non-collection tags */
 case TrueTag:
60 changes: src/3rdparty/harfbuzz-ng/src/hb-open-type.hh (vendored)
@@ -309,7 +309,7 @@ struct _hb_has_null<Type, true>
 static Type *get_crap () { return &Crap (Type); }
 };

-template <typename Type, typename OffsetType, bool has_null=true>
+template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
 struct OffsetTo : Offset<OffsetType, has_null>
 {
 using target_t = Type;
@@ -335,22 +335,22 @@ struct OffsetTo : Offset<OffsetType, has_null>
 }

 template <typename Base,
-hb_enable_if (hb_is_convertible (const Base, const void *))>
+hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
 friend const Type& operator + (const Base &base, const OffsetTo &offset) { return offset ((const void *) base); }
 template <typename Base,
-hb_enable_if (hb_is_convertible (const Base, const void *))>
+hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
 friend const Type& operator + (const OffsetTo &offset, const Base &base) { return offset ((const void *) base); }
 template <typename Base,
-hb_enable_if (hb_is_convertible (Base, void *))>
+hb_enable_if (hb_is_convertible (Base, BaseType *))>
 friend Type& operator + (Base &&base, OffsetTo &offset) { return offset ((void *) base); }
 template <typename Base,
-hb_enable_if (hb_is_convertible (Base, void *))>
+hb_enable_if (hb_is_convertible (Base, BaseType *))>
 friend Type& operator + (OffsetTo &offset, Base &&base) { return offset ((void *) base); }


-template <typename ...Ts>
+template <typename Base, typename ...Ts>
 bool serialize_subset (hb_subset_context_t *c, const OffsetTo& src,
-const void *src_base, Ts&&... ds)
+const Base *src_base, Ts&&... ds)
 {
 *this = 0;
 if (src.is_null ())
@@ -414,10 +414,11 @@ struct OffsetTo : Offset<OffsetType, has_null>
 const void *src_base, unsigned dst_bias = 0)
 { return serialize_copy (c, src, src_base, dst_bias, hb_serialize_context_t::Head); }

-bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const
+bool sanitize_shallow (hb_sanitize_context_t *c, const BaseType *base) const
 {
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this))) return_trace (false);
+hb_barrier ();
 //if (unlikely (this->is_null ())) return_trace (true);
 if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
 return_trace (true);
@@ -427,10 +428,11 @@ struct OffsetTo : Offset<OffsetType, has_null>
 #ifndef HB_OPTIMIZE_SIZE
 HB_ALWAYS_INLINE
 #endif
-bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
+bool sanitize (hb_sanitize_context_t *c, const BaseType *base, Ts&&... ds) const
 {
 TRACE_SANITIZE (this);
 return_trace (sanitize_shallow (c, base) &&
+hb_barrier () &&
 (this->is_null () ||
 c->dispatch (StructAtOffset<Type> (base, *this), std::forward<Ts> (ds)...) ||
 neuter (c)));
@@ -445,14 +447,14 @@ struct OffsetTo : Offset<OffsetType, has_null>
 DEFINE_SIZE_STATIC (sizeof (OffsetType));
 };
 /* Partial specializations. */
-template <typename Type, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, has_null>;
+template <typename Type, typename BaseType=void, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, BaseType, has_null>;
-template <typename Type, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, has_null>;
+template <typename Type, typename BaseType=void, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, BaseType, has_null>;
-template <typename Type, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, has_null>;
|
template <typename Type, typename BaseType=void, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, BaseType, has_null>;
|
||||||
|
|
||||||
template <typename Type, typename OffsetType> using NNOffsetTo = OffsetTo<Type, OffsetType, false>;
|
template <typename Type, typename OffsetType, typename BaseType=void> using NNOffsetTo = OffsetTo<Type, OffsetType, BaseType, false>;
|
||||||
template <typename Type> using NNOffset16To = Offset16To<Type, false>;
|
template <typename Type, typename BaseType=void> using NNOffset16To = Offset16To<Type, BaseType, false>;
|
||||||
template <typename Type> using NNOffset24To = Offset24To<Type, false>;
|
template <typename Type, typename BaseType=void> using NNOffset24To = Offset24To<Type, BaseType, false>;
|
||||||
template <typename Type> using NNOffset32To = Offset32To<Type, false>;
|
template <typename Type, typename BaseType=void> using NNOffset32To = Offset32To<Type, BaseType, false>;
|
||||||
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@ -536,6 +538,7 @@ struct UnsizedArrayOf
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
||||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||||
|
hb_barrier ();
|
||||||
for (unsigned int i = 0; i < count; i++)
|
for (unsigned int i = 0; i < count; i++)
|
||||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||||
return_trace (false);
|
return_trace (false);
|
||||||
@ -555,17 +558,17 @@ struct UnsizedArrayOf
|
|||||||
};
|
};
|
||||||
|
|
||||||
/* Unsized array of offset's */
|
/* Unsized array of offset's */
|
||||||
template <typename Type, typename OffsetType, bool has_null=true>
|
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
|
||||||
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null>>;
|
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, BaseType, has_null>>;
|
||||||
|
|
||||||
/* Unsized array of offsets relative to the beginning of the array itself. */
|
/* Unsized array of offsets relative to the beginning of the array itself. */
|
||||||
template <typename Type, typename OffsetType, bool has_null=true>
|
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
|
||||||
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
|
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
|
||||||
{
|
{
|
||||||
const Type& operator [] (int i_) const
|
const Type& operator [] (int i_) const
|
||||||
{
|
{
|
||||||
unsigned int i = (unsigned int) i_;
|
unsigned int i = (unsigned int) i_;
|
||||||
const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
|
const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
|
||||||
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Null (Type); /* Overflowed. */
|
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Null (Type); /* Overflowed. */
|
||||||
_hb_compiler_memory_r_barrier ();
|
_hb_compiler_memory_r_barrier ();
|
||||||
return this+*p;
|
return this+*p;
|
||||||
@ -573,7 +576,7 @@ struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_
|
|||||||
Type& operator [] (int i_)
|
Type& operator [] (int i_)
|
||||||
{
|
{
|
||||||
unsigned int i = (unsigned int) i_;
|
unsigned int i = (unsigned int) i_;
|
||||||
const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
|
const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
|
||||||
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Crap (Type); /* Overflowed. */
|
if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Crap (Type); /* Overflowed. */
|
||||||
_hb_compiler_memory_r_barrier ();
|
_hb_compiler_memory_r_barrier ();
|
||||||
return this+*p;
|
return this+*p;
|
||||||
@ -583,7 +586,7 @@ struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_
|
|||||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
|
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
|
||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
|
return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
|
||||||
::sanitize (c, count, this, std::forward<Ts> (ds)...)));
|
::sanitize (c, count, this, std::forward<Ts> (ds)...)));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -725,6 +728,7 @@ struct ArrayOf
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||||
|
hb_barrier ();
|
||||||
unsigned int count = len;
|
unsigned int count = len;
|
||||||
for (unsigned int i = 0; i < count; i++)
|
for (unsigned int i = 0; i < count; i++)
|
||||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||||
@ -735,7 +739,9 @@ struct ArrayOf
|
|||||||
bool sanitize_shallow (hb_sanitize_context_t *c) const
|
bool sanitize_shallow (hb_sanitize_context_t *c) const
|
||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (len.sanitize (c) && c->check_array_sized (arrayZ, len, sizeof (LenType)));
|
return_trace (len.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
|
c->check_array_sized (arrayZ, len, sizeof (LenType)));
|
||||||
}
|
}
|
||||||
|
|
||||||
public:
|
public:
|
||||||
@ -866,6 +872,7 @@ struct HeadlessArrayOf
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||||
|
hb_barrier ();
|
||||||
unsigned int count = get_length ();
|
unsigned int count = get_length ();
|
||||||
for (unsigned int i = 0; i < count; i++)
|
for (unsigned int i = 0; i < count; i++)
|
||||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||||
@ -878,6 +885,7 @@ struct HeadlessArrayOf
|
|||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (lenP1.sanitize (c) &&
|
return_trace (lenP1.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
(!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
|
(!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -919,6 +927,7 @@ struct ArrayOfM1
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||||
|
hb_barrier ();
|
||||||
unsigned int count = lenM1 + 1;
|
unsigned int count = lenM1 + 1;
|
||||||
for (unsigned int i = 0; i < count; i++)
|
for (unsigned int i = 0; i < count; i++)
|
||||||
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
|
||||||
@ -931,6 +940,7 @@ struct ArrayOfM1
|
|||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (lenM1.sanitize (c) &&
|
return_trace (lenM1.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
(c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
|
(c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1104,6 +1114,7 @@ struct VarSizedBinSearchArrayOf
|
|||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||||
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
|
||||||
|
hb_barrier ();
|
||||||
unsigned int count = get_length ();
|
unsigned int count = get_length ();
|
||||||
for (unsigned int i = 0; i < count; i++)
|
for (unsigned int i = 0; i < count; i++)
|
||||||
if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
|
if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
|
||||||
@ -1130,6 +1141,7 @@ struct VarSizedBinSearchArrayOf
|
|||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (header.sanitize (c) &&
|
return_trace (header.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
Type::static_size <= header.unitSize &&
|
Type::static_size <= header.unitSize &&
|
||||||
c->check_range (bytesZ.arrayZ,
|
c->check_range (bytesZ.arrayZ,
|
||||||
header.nUnits,
|
header.nUnits,
|
||||||
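The hb-open-type.hh hunks above thread a BaseType template parameter through OffsetTo and its Offset16To/24To/32To and NNOffset aliases, so an offset can be declared as relative to a specific struct instead of a bare const void *. A reduced sketch of what such a typed-base offset buys; the names below (resolve, GlyphRecord, Table) are illustrative and not part of HarfBuzz.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Reduced OffsetTo: the byte offset is resolved against a base whose static
// type is part of the template signature (BaseType), not a raw void *.
template <typename Type, typename OffsetType, typename BaseType = void>
struct OffsetTo
{
  OffsetType offset;

  const Type &resolve (const BaseType *base) const
  {
    return *reinterpret_cast<const Type *>
           (reinterpret_cast<const char *> (base) + offset);
  }
};

struct GlyphRecord { uint16_t advance; };

struct Table
{
  uint16_t version;
  OffsetTo<GlyphRecord, uint16_t, Table> firstGlyph;  // offset relative to Table
};

int main ()
{
  // Lay out a tiny "table": header followed by one GlyphRecord.
  alignas (Table) unsigned char buf[sizeof (Table) + sizeof (GlyphRecord)] = {};
  Table t = {};
  t.version = 1;
  t.firstGlyph.offset = sizeof (Table);
  GlyphRecord g = { 600 };
  std::memcpy (buf, &t, sizeof t);
  std::memcpy (buf + sizeof (Table), &g, sizeof g);

  const Table *table = reinterpret_cast<const Table *> (buf);
  // Resolving against anything other than a const Table * no longer compiles.
  std::printf ("advance: %u\n", table->firstGlyph.resolve (table).advance);
  return 0;
}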
src/3rdparty/harfbuzz-ng/src/hb-ot-cff-common.hh | 31 (vendored)
@@ -78,7 +78,8 @@ struct CFFIndex
 hb_requires (hb_is_iterable (Iterable))>
 bool serialize (hb_serialize_context_t *c,
 const Iterable &iterable,
-const unsigned *p_data_size = nullptr)
+const unsigned *p_data_size = nullptr,
+unsigned min_off_size = 0)
 {
 TRACE_SERIALIZE (this);
 unsigned data_size;
@@ -88,7 +89,7 @@ struct CFFIndex
 total_size (iterable, &data_size);

 auto it = hb_iter (iterable);
-if (unlikely (!serialize_header (c, +it, data_size))) return_trace (false);
+if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
 unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
 if (unlikely (!ret)) return_trace (false);
 for (const auto &_ : +it)
@@ -111,11 +112,13 @@ struct CFFIndex
 hb_requires (hb_is_iterator (Iterator))>
 bool serialize_header (hb_serialize_context_t *c,
 Iterator it,
-unsigned data_size)
+unsigned data_size,
+unsigned min_off_size = 0)
 {
 TRACE_SERIALIZE (this);

 unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
+off_size = hb_max(min_off_size, off_size);

 /* serialize CFFIndex header */
 if (unlikely (!c->extend_min (this))) return_trace (false);
@@ -195,7 +198,7 @@ struct CFFIndex

 template <typename Iterable,
 hb_requires (hb_is_iterable (Iterable))>
-static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr)
+static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
 {
 auto it = + hb_iter (iterable);
 if (!it)
@@ -211,6 +214,7 @@ struct CFFIndex
 if (data_size) *data_size = total;

 unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
+off_size = hb_max(min_off_size, off_size);

 return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
 }
@@ -274,8 +278,10 @@ struct CFFIndex
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 (count == 0 || /* empty INDEX */
 (count < count + 1u &&
+hb_barrier () &&
 c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
 c->check_array (offsets, offSize, count + 1u) &&
 c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count))))));
@@ -412,6 +418,7 @@ struct FDSelect0 {
 TRACE_SANITIZE (this);
 if (unlikely (!(c->check_struct (this))))
 return_trace (false);
+hb_barrier ();
 if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
 return_trace (false);

@@ -438,7 +445,9 @@ struct FDSelect3_4_Range
 bool sanitize (hb_sanitize_context_t *c, const void * /*nullptr*/, unsigned int fdcount) const
 {
 TRACE_SANITIZE (this);
-return_trace (first < c->get_num_glyphs () && (fd < fdcount));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+first < c->get_num_glyphs () && (fd < fdcount));
 }

 GID_TYPE first;
@@ -456,15 +465,20 @@ struct FDSelect3_4
 bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
 {
 TRACE_SANITIZE (this);
-if (unlikely (!c->check_struct (this) || !ranges.sanitize (c, nullptr, fdcount) ||
-(nRanges () == 0) || ranges[0].first != 0))
+if (unlikely (!(c->check_struct (this) &&
+ranges.sanitize (c, nullptr, fdcount) &&
+hb_barrier () &&
+(nRanges () != 0) &&
+ranges[0].first == 0)))
 return_trace (false);

 for (unsigned int i = 1; i < nRanges (); i++)
 if (unlikely (ranges[i - 1].first >= ranges[i].first))
 return_trace (false);

-if (unlikely (!sentinel().sanitize (c) || (sentinel() != c->get_num_glyphs ())))
+if (unlikely (!(sentinel().sanitize (c) &&
+hb_barrier () &&
+(sentinel() == c->get_num_glyphs ()))))
 return_trace (false);

 return_trace (true);
@@ -559,6 +573,7 @@ struct FDSelect
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (format)
 {
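The CFFIndex hunks above add a min_off_size argument that puts a floor under the offset width chosen when serializing an INDEX: off_size is still derived as (hb_bit_storage (data_size + 1) + 7) / 8, but can no longer come out smaller than the caller's minimum. A small self-contained rendering of that arithmetic, with hb_bit_storage re-implemented here only for illustration:

#include <algorithm>
#include <cassert>

// Number of bits needed to represent v (0 -> 0), mirroring hb_bit_storage.
static unsigned bit_storage (unsigned v)
{
  unsigned bits = 0;
  while (v) { bits++; v >>= 1; }
  return bits;
}

// Offset element size of a CFF INDEX: smallest byte width that can hold
// data_size + 1, clamped from below by min_off_size.
static unsigned index_off_size (unsigned data_size, unsigned min_off_size = 0)
{
  unsigned off_size = (bit_storage (data_size + 1) + 7) / 8;
  return std::max (min_off_size, off_size);
}

int main ()
{
  assert (index_off_size (200) == 1);      // fits in one byte
  assert (index_off_size (70000) == 3);    // needs three bytes
  assert (index_off_size (200, 2) == 2);   // floor requested by the caller wins
  return 0;
}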
src/3rdparty/harfbuzz-ng/src/hb-ot-cff1-table.hh | 23 (vendored)
@@ -275,6 +275,7 @@ struct Encoding
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (table_format ())
 {
@@ -376,13 +377,13 @@ struct Charset1_2 {
 bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs, unsigned *num_charset_entries) const
 {
 TRACE_SANITIZE (this);
-if (unlikely (!c->check_struct (this)))
-return_trace (false);
 num_glyphs--;
 unsigned i;
 for (i = 0; num_glyphs > 0; i++)
 {
-if (unlikely (!ranges[i].sanitize (c) || (num_glyphs < ranges[i].nLeft + 1)))
+if (unlikely (!(ranges[i].sanitize (c) &&
+hb_barrier () &&
+(num_glyphs >= ranges[i].nLeft + 1))))
 return_trace (false);
 num_glyphs -= (ranges[i].nLeft + 1);
 }
@@ -615,6 +616,7 @@ struct Charset
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (format)
 {
@@ -1055,6 +1057,7 @@ struct cff1
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (version.major == 1));
 }

@@ -1085,14 +1088,17 @@ struct cff1
 nameIndex = &cff->nameIndex (cff);
 if ((nameIndex == &Null (CFF1NameIndex)) || !nameIndex->sanitize (&sc))
 goto fail;
+hb_barrier ();

 topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
 if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
 goto fail;
+hb_barrier ();

 { /* parse top dict */
 const hb_ubytes_t topDictStr = (*topDictIndex)[0];
 if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 cff1_top_dict_interp_env_t env (topDictStr);
 cff1_top_dict_interpreter_t top_interp (env);
 if (unlikely (!top_interp.interpret (topDict))) goto fail;
@@ -1104,6 +1110,7 @@ struct cff1
 {
 charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
 if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
+hb_barrier ();
 }

 fdCount = 1;
@@ -1114,6 +1121,7 @@ struct cff1
 if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
 (fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
 goto fail;
+hb_barrier ();

 fdCount = fdArray->count;
 }
@@ -1134,21 +1142,25 @@ struct cff1
 {
 encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
 if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
+hb_barrier ();
 }
 }

 stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
 if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
 goto fail;
+hb_barrier ();

 globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
 if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
 goto fail;
+hb_barrier ();

 charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);

 if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
 goto fail;
+hb_barrier ();

 num_glyphs = charStrings->count;
 if (num_glyphs != sc.get_num_glyphs ())
@@ -1166,6 +1178,7 @@ struct cff1
 {
 hb_ubytes_t fontDictStr = (*fdArray)[i];
 if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 cff1_font_dict_values_t *font;
 cff1_top_dict_interp_env_t env (fontDictStr);
 cff1_font_dict_interpreter_t font_interp (env);
@@ -1177,6 +1190,7 @@ struct cff1
 PRIVDICTVAL *priv = &privateDicts[i];
 const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
 if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 num_interp_env_t env2 (privDictStr);
 dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
 priv->init ();
@@ -1186,6 +1200,7 @@ struct cff1
 if (priv->localSubrs != &Null (CFF1Subrs) &&
 unlikely (!priv->localSubrs->sanitize (&sc)))
 goto fail;
+hb_barrier ();
 }
 }
 else /* non-CID */
@@ -1195,6 +1210,7 @@ struct cff1

 const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
 if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 num_interp_env_t env (privDictStr);
 dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
 priv->init ();
@@ -1204,6 +1220,7 @@ struct cff1
 if (priv->localSubrs != &Null (CFF1Subrs) &&
 unlikely (!priv->localSubrs->sanitize (&sc)))
 goto fail;
+hb_barrier ();
 }

 return;
src/3rdparty/harfbuzz-ng/src/hb-ot-cff2-table.hh | 12 (vendored)
@@ -90,6 +90,7 @@ struct CFF2FDSelect
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (format)
 {
@@ -115,7 +116,10 @@ struct CFF2VariationStore
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-return_trace (likely (c->check_struct (this)) && c->check_range (&varStore, size) && varStore.sanitize (c));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+c->check_range (&varStore, size) &&
+varStore.sanitize (c));
 }

 bool serialize (hb_serialize_context_t *c, const CFF2VariationStore *varStore)
@@ -384,6 +388,7 @@ struct cff2
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (version.major == 2));
 }

@@ -414,6 +419,7 @@ struct cff2
 { /* parse top dict */
 hb_ubytes_t topDictStr = (cff2 + cff2->topDict).as_ubytes (cff2->topDictSize);
 if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 num_interp_env_t env (topDictStr);
 cff2_top_dict_interpreter_t top_interp (env);
 topDict.init ();
@@ -430,6 +436,7 @@ struct cff2
 (charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) ||
 (globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) ||
 (fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) ||
+!hb_barrier () ||
 (((fdSelect != &Null (CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count)))))
 goto fail;

@@ -446,6 +453,7 @@ struct cff2
 {
 const hb_ubytes_t fontDictStr = (*fdArray)[i];
 if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 cff2_font_dict_values_t *font;
 num_interp_env_t env (fontDictStr);
 cff2_font_dict_interpreter_t font_interp (env);
@@ -456,6 +464,7 @@ struct cff2

 const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
 if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
+hb_barrier ();
 cff2_priv_dict_interp_env_t env2 (privDictStr);
 dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
 privateDicts[i].init ();
@@ -465,6 +474,7 @@ struct cff2
 if (privateDicts[i].localSubrs != &Null (CFF2Subrs) &&
 unlikely (!privateDicts[i].localSubrs->sanitize (&sc)))
 goto fail;
+hb_barrier ();
 }

 return;
src/3rdparty/harfbuzz-ng/src/hb-ot-cmap-table.hh | 14 (vendored)
@@ -556,6 +556,7 @@ struct CmapSubtableFormat4
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 if (unlikely (!c->check_range (this, length)))
 {
@@ -742,10 +743,11 @@ struct CmapSubtableLongSegmented
 unsigned num_glyphs) const
 {
 hb_codepoint_t last_end = 0;
-for (unsigned i = 0; i < this->groups.len; i++)
+unsigned count = this->groups.len;
+for (unsigned i = 0; i < count; i++)
 {
-hb_codepoint_t start = this->groups[i].startCharCode;
-hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
+hb_codepoint_t start = this->groups.arrayZ[i].startCharCode;
+hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups.arrayZ[i].endCharCode,
 (hb_codepoint_t) HB_UNICODE_MAX);
 if (unlikely (start > end || start < last_end)) {
 // Range is not in order and is invalid, skip it.
@@ -754,7 +756,7 @@ struct CmapSubtableLongSegmented
 last_end = end;


-hb_codepoint_t gid = this->groups[i].glyphID;
+hb_codepoint_t gid = this->groups.arrayZ[i].glyphID;
 if (!gid)
 {
 if (T::formatNumber == 13) continue;
@@ -767,9 +769,9 @@ struct CmapSubtableLongSegmented

 mapping->alloc (mapping->get_population () + end - start + 1);

+unicodes->add_range (start, end);
 for (unsigned cp = start; cp <= end; cp++)
 {
-unicodes->add (cp);
 mapping->set (cp, gid);
 gid += T::increment;
 }
@@ -1427,6 +1429,7 @@ struct CmapSubtable
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 0: return_trace (u.format0 .sanitize (c));
 case 4: return_trace (u.format4 .sanitize (c));
@@ -2060,6 +2063,7 @@ struct cmap
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 likely (version == 0) &&
 encodingRecord.sanitize (c, this));
 }
@@ -71,6 +71,7 @@ struct DeviceRecord
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 c->check_range (this, sizeDeviceRecord)));
 }

@@ -152,6 +153,7 @@ struct hdmx
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 !hb_unsigned_mul_overflows (numRecords, sizeDeviceRecord) &&
 min_size + numRecords * sizeDeviceRecord > numRecords * sizeDeviceRecord &&
 sizeDeviceRecord >= DeviceRecord::min_size &&
@@ -103,6 +103,7 @@ struct head
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 magicNumber == 0x5F0F3CF5u);
 }
@@ -50,7 +50,9 @@ struct _hea
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-return_trace (c->check_struct (this) && likely (version.major == 1));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+likely (version.major == 1));
 }

 public:
@@ -79,6 +79,7 @@ struct KernSubTableFormat3
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 c->check_range (kernValueZ,
 kernValueCount * sizeof (FWORD) +
 glyphCount * 2 +
@@ -147,9 +148,10 @@ struct KernSubTable
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-if (unlikely (!u.header.sanitize (c) ||
-u.header.length < u.header.min_size ||
-!c->check_range (this, u.header.length))) return_trace (false);
+if (unlikely (!(u.header.sanitize (c) &&
+hb_barrier () &&
+u.header.length >= u.header.min_size &&
+c->check_range (this, u.header.length)))) return_trace (false);

 return_trace (dispatch (c));
 }
@@ -337,6 +339,7 @@ struct kern
 {
 TRACE_SANITIZE (this);
 if (!u.version32.sanitize (c)) return_trace (false);
+hb_barrier ();
 return_trace (dispatch (c));
 }

@@ -135,6 +135,7 @@ struct BaseCoord
 {
 TRACE_SANITIZE (this);
 if (unlikely (!u.format.sanitize (c))) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 case 2: return_trace (u.format2.sanitize (c));
@@ -496,6 +497,7 @@ struct BASE
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 likely (version.major == 1) &&
 hAxis.sanitize (c, this) &&
 vAxis.sanitize (c, this) &&
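In the CmapSubtableLongSegmented hunk above, each valid segment is now added to the unicodes set with a single add_range call while the per-codepoint loop only feeds the glyph mapping. The same bulk insertion is available through HarfBuzz's public set API; a tiny illustrative snippet, not taken from the patch:

#include <hb.h>

int main ()
{
  hb_set_t *unicodes = hb_set_create ();

  // One call covers the whole segment instead of a per-codepoint loop.
  hb_set_add_range (unicodes, 0x0041u, 0x005Au);   // A..Z

  unsigned population = hb_set_get_population (unicodes);  // 26
  hb_set_destroy (unicodes);
  return population == 26 ? 0 : 1;
}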
src/3rdparty/harfbuzz-ng/src/hb-ot-layout-common.hh | 155 (vendored)
@@ -64,7 +64,7 @@ struct hb_collect_feature_substitutes_with_var_context_t
 const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
 hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
-bool& insert_catch_all_feature_variation_record;
+hb_set_t& catch_all_record_feature_idxes;

 // not stored in subset_plan
 hb_set_t *feature_indices;
@@ -142,6 +142,8 @@ struct hb_subset_layout_context_t :
 const hb_map_t *feature_index_map;
 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
+const hb_set_t *catch_all_record_feature_idxes;
+const hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>> *feature_idx_tag_map;

 unsigned cur_script_index;
 unsigned cur_feature_var_record_idx;
@@ -164,6 +166,8 @@ struct hb_subset_layout_context_t :
 feature_index_map = &c_->plan->gsub_features;
 feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map;
 feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map;
+catch_all_record_feature_idxes = &c_->plan->gsub_old_features;
+feature_idx_tag_map = &c_->plan->gsub_old_feature_idx_tag_map;
 }
 else
 {
@@ -172,6 +176,8 @@ struct hb_subset_layout_context_t :
 feature_index_map = &c_->plan->gpos_features;
 feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map;
 feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map;
+catch_all_record_feature_idxes = &c_->plan->gpos_old_features;
+feature_idx_tag_map = &c_->plan->gpos_old_feature_idx_tag_map;
 }
 }

@@ -454,6 +460,7 @@ struct FeatureParamsSize
 {
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this))) return_trace (false);
+hb_barrier ();

 /* This subtable has some "history", if you will. Some earlier versions of
 * Adobe tools calculated the offset of the FeatureParams subtable from the
@@ -820,6 +827,7 @@ struct Feature
 TRACE_SANITIZE (this);
 if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
 return_trace (false);
+hb_barrier ();

 /* Some earlier versions of Adobe tools calculated the offset of the
 * FeatureParams subtable from the beginning of the FeatureList table!
@@ -838,6 +846,7 @@ struct Feature
 unsigned int orig_offset = featureParams;
 if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
 return_trace (false);
+hb_barrier ();

 if (featureParams == 0 && closure &&
 closure->tag == HB_TAG ('s','i','z','e') &&
@@ -900,7 +909,8 @@ struct Record
 {
 TRACE_SANITIZE (this);
 const Record_sanitize_closure_t closure = {tag, base};
-return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
+return_trace (c->check_struct (this) &&
+offset.sanitize (c, base, &closure));
 }

 Tag tag; /* 4-byte Tag identifier */
@@ -1371,10 +1381,20 @@ struct Lookup

 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
 {
-if (unlikely (!c->serializer->extend (out))) return_trace (false);
 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
-HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
-outMarkFilteringSet = markFilteringSet;
+hb_codepoint_t *idx;
+if (!c->plan->used_mark_sets_map.has (markFilteringSet, &idx))
+{
+unsigned new_flag = lookupFlag;
+new_flag &= ~LookupFlag::UseMarkFilteringSet;
+out->lookupFlag = new_flag;
+}
+else
+{
+if (unlikely (!c->serializer->extend (out))) return_trace (false);
+HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
+outMarkFilteringSet = *idx;
+}
 }

 // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
@@ -1391,6 +1411,7 @@ struct Lookup
 {
 TRACE_SANITIZE (this);
 if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
+hb_barrier ();

 unsigned subtables = get_subtable_count ();
 if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
@@ -1406,6 +1427,8 @@ struct Lookup

 if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
 {
+hb_barrier ();
+
 /* The spec says all subtables of an Extension lookup should
 * have the same type, which shall not be the Extension type
 * itself (but we already checked for that).
@@ -2156,6 +2179,7 @@ struct ClassDef
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 case 2: return_trace (u.format2.sanitize (c));
@@ -2534,7 +2558,9 @@ struct VarRegionList
 bool sanitize (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+axesZ.sanitize (c, axisCount * regionCount));
 }

 bool serialize (hb_serialize_context_t *c,
@@ -2728,6 +2754,7 @@ struct VarData
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
 regionIndices.sanitize (c) &&
+hb_barrier () &&
 wordCount () <= regionIndices.len &&
 c->check_range (get_delta_bytes (),
 itemCount,
@@ -3077,6 +3104,7 @@ struct VariationStore

 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 format == 1 &&
 regions.sanitize (c, this) &&
 dataSets.sanitize (c, this));
@@ -3330,8 +3358,12 @@ struct ConditionFormat1

 Triple axis_range (-1.f, 0.f, 1.f);
 Triple *axis_limit;
+bool axis_set_by_user = false;
 if (c->axes_location->has (axis_tag, &axis_limit))
+{
 axis_range = *axis_limit;
+axis_set_by_user = true;
+}

 float axis_min_val = axis_range.minimum;
 float axis_default_val = axis_range.middle;
@@ -3350,8 +3382,7 @@ struct ConditionFormat1
 return DROP_RECORD_WITH_VAR;

 //condition met and axis pinned, drop the condition
-if (c->axes_location->has (axis_tag) &&
-c->axes_location->get (axis_tag).is_point ())
+if (axis_set_by_user && axis_range.is_point ())
 return DROP_COND_WITH_VAR;

 if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
@@ -3365,7 +3396,6 @@ struct ConditionFormat1
 condition_map->set (axisIndex, val);
 return KEEP_COND_WITH_VAR;
 }
-
 return KEEP_RECORD_WITH_VAR;
 }

@@ -3424,6 +3454,7 @@ struct Condition
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 1: return_trace (u.format1.sanitize (c));
 default:return_trace (true);
@@ -3497,12 +3528,15 @@ struct ConditionSet
 }

 bool subset (hb_subset_context_t *c,
-hb_subset_layout_context_t *l) const
+hb_subset_layout_context_t *l,
+bool insert_catch_all) const
 {
 TRACE_SUBSET (this);
 auto *out = c->serializer->start_embed (this);
 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

+if (insert_catch_all) return_trace (true);
+
 hb_set_t *retained_cond_set = nullptr;
 if (l->feature_record_cond_idx_map != nullptr)
 retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);
@@ -3548,27 +3582,51 @@ struct FeatureTableSubstitutionRecord
 }

 void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
+hb_set_t& catch_all_record_feature_idxes,
 const hb_set_t *feature_indices,
 const void *base) const
 {
 if (feature_indices->has (featureIndex))
+{
 feature_substitutes_map->set (featureIndex, &(base+feature));
+catch_all_record_feature_idxes.add (featureIndex);
+}
+}
+
+bool serialize (hb_subset_layout_context_t *c,
+unsigned feature_index,
+const Feature *f, const Tag *tag)
+{
+TRACE_SERIALIZE (this);
+hb_serialize_context_t *s = c->subset_context->serializer;
+if (unlikely (!s->extend_min (this))) return_trace (false);
+
+uint32_t *new_feature_idx;
+if (!c->feature_index_map->has (feature_index, &new_feature_idx))
+return_trace (false);
+
+if (!s->check_assign (featureIndex, *new_feature_idx, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+return_trace (false);
+
+s->push ();
+bool ret = f->subset (c->subset_context, c, tag);
+if (ret) s->add_link (feature, s->pop_pack ());
+else s->pop_discard ();
+
+return_trace (ret);
 }

 bool subset (hb_subset_layout_context_t *c, const void *base) const
 {
 TRACE_SUBSET (this);
-if (!c->feature_index_map->has (featureIndex) ||
-c->feature_substitutes_map->has (featureIndex)) {
-// Feature that is being substituted is not being retained, so we don't
-// need this.
+uint32_t *new_feature_index;
+if (!c->feature_index_map->has (featureIndex, &new_feature_index))
 return_trace (false);
-}

 auto *out = c->subset_context->serializer->embed (this);
 if (unlikely (!out)) return_trace (false);

-out->featureIndex = c->feature_index_map->get (featureIndex);
+out->featureIndex = *new_feature_index;
 return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
 }

@@ -3600,16 +3658,10 @@ struct FeatureTableSubstitution
 }

 void collect_lookups (const hb_set_t *feature_indexes,
-const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
 hb_set_t *lookup_indexes /* OUT */) const
 {
 + hb_iter (substitutions)
 | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
-| hb_filter ([feature_substitutes_map] (const FeatureTableSubstitutionRecord& record)
-{
-if (feature_substitutes_map == nullptr) return true;
-return !feature_substitutes_map->has (record.featureIndex);
-})
 | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
 { r.collect_lookups (this, lookup_indexes); })
 ;
@@ -3634,11 +3686,14 @@ struct FeatureTableSubstitution
 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
 {
 for (const FeatureTableSubstitutionRecord& record : substitutions)
-record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, c->feature_indices, this);
+record.collect_feature_substitutes_with_variations (c->feature_substitutes_map,
+c->catch_all_record_feature_idxes,
+c->feature_indices, this);
 }

 bool subset (hb_subset_context_t *c,
-hb_subset_layout_context_t *l) const
+hb_subset_layout_context_t *l,
+bool insert_catch_all) const
 {
 TRACE_SUBSET (this);
 auto *out = c->serializer->start_embed (*this);
|
auto *out = c->serializer->start_embed (*this);
|
||||||
@ -3647,6 +3702,22 @@ struct FeatureTableSubstitution
|
|||||||
out->version.major = version.major;
|
out->version.major = version.major;
|
||||||
out->version.minor = version.minor;
|
out->version.minor = version.minor;
|
||||||
|
|
||||||
|
if (insert_catch_all)
|
||||||
|
{
|
||||||
|
for (unsigned feature_index : *(l->catch_all_record_feature_idxes))
|
||||||
|
{
|
||||||
|
hb_pair_t<const void*, const void*> *p;
|
||||||
|
if (!l->feature_idx_tag_map->has (feature_index, &p))
|
||||||
|
return_trace (false);
|
||||||
|
auto *o = out->substitutions.serialize_append (c->serializer);
|
||||||
|
if (!o->serialize (l, feature_index,
|
||||||
|
reinterpret_cast<const Feature*> (p->first),
|
||||||
|
reinterpret_cast<const Tag*> (p->second)))
|
||||||
|
return_trace (false);
|
||||||
|
}
|
||||||
|
return_trace (true);
|
||||||
|
}
|
||||||
|
|
||||||
+ substitutions.iter ()
|
+ substitutions.iter ()
|
||||||
| hb_apply (subset_record_array (l, &(out->substitutions), this))
|
| hb_apply (subset_record_array (l, &(out->substitutions), this))
|
||||||
;
|
;
|
||||||
@ -3658,6 +3729,7 @@ struct FeatureTableSubstitution
|
|||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (version.sanitize (c) &&
|
return_trace (version.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
likely (version.major == 1) &&
|
likely (version.major == 1) &&
|
||||||
substitutions.sanitize (c, this));
|
substitutions.sanitize (c, this));
|
||||||
}
|
}
|
||||||
@ -3676,10 +3748,9 @@ struct FeatureVariationRecord
|
|||||||
|
|
||||||
void collect_lookups (const void *base,
|
void collect_lookups (const void *base,
|
||||||
const hb_set_t *feature_indexes,
|
const hb_set_t *feature_indexes,
|
||||||
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
|
|
||||||
hb_set_t *lookup_indexes /* OUT */) const
|
hb_set_t *lookup_indexes /* OUT */) const
|
||||||
{
|
{
|
||||||
return (base+substitutions).collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
|
return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
|
||||||
}
|
}
|
||||||
|
|
||||||
void closure_features (const void *base,
|
void closure_features (const void *base,
|
||||||
@ -3705,14 +3776,15 @@ struct FeatureVariationRecord
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bool subset (hb_subset_layout_context_t *c, const void *base) const
|
bool subset (hb_subset_layout_context_t *c, const void *base,
|
||||||
|
bool insert_catch_all = false) const
|
||||||
{
|
{
|
||||||
TRACE_SUBSET (this);
|
TRACE_SUBSET (this);
|
||||||
auto *out = c->subset_context->serializer->embed (this);
|
auto *out = c->subset_context->serializer->embed (this);
|
||||||
if (unlikely (!out)) return_trace (false);
|
if (unlikely (!out)) return_trace (false);
|
||||||
|
|
||||||
out->conditions.serialize_subset (c->subset_context, conditions, base, c);
|
out->conditions.serialize_subset (c->subset_context, conditions, base, c, insert_catch_all);
|
||||||
out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);
|
out->substitutions.serialize_subset (c->subset_context, substitutions, base, c, insert_catch_all);
|
||||||
|
|
||||||
return_trace (true);
|
return_trace (true);
|
||||||
}
|
}
|
||||||
@ -3771,9 +3843,8 @@ struct FeatureVariations
|
|||||||
if (c->universal)
|
if (c->universal)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (c->variation_applied && !c->universal &&
|
if (c->universal || c->record_cond_idx_map->is_empty ())
|
||||||
!c->record_cond_idx_map->is_empty ())
|
c->catch_all_record_feature_idxes.reset ();
|
||||||
c->insert_catch_all_feature_variation_record = true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
FeatureVariations* copy (hb_serialize_context_t *c) const
|
FeatureVariations* copy (hb_serialize_context_t *c) const
|
||||||
@ -3783,11 +3854,17 @@ struct FeatureVariations
|
|||||||
}
|
}
|
||||||
|
|
||||||
void collect_lookups (const hb_set_t *feature_indexes,
|
void collect_lookups (const hb_set_t *feature_indexes,
|
||||||
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
|
const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
|
||||||
hb_set_t *lookup_indexes /* OUT */) const
|
hb_set_t *lookup_indexes /* OUT */) const
|
||||||
{
|
{
|
||||||
for (const FeatureVariationRecord& r : varRecords)
|
unsigned count = varRecords.len;
|
||||||
r.collect_lookups (this, feature_indexes, feature_substitutes_map, lookup_indexes);
|
for (unsigned int i = 0; i < count; i++)
|
||||||
|
{
|
||||||
|
if (feature_record_cond_idx_map &&
|
||||||
|
!feature_record_cond_idx_map->has (i))
|
||||||
|
continue;
|
||||||
|
varRecords[i].collect_lookups (this, feature_indexes, lookup_indexes);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void closure_features (const hb_map_t *lookup_indexes,
|
void closure_features (const hb_map_t *lookup_indexes,
|
||||||
@ -3832,6 +3909,13 @@ struct FeatureVariations
|
|||||||
l->cur_feature_var_record_idx = i;
|
l->cur_feature_var_record_idx = i;
|
||||||
subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
|
subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (out->varRecords.len && !l->catch_all_record_feature_idxes->is_empty ())
|
||||||
|
{
|
||||||
|
bool insert_catch_all_record = true;
|
||||||
|
subset_record_array (l, &(out->varRecords), this, insert_catch_all_record) (varRecords[0]);
|
||||||
|
}
|
||||||
|
|
||||||
return_trace (bool (out->varRecords));
|
return_trace (bool (out->varRecords));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -3839,6 +3923,7 @@ struct FeatureVariations
|
|||||||
{
|
{
|
||||||
TRACE_SANITIZE (this);
|
TRACE_SANITIZE (this);
|
||||||
return_trace (version.sanitize (c) &&
|
return_trace (version.sanitize (c) &&
|
||||||
|
hb_barrier () &&
|
||||||
likely (version.major == 1) &&
|
likely (version.major == 1) &&
|
||||||
varRecords.sanitize (c, this));
|
varRecords.sanitize (c, this));
|
||||||
}
|
}
|
||||||
|
@@ -2051,6 +2051,7 @@ struct Rule
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 c->check_range (inputZ.arrayZ,
 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
 LookupRecord::static_size * lookupCount));
@@ -2826,6 +2827,7 @@ struct ContextFormat3
 {
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this))) return_trace (false);
+hb_barrier ();
 unsigned int count = glyphCount;
 if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
 if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
@@ -3219,10 +3221,13 @@ struct ChainRule
 TRACE_SANITIZE (this);
 /* Hyper-optimized sanitized because this is really hot. */
 if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
+hb_barrier ();
 const auto &input = StructAfter<decltype (inputX)> (backtrack);
 if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
+hb_barrier ();
 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
 if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
+hb_barrier ();
 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
 return_trace (likely (lookup.sanitize (c)));
 }
@@ -4121,11 +4126,14 @@ struct ChainContextFormat3
 {
 TRACE_SANITIZE (this);
 if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
+hb_barrier ();
 const auto &input = StructAfter<decltype (inputX)> (backtrack);
 if (unlikely (!input.sanitize (c, this))) return_trace (false);
+hb_barrier ();
 if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
 if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
+hb_barrier ();
 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
 return_trace (likely (lookup.sanitize (c)));
 }
@@ -4209,6 +4217,7 @@ struct ExtensionFormat1
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 extensionLookupType != T::SubTable::Extension);
 }

@@ -4472,13 +4481,6 @@ struct GSUBGPOSVersion1_2
 if (!c->subset_context->serializer->extend_min (&out->featureVars))
 return_trace (false);

-// TODO(qxliu76): the current implementation doesn't correctly handle feature variations
-// that are dropped by instancing when the associated conditions don't trigger.
-// Since partial instancing isn't yet supported this isn't an issue yet but will
-// need to be fixed for partial instancing.
-
 // if all axes are pinned all feature vars are dropped.
 bool ret = !c->subset_context->plan->all_axes_pinned
 && out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
@@ -4513,6 +4515,7 @@ struct GSUBGPOS
 {
 TRACE_SANITIZE (this);
 if (unlikely (!u.version.sanitize (c))) return_trace (false);
+hb_barrier ();
 switch (u.version.major) {
 case 1: return_trace (u.version1.sanitize<TLookup> (c));
 #ifndef HB_NO_BEYOND_64K
@@ -4638,11 +4641,11 @@ struct GSUBGPOS
 }

 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
-const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
+const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
 hb_set_t *lookup_indexes /* OUT */) const
 {
 #ifndef HB_NO_VAR
-get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
+get_feature_variations ().collect_lookups (feature_indexes, feature_record_cond_idx_map, lookup_indexes);
 #endif
 }

@@ -214,6 +214,7 @@ struct JSTF
 {
 TRACE_SANITIZE (this);
 return_trace (version.sanitize (c) &&
+hb_barrier () &&
 likely (version.major == 1) &&
 scriptList.sanitize (c, this));
 }
@@ -333,6 +333,7 @@ struct MathKern
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 c->check_array (mathValueRecordsZ.arrayZ, 2 * heightCount + 1) &&
 sanitize_math_value_records (c));
 }
@@ -984,6 +985,7 @@ struct MathVariants
 return_trace (c->check_struct (this) &&
 vertGlyphCoverage.sanitize (c, this) &&
 horizGlyphCoverage.sanitize (c, this) &&
+hb_barrier () &&
 c->check_array (glyphConstruction.arrayZ, vertGlyphCount + horizGlyphCount) &&
 sanitize_offsets (c));
 }
@@ -1103,6 +1105,7 @@ struct MATH
 TRACE_SANITIZE (this);
 return_trace (version.sanitize (c) &&
 likely (version.major == 1) &&
+hb_barrier () &&
 mathConstants.sanitize (c, this) &&
 mathGlyphInfo.sanitize (c, this) &&
 mathVariants.sanitize (c, this));
@@ -85,7 +85,7 @@ struct maxp
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();
 if (version.major == 1)
 {
 const maxpV1Tail &v1 = StructAfter<maxpV1Tail> (*this);
@@ -103,6 +103,7 @@ struct maxp
 maxp_prime->numGlyphs = hb_min (c->plan->num_output_glyphs (), 0xFFFFu);
 if (maxp_prime->version.major == 1)
 {
+hb_barrier ();
 const maxpV1Tail *src_v1 = &StructAfter<maxpV1Tail> (*this);
 maxpV1Tail *dest_v1 = c->serializer->embed<maxpV1Tail> (src_v1);
 if (unlikely (!dest_v1)) return_trace (false);
@@ -51,6 +51,7 @@ struct DataMap
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 dataZ.sanitize (c, base, dataLength)));
 }

@@ -101,6 +102,7 @@ struct meta
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version == 1 &&
 dataMaps.sanitize (c, this)));
 }
24 src/3rdparty/harfbuzz-ng/src/hb-ot-os2-table.hh (vendored)
@@ -209,6 +209,23 @@ struct OS2
 return ret;
 }

+static unsigned calc_avg_char_width (const hb_hashmap_t<hb_codepoint_t, hb_pair_t<unsigned, int>>& hmtx_map)
+{
+unsigned num = 0;
+unsigned total_width = 0;
+for (const auto& _ : hmtx_map.values_ref ())
+{
+unsigned width = _.first;
+if (width)
+{
+total_width += width;
+num++;
+}
+}
+
+return num ? (unsigned) roundf (total_width / num) : 0;
+}
+
 bool subset (hb_subset_context_t *c) const
 {
 TRACE_SUBSET (this);
@@ -239,10 +256,16 @@ struct OS2

 if (os2_prime->version >= 2)
 {
+hb_barrier ();
 auto *table = & const_cast<OS2V2Tail &> (os2_prime->v2 ());
 HB_ADD_MVAR_VAR (HB_OT_METRICS_TAG_X_HEIGHT, sxHeight);
 HB_ADD_MVAR_VAR (HB_OT_METRICS_TAG_CAP_HEIGHT, sCapHeight);
 }
+
+unsigned avg_char_width = calc_avg_char_width (c->plan->hmtx_map);
+if (!c->serializer->check_assign (os2_prime->xAvgCharWidth, avg_char_width,
+HB_SERIALIZE_ERROR_INT_OVERFLOW))
+return_trace (false);
 }
 #endif

@@ -334,6 +357,7 @@ struct OS2
 {
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this))) return_trace (false);
+hb_barrier ();
 if (unlikely (version >= 1 && !v1X.sanitize (c))) return_trace (false);
 if (unlikely (version >= 2 && !v2X.sanitize (c))) return_trace (false);
 if (unlikely (version >= 5 && !v5X.sanitize (c))) return_trace (false);
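The new calc_avg_char_width helper above recomputes OS/2 xAvgCharWidth for the subset font: only glyphs with a non-zero advance width contribute, and the mean is rounded. A minimal standalone sketch of the same averaging rule, using illustrative names rather than the HarfBuzz API:

    #include <cmath>
    #include <vector>

    // Average of the non-zero advance widths, rounded; returns 0 when every
    // width is zero (mirrors the helper shown in the hunk above).
    static unsigned average_nonzero_width (const std::vector<unsigned> &advance_widths)
    {
      unsigned num = 0, total = 0;
      for (unsigned w : advance_widths)
        if (w) { total += w; num++; }
      return num ? (unsigned) std::roundf ((float) total / num) : 0;
    }

    // average_nonzero_width ({600, 0, 500, 700}) == 600: the zero-width glyph is skipped.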
18 src/3rdparty/harfbuzz-ng/src/hb-ot-post-table.hh (vendored)
@@ -122,7 +122,10 @@ struct post
 }

 if (glyph_names && version.major == 2)
+{
+hb_barrier ();
 return_trace (v2X.subset (c));
+}

 return_trace (true);
 }
@@ -138,6 +141,7 @@ struct post

 version = table->version.to_int ();
 if (version != 0x00020000) return;
+hb_barrier ();

 const postV2Tail &v2 = table->v2X;

@@ -217,10 +221,16 @@ struct post
 unsigned int get_glyph_count () const
 {
 if (version == 0x00010000)
+{
+hb_barrier ();
 return format1_names_length;
+}

 if (version == 0x00020000)
+{
+hb_barrier ();
 return glyphNameIndex->len;
+}

 return 0;
 }
@@ -245,13 +255,18 @@ struct post
 {
 if (version == 0x00010000)
 {
+hb_barrier ();
 if (glyph >= format1_names_length)
 return hb_bytes_t ();

 return format1_names (glyph);
 }

-if (version != 0x00020000 || glyph >= glyphNameIndex->len)
+if (version != 0x00020000)
+return hb_bytes_t ();
+hb_barrier ();
+
+if (glyph >= glyphNameIndex->len)
 return hb_bytes_t ();

 unsigned int index = glyphNameIndex->arrayZ[glyph];
@@ -284,6 +299,7 @@ struct post
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 (version.to_int () == 0x00010000 ||
 (version.to_int () == 0x00020000 && v2X.sanitize (c)) ||
 version.to_int () == 0x00030000));
@@ -327,6 +327,7 @@ struct AxisValueFormat4
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 axisValues.sanitize (c, axisCount)));
 }

@@ -416,6 +417,7 @@ struct AxisValue
 TRACE_SANITIZE (this);
 if (unlikely (!c->check_struct (this)))
 return_trace (false);
+hb_barrier ();

 switch (u.format)
 {
@@ -560,6 +562,7 @@ struct STAT
 {
 TRACE_SANITIZE (this);
 return_trace (likely (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 version.minor > 0 &&
 designAxesOffset.sanitize (c, this, designAxisCount) &&
@@ -273,6 +273,7 @@ struct avar
 {
 TRACE_SANITIZE (this);
 if (!(version.sanitize (c) &&
+hb_barrier () &&
 (version.major == 1
 #ifndef HB_NO_AVAR2
 || version.major == 2
@@ -293,6 +294,7 @@ struct avar
 #ifndef HB_NO_AVAR2
 if (version.major < 2)
 return_trace (true);
+hb_barrier ();

 const auto &v2 = * (const avarV2Tail *) map;
 if (unlikely (!v2.sanitize (c, this)))
@@ -316,6 +318,7 @@ struct avar
 #ifndef HB_NO_AVAR2
 if (version.major < 2)
 return;
+hb_barrier ();

 for (; count < axisCount; count++)
 map = &StructAfter<SegmentMaps> (*map);
77 src/3rdparty/harfbuzz-ng/src/hb-ot-var-common.hh (vendored)
@@ -119,6 +119,7 @@ struct DeltaSetIndexMapFormat01
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 c->check_range (mapDataZ.arrayZ,
 mapCount,
 get_width ()));
@@ -191,6 +192,7 @@ struct DeltaSetIndexMap
 {
 TRACE_SANITIZE (this);
 if (!u.format.sanitize (c)) return_trace (false);
+hb_barrier ();
 switch (u.format) {
 case 0: return_trace (u.format0.sanitize (c));
 case 1: return_trace (u.format1.sanitize (c));
@@ -434,6 +436,8 @@ enum packed_delta_flag_t

 struct tuple_delta_t
 {
+static constexpr bool realloc_move = true; // Watch out when adding new members!
+
 public:
 hb_hashmap_t<hb_tag_t, Triple> axis_tuples;

@@ -514,14 +518,19 @@ struct tuple_delta_t
 return *this;

 unsigned num = indices.length;
-for (unsigned i = 0; i < num; i++)
-{
-if (!indices.arrayZ[i]) continue;
-deltas_x[i] *= scalar;
-if (deltas_y)
-deltas_y[i] *= scalar;
-}
+if (deltas_y)
+for (unsigned i = 0; i < num; i++)
+{
+if (!indices.arrayZ[i]) continue;
+deltas_x[i] *= scalar;
+deltas_y[i] *= scalar;
+}
+else
+for (unsigned i = 0; i < num; i++)
+{
+if (!indices.arrayZ[i]) continue;
+deltas_x[i] *= scalar;
+}
 return *this;
 }

@@ -767,7 +776,7 @@ struct tuple_delta_t
 unsigned encoded_len = 0;
 while (i < num_deltas)
 {
-int val = deltas[i];
+int val = deltas.arrayZ[i];
 if (val == 0)
 encoded_len += encode_delta_run_as_zeroes (i, encoded_bytes.sub_array (encoded_len), deltas);
 else if (val >= -128 && val <= 127)
@@ -786,7 +795,7 @@ struct tuple_delta_t
 unsigned run_length = 0;
 auto it = encoded_bytes.iter ();
 unsigned encoded_len = 0;
-while (i < num_deltas && deltas[i] == 0)
+while (i < num_deltas && deltas.arrayZ[i] == 0)
 {
 i++;
 run_length++;
@@ -815,13 +824,13 @@ struct tuple_delta_t
 unsigned num_deltas = deltas.length;
 while (i < num_deltas)
 {
-int val = deltas[i];
+int val = deltas.arrayZ[i];
 if (val > 127 || val < -128)
 break;

 /* from fonttools: if there're 2 or more zeros in a sequence,
 * it is better to start a new run to save bytes. */
-if (val == 0 && i + 1 < num_deltas && deltas[i+1] == 0)
+if (val == 0 && i + 1 < num_deltas && deltas.arrayZ[i+1] == 0)
 break;

 i++;
@@ -838,7 +847,7 @@ struct tuple_delta_t

 for (unsigned j = 0; j < 64; j++)
 {
-*it++ = static_cast<char> (deltas[start + j]);
+*it++ = static_cast<char> (deltas.arrayZ[start + j]);
 encoded_len++;
 }

@@ -853,7 +862,7 @@ struct tuple_delta_t

 while (start < i)
 {
-*it++ = static_cast<char> (deltas[start++]);
+*it++ = static_cast<char> (deltas.arrayZ[start++]);
 encoded_len++;
 }
 }
@@ -869,8 +878,8 @@ struct tuple_delta_t
 unsigned num_deltas = deltas.length;
 while (i < num_deltas)
 {
-int val = deltas[i];
+int val = deltas.arrayZ[i];

 /* start a new run for a single zero value*/
 if (val == 0) break;

@@ -879,7 +888,7 @@ struct tuple_delta_t
 * Only start a new run when there're 2 continuous such values. */
 if (val >= -128 && val <= 127 &&
 i + 1 < num_deltas &&
-deltas[i+1] >= -128 && deltas[i+1] <= 127)
+deltas.arrayZ[i+1] >= -128 && deltas.arrayZ[i+1] <= 127)
 break;

 i++;
@@ -895,7 +904,7 @@ struct tuple_delta_t

 for (unsigned j = 0; j < 64; j++)
 {
-int16_t delta_val = deltas[start + j];
+int16_t delta_val = deltas.arrayZ[start + j];
 *it++ = static_cast<char> (delta_val >> 8);
 *it++ = static_cast<char> (delta_val & 0xFF);

@@ -912,7 +921,7 @@ struct tuple_delta_t
 encoded_len++;
 while (start < i)
 {
-int16_t delta_val = deltas[start++];
+int16_t delta_val = deltas.arrayZ[start++];
 *it++ = static_cast<char> (delta_val >> 8);
 *it++ = static_cast<char> (delta_val & 0xFF);

@@ -1175,6 +1184,7 @@ struct TupleVariationData
 bool create_from_item_var_data (const VarData &var_data,
 const hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions,
 const hb_map_t& axes_old_index_tag_map,
+unsigned& item_count,
 const hb_inc_bimap_t* inner_map = nullptr)
 {
 /* NULL offset, to keep original varidx valid, just return */
@@ -1184,7 +1194,8 @@ struct TupleVariationData
 unsigned num_regions = var_data.get_region_index_count ();
 if (!tuple_vars.alloc (num_regions)) return false;

-unsigned item_count = inner_map ? inner_map->get_population () : var_data.get_item_count ();
+item_count = inner_map ? inner_map->get_population () : var_data.get_item_count ();
+if (!item_count) return true;
 unsigned row_size = var_data.get_row_size ();
 const HBUINT8 *delta_bytes = var_data.get_delta_bytes ();

@@ -1775,6 +1786,14 @@ struct item_variations_t
 * have the same num of deltas (rows) */
 hb_vector_t<tuple_variations_t> vars;

+/* num of retained rows for each subtable, there're 2 cases when var_data is empty:
+ * 1. retained item_count is zero
+ * 2. regions is empty and item_count is non-zero.
+ * when converting to tuples, both will be dropped because the tuple is empty,
+ * however, we need to retain 2. as all-zero rows to keep original varidx
+ * valid, so we need a way to remember the num of rows for each subtable */
+hb_vector_t<unsigned> var_data_num_rows;
+
 /* original region list, decompiled from item varstore, used when rebuilding
 * region list after instantiation */
 hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>> orig_region_list;
@@ -1836,22 +1855,26 @@ struct item_variations_t

 unsigned num_var_data = varStore.get_sub_table_count ();
 if (inner_maps && inner_maps.length != num_var_data) return false;
-if (!vars.alloc (num_var_data)) return false;
+if (!vars.alloc (num_var_data) ||
+!var_data_num_rows.alloc (num_var_data)) return false;

 for (unsigned i = 0; i < num_var_data; i++)
 {
 if (inner_maps && !inner_maps.arrayZ[i].get_population ())
 continue;
 tuple_variations_t var_data_tuples;
+unsigned item_count = 0;
 if (!var_data_tuples.create_from_item_var_data (varStore.get_sub_table (i),
 orig_region_list,
 axes_old_index_tag_map,
+item_count,
 inner_maps ? &(inner_maps.arrayZ[i]) : nullptr))
 return false;

+var_data_num_rows.push (item_count);
 vars.push (std::move (var_data_tuples));
 }
-return !vars.in_error ();
+return !vars.in_error () && !var_data_num_rows.in_error () && vars.length == var_data_num_rows.length;
 }

 bool instantiate_tuple_vars (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
@@ -1973,12 +1996,8 @@ struct item_variations_t
 unsigned num_cols = region_list.length;
 /* pre-alloc a 2D vector for all sub_table's VarData rows */
 unsigned total_rows = 0;
-for (unsigned major = 0; major < vars.length; major++)
-{
-const tuple_variations_t& tuples = vars[major];
-/* all tuples in each sub_table should have same num of deltas(num rows) */
-total_rows += tuples.tuple_vars[0].deltas_x.length;
-}
+for (unsigned major = 0; major < var_data_num_rows.length; major++)
+total_rows += var_data_num_rows[major];

 if (!delta_rows.resize (total_rows)) return false;
 /* init all rows to [0]*num_cols */
@@ -1998,7 +2017,7 @@ struct item_variations_t
 /* deltas are stored in tuples(column based), convert them back into items
 * (row based) delta */
 const tuple_variations_t& tuples = vars[major];
-unsigned num_rows = tuples.tuple_vars[0].deltas_x.length;
+unsigned num_rows = var_data_num_rows[major];
 for (const tuple_delta_t& tuple: tuples.tuple_vars)
 {
 if (tuple.deltas_x.length != num_rows)
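The byte-run encoder touched above keeps the fonttools heuristic quoted in its comment: a byte-sized run stops when a value leaves the int8 range, or when a zero is immediately followed by another zero, because two or more consecutive zeros are cheaper as a dedicated zero run. A small illustrative predicate capturing that rule (hypothetical helper, not part of the upstream change):

    #include <vector>

    // True if a byte-run being built over `deltas` should stop at index i,
    // per the heuristics visible in the hunks above: the value no longer fits
    // in int8, or a zero is followed by another zero (switch to a zero-run).
    static bool byte_run_should_break (const std::vector<int> &deltas, size_t i)
    {
      int val = deltas[i];
      if (val > 127 || val < -128) return true;
      return val == 0 && i + 1 < deltas.size () && deltas[i + 1] == 0;
    }

    // For {3, -2, 0, 5}: no break at index 2 (a single zero stays in the byte run).
    // For {3, -2, 0, 0, 5}: break at index 2, the two zeros become a zero run.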
@@ -45,7 +45,8 @@ struct cvar
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
-version.sanitize (c) && likely (version.major == 1) &&
+hb_barrier () &&
+likely (version.major == 1) &&
 tupleVariationData.sanitize (c));
 }

@@ -131,6 +131,7 @@ struct InstanceRecord
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 c->check_array (coordinatesZ.arrayZ, axis_count));
 }

@@ -277,8 +278,10 @@ struct fvar
 {
 TRACE_SANITIZE (this);
 return_trace (version.sanitize (c) &&
+hb_barrier () &&
 likely (version.major == 1) &&
 c->check_struct (this) &&
+hb_barrier () &&
 axisSize == 20 && /* Assumed in our code. */
 instanceSize >= axisCount * 4 + 4 &&
 get_axes ().sanitize (c) &&
@@ -296,7 +296,9 @@ struct gvar
 bool sanitize_shallow (hb_sanitize_context_t *c) const
 {
 TRACE_SANITIZE (this);
-return_trace (c->check_struct (this) && (version.major == 1) &&
+return_trace (c->check_struct (this) &&
+hb_barrier () &&
+(version.major == 1) &&
 sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
 (is_long_offset () ?
 c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) :
@@ -426,7 +428,10 @@ struct gvar
 subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length;
 }

-bool long_offset = subset_data_size & ~0xFFFFu;
+bool long_offset = (subset_data_size & ~0xFFFFu);
+#ifdef HB_EXPERIMENTAL_API
+long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
+#endif
 out->flags = long_offset ? 1 : 0;

 HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
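In the gvar hunk above, `subset_data_size & ~0xFFFFu` is non-zero exactly when some bit above the low 16 bits of the accumulated data size is set, i.e. when short 16-bit offsets can no longer cover the data, so long offsets are selected (and the experimental IFTB flag can now force them). A quick standalone illustration of the mask test:

    #include <cassert>

    int main ()
    {
      unsigned small_size = 0xFFFFu;   // still expressible in 16 bits
      unsigned large_size = 0x10000u;  // needs a long (32-bit) offset
      assert ((small_size & ~0xFFFFu) == 0);
      assert ((large_size & ~0xFFFFu) != 0);
      return 0;
    }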
@@ -444,6 +449,8 @@ struct gvar
 hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
 }

+/* This ordering relative to the shared tuples array, which puts the glyphVariationData
+ last in the table, is required when HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is set */
 char *subset_data = c->serializer->allocate_size<char> (subset_data_size, false);
 if (!subset_data) return_trace (false);
 out->dataZ = subset_data - (char *) out;

@@ -288,6 +288,7 @@ struct HVARVVAR
 {
 TRACE_SANITIZE (this);
 return_trace (version.sanitize (c) &&
+hb_barrier () &&
 likely (version.major == 1) &&
 varStore.sanitize (c, this) &&
 advMap.sanitize (c, this) &&
@@ -77,8 +77,10 @@ struct MVAR
 {
 TRACE_SANITIZE (this);
 return_trace (version.sanitize (c) &&
+hb_barrier () &&
 likely (version.major == 1) &&
 c->check_struct (this) &&
+hb_barrier () &&
 valueRecordSize >= VariationValueRecord::static_size &&
 varStore.sanitize (c, this) &&
 c->check_range (valuesZ.arrayZ,
@@ -117,6 +117,7 @@ struct VORG
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
+hb_barrier () &&
 version.major == 1 &&
 vertYOrigins.sanitize (c));
 }

@@ -55,6 +55,9 @@ struct hb_priority_queue_t

 bool in_error () const { return heap.in_error (); }

+bool alloc (unsigned size)
+{ return heap.alloc (size); }
+
 #ifndef HB_OPTIMIZE_SIZE
 HB_ALWAYS_INLINE
 #endif
5 src/3rdparty/harfbuzz-ng/src/hb-sanitize.hh (vendored)
@@ -134,7 +134,10 @@ struct hb_sanitize_context_t :
 const char *get_name () { return "SANITIZE"; }
 template <typename T, typename F>
 bool may_dispatch (const T *obj HB_UNUSED, const F *format)
-{ return format->sanitize (this); }
+{
+return format->sanitize (this) &&
+hb_barrier ();
+}
 static return_t default_return_value () { return true; }
 static return_t no_dispatch_return_value () { return false; }
 bool stop_sublookup_iteration (const return_t r) const { return !r; }
2 src/3rdparty/harfbuzz-ng/src/hb-set.hh (vendored)
@@ -35,6 +35,8 @@
 template <typename impl_t>
 struct hb_sparseset_t
 {
+static constexpr bool realloc_move = true;
+
 hb_object_header_t header;
 impl_t s;

55 src/3rdparty/harfbuzz-ng/src/hb-subset-cff1.cc (vendored)
@@ -620,6 +620,12 @@ struct cff1_subset_plan
 drop_hints = plan->flags & HB_SUBSET_FLAGS_NO_HINTING;
 desubroutinize = plan->flags & HB_SUBSET_FLAGS_DESUBROUTINIZE;

+#ifdef HB_EXPERIMENTAL_API
+min_charstrings_off_size = (plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS) ? 4 : 0;
+#else
+min_charstrings_off_size = 0;
+#endif
+
 subset_charset = !acc.is_predef_charset ();
 if (!subset_charset)
 /* check whether the subset renumbers any glyph IDs */
@@ -778,13 +784,43 @@ struct cff1_subset_plan
 unsigned int topDictModSIDs[name_dict_values_t::ValCount];

 bool desubroutinize = false;
+
+unsigned min_charstrings_off_size = 0;
 };
 } // namespace OT

+static bool _serialize_cff1_charstrings (hb_serialize_context_t *c,
+struct OT::cff1_subset_plan &plan,
+const OT::cff1::accelerator_subset_t &acc)
+{
+c->push<CFF1CharStrings> ();
+
+unsigned data_size = 0;
+unsigned total_size = CFF1CharStrings::total_size (plan.subset_charstrings, &data_size, plan.min_charstrings_off_size);
+if (unlikely (!c->start_zerocopy (total_size)))
+return false;
+
+auto *cs = c->start_embed<CFF1CharStrings> ();
+if (unlikely (!cs->serialize (c, plan.subset_charstrings, &data_size, plan.min_charstrings_off_size))) {
+c->pop_discard ();
+return false;
+}
+
+plan.info.char_strings_link = c->pop_pack (false);
+return true;
+}
+
 bool
 OT::cff1::accelerator_subset_t::serialize (hb_serialize_context_t *c,
 struct OT::cff1_subset_plan &plan) const
 {
+/* push charstrings onto the object stack first which will ensure it packs as the last
+ object in the table. Keeping the chastrings last satisfies the requirements for patching
+ via IFTB. If this ordering needs to be changed in the future, charstrings should be left
+ at the end whenever HB_SUBSET_FLAGS_ITFB_REQUIREMENTS is enabled. */
+if (!_serialize_cff1_charstrings(c, plan, *this))
+return false;
+
 /* private dicts & local subrs */
 for (int i = (int) privateDicts.length; --i >= 0 ;)
 {
@@ -823,25 +859,6 @@ OT::cff1::accelerator_subset_t::serialize (hb_serialize_context_t *c,
 if (!is_CID ())
 plan.info.privateDictInfo = plan.fontdicts_mod[0].privateDictInfo;

-/* CharStrings */
-{
-c->push<CFF1CharStrings> ();
-
-unsigned data_size = 0;
-unsigned total_size = CFF1CharStrings::total_size (plan.subset_charstrings, &data_size);
-if (unlikely (!c->start_zerocopy (total_size)))
-return false;
-
-auto *cs = c->start_embed<CFF1CharStrings> ();
-if (likely (cs->serialize (c, plan.subset_charstrings, &data_size)))
-plan.info.char_strings_link = c->pop_pack (false);
-else
-{
-c->pop_discard ();
-return false;
-}
-}
-
 /* FDArray (FD Index) */
 if (fdArray != &Null (CFF1FDArray))
 {
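The refactor above moves CharStrings serialization into a helper that runs before everything else because, per the comment carried in the hunk, objects pushed and packed first end up at the tail of the final table, which is what the IFTB patching requirement needs. A toy model of that ordering rule only (illustrative; it is not the hb_serialize_context_t implementation):

    #include <string>
    #include <vector>

    // Pack back to front: the object pushed first lands last in the blob,
    // mirroring the "pushed first -> packed last" rule stated in the comment.
    static std::string pack_all (const std::vector<std::string> &pushed_in_order)
    {
      std::string blob;
      for (auto it = pushed_in_order.rbegin (); it != pushed_in_order.rend (); ++it)
        blob += *it;
      return blob;
    }

    // pack_all ({"CharStrings", "PrivateDict", "TopDict"}) == "TopDictPrivateDictCharStrings"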
56 src/3rdparty/harfbuzz-ng/src/hb-subset-cff2.cc (vendored)
@@ -439,6 +439,12 @@ struct cff2_subset_plan
 desubroutinize = plan->flags & HB_SUBSET_FLAGS_DESUBROUTINIZE ||
 pinned; // For instancing we need this path

+#ifdef HB_EXPERIMENTAL_API
+min_charstrings_off_size = (plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS) ? 4 : 0;
+#else
+min_charstrings_off_size = 0;
+#endif
+
 if (desubroutinize)
 {
 /* Flatten global & local subrs */
@@ -510,14 +516,45 @@ struct cff2_subset_plan

 bool drop_hints = false;
 bool desubroutinize = false;
+
+unsigned min_charstrings_off_size = 0;
 };
 } // namespace OT

+static bool _serialize_cff2_charstrings (hb_serialize_context_t *c,
+cff2_subset_plan &plan,
+const OT::cff2::accelerator_subset_t &acc)
+{
+c->push ();
+
+unsigned data_size = 0;
+unsigned total_size = CFF2CharStrings::total_size (plan.subset_charstrings, &data_size, plan.min_charstrings_off_size);
+if (unlikely (!c->start_zerocopy (total_size)))
+return false;
+
+auto *cs = c->start_embed<CFF2CharStrings> ();
+if (unlikely (!cs->serialize (c, plan.subset_charstrings, &data_size, plan.min_charstrings_off_size)))
+{
+c->pop_discard ();
+return false;
+}
+
+plan.info.char_strings_link = c->pop_pack (false);
+return true;
+}
+
 bool
 OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
 struct cff2_subset_plan &plan,
 hb_array_t<int> normalized_coords) const
 {
+/* push charstrings onto the object stack first which will ensure it packs as the last
+ object in the table. Keeping the chastrings last satisfies the requirements for patching
+ via IFTB. If this ordering needs to be changed in the future, charstrings should be left
+ at the end whenever HB_SUBSET_FLAGS_ITFB_REQUIREMENTS is enabled. */
+if (!_serialize_cff2_charstrings(c, plan, *this))
+return false;
+
 /* private dicts & local subrs */
 hb_vector_t<table_info_t> private_dict_infos;
 if (unlikely (!private_dict_infos.resize (plan.subset_fdcount))) return false;
@@ -556,25 +593,6 @@ OT::cff2::accelerator_subset_t::serialize (hb_serialize_context_t *c,
 }
 }

-/* CharStrings */
-{
-c->push ();
-
-unsigned data_size = 0;
-unsigned total_size = CFF2CharStrings::total_size (plan.subset_charstrings, &data_size);
-if (unlikely (!c->start_zerocopy (total_size)))
-return false;
-
-auto *cs = c->start_embed<CFF2CharStrings> ();
-if (likely (cs->serialize (c, plan.subset_charstrings, &data_size)))
-plan.info.char_strings_link = c->pop_pack (false);
-else
-{
-c->pop_discard ();
-return false;
-}
-}
-
 /* FDSelect */
 if (fdSelect != &Null (CFF2FDSelect))
 {
@ -123,6 +123,12 @@ hb_subset_input_t::hb_subset_input_t ()
|
|||||||
//justify
|
//justify
|
||||||
HB_TAG ('j', 'a', 'l', 't'), // HarfBuzz doesn't use; others might
|
HB_TAG ('j', 'a', 'l', 't'), // HarfBuzz doesn't use; others might
|
||||||
|
|
||||||
|
//East Asian spacing
|
||||||
|
HB_TAG ('c', 'h', 'w', 's'),
|
||||||
|
HB_TAG ('v', 'c', 'h', 'w'),
|
||||||
|
HB_TAG ('h', 'a', 'l', 't'),
|
||||||
|
HB_TAG ('v', 'h', 'a', 'l'),
|
||||||
|
|
||||||
//private
|
//private
|
||||||
HB_TAG ('H', 'a', 'r', 'f'),
|
HB_TAG ('H', 'a', 'r', 'f'),
|
||||||
HB_TAG ('H', 'A', 'R', 'F'),
|
HB_TAG ('H', 'A', 'R', 'F'),
|
||||||
|
@@ -168,12 +168,14 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
      *                  |
      *                crossing
      */
-    if (gain > outGain)
+    if (gain >= outGain)
     {
+      // Note that this is the branch taken if both gain and outGain are 0.
+
       // Crossing point on the axis.
       float crossing = peak + (1 - gain) * (upper - peak);
 
-      Triple loc{axisDef, peak, crossing};
+      Triple loc{hb_max (lower, axisDef), peak, crossing};
       float scalar = 1.f;
 
       // The part before the crossing point.
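A quick numeric spot-check of the crossing expression used in this branch, with made-up tent values (peak = 0.5, upper = 1.0, gain = 0.6):

```cpp
// Numeric spot-check of crossing = peak + (1 - gain) * (upper - peak), with made-up values.
#include <cassert>
#include <cmath>

int main ()
{
  float peak = 0.5f, upper = 1.0f, gain = 0.6f;        // hypothetical tent values
  float crossing = peak + (1 - gain) * (upper - peak); // 0.5 + 0.4 * 0.5
  assert (std::fabs (crossing - 0.7f) < 1e-6f);
  return 0;
}
```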
@@ -253,7 +255,7 @@ _solve (Triple tent, Triple axisLimit, bool negative = false)
      *   axisDef          axisMax
      */
     float newUpper = peak + (1 - gain) * (upper - peak);
-    assert (axisMax <= newUpper); // Because outGain >= gain
+    assert (axisMax <= newUpper); // Because outGain > gain
     if (newUpper <= axisDef + (axisMax - axisDef) * 2)
     {
       upper = newUpper;
@@ -70,6 +70,9 @@ HB_SUBSET_PLAN_MEMBER (hb_set_t, _glyphset_colred)
 HB_SUBSET_PLAN_MEMBER (hb_map_t, gsub_lookups)
 HB_SUBSET_PLAN_MEMBER (hb_map_t, gpos_lookups)
 
+//use_mark_sets mapping: old->new
+HB_SUBSET_PLAN_MEMBER (hb_map_t, used_mark_sets_map)
+
 //active langsys we'd like to retain
 HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::unique_ptr<hb_set_t>>), gsub_langsys)
 HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::unique_ptr<hb_set_t>>), gpos_langsys)
@@ -87,6 +90,15 @@ HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb::shared_ptr<hb_set_t>>), gpo
 HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, const OT::Feature*>), gsub_feature_substitutes_map)
 HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, const OT::Feature*>), gpos_feature_substitutes_map)
 
+// old feature_indexes set, used to reinstate the old features
+HB_SUBSET_PLAN_MEMBER (hb_set_t, gsub_old_features)
+HB_SUBSET_PLAN_MEMBER (hb_set_t, gpos_old_features)
+
+//feature_index->pair of (address of old feature, feature tag), used for inserting a catch all record
+//if necessary
+HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<const void*, const void*>)>), gsub_old_feature_idx_tag_map)
+HB_SUBSET_PLAN_MEMBER (hb_hashmap_t E(<unsigned, hb_pair_t E(<const void*, const void*>)>), gpos_old_feature_idx_tag_map)
+
 //active layers/palettes we'd like to retain
 HB_SUBSET_PLAN_MEMBER (hb_map_t, colrv1_layers)
 HB_SUBSET_PLAN_MEMBER (hb_map_t, colr_palettes)
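The new members rely on the X-macro pattern visible in the hb-subset-plan.hh hunk further down: hb-subset-plan-member-list.hh is included after #defining HB_SUBSET_PLAN_MEMBER, so the same list can expand into declarations, initializers, and so on. The sketch below condenses the same idea into one self-contained file; the TOY_* names are invented.

```cpp
// Self-contained sketch of the X-macro member-list pattern (invented TOY_* names).
#include <string>
#include <vector>

// In HarfBuzz the list lives in its own header and is included several times;
// here it is condensed into a list macro for brevity.
#define TOY_PLAN_MEMBERS(X)          \
  X (std::vector<int>, old_features) \
  X (std::string,      notes)

struct toy_plan_t
{
#define TOY_MEMBER(Type, Name) Type Name;
  TOY_PLAN_MEMBERS (TOY_MEMBER)
#undef TOY_MEMBER
};

int main ()
{
  toy_plan_t plan;
  plan.old_features.push_back (42);
  plan.notes = "declared by expanding the same list";
  return plan.old_features.size () == 1 ? 0 : 1;
}
```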
src/3rdparty/harfbuzz-ng/src/hb-subset-plan.cc (vendored): 91 changed lines
@@ -150,7 +150,8 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
                                      hb_set_t *feature_indices, /* OUT */
                                      hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* OUT */
                                      hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map, /* OUT */
-                                     bool& insert_catch_all_feature_variation_record)
+                                     hb_set_t& catch_all_record_feature_idxes, /* OUT */
+                                     hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>>& catch_all_record_idx_feature_map /* OUT */)
 {
   unsigned num_features = table.get_feature_count ();
   hb_vector_t<hb_tag_t> features;
@@ -186,7 +187,7 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
                              &plan->axes_location,
                              feature_record_cond_idx_map,
                              feature_substitutes_map,
-                             insert_catch_all_feature_variation_record,
+                             catch_all_record_feature_idxes,
                              feature_indices,
                              false,
                              false,
@@ -208,17 +209,25 @@ static void _collect_layout_indices (hb_subset_plan_t *plan,
       f->add_lookup_indexes_to (lookup_indices);
     }
 
+#ifndef HB_NO_VAR
+  if (catch_all_record_feature_idxes)
+  {
+    for (unsigned feature_index : catch_all_record_feature_idxes)
+    {
+      const OT::Feature& f = table.get_feature (feature_index);
+      f.add_lookup_indexes_to (lookup_indices);
+      const void *tag = reinterpret_cast<const void*> (&(table.get_feature_list ().get_tag (feature_index)));
+      catch_all_record_idx_feature_map.set (feature_index, hb_pair (&f, tag));
+    }
+  }
+
   // If all axes are pinned then all feature variations will be dropped so there's no need
   // to collect lookups from them.
   if (!plan->all_axes_pinned)
-  {
-    // TODO(qxliu76): this collection doesn't work correctly for feature variations that are dropped
-    // but not applied. The collection will collect and retain the lookup indices
-    // associated with those dropped but not activated rules. Since partial instancing
-    // isn't yet supported this isn't an issue yet but will need to be fixed for
-    // partial instancing.
-    table.feature_variation_collect_lookups (feature_indices, feature_substitutes_map, lookup_indices);
-  }
+    table.feature_variation_collect_lookups (feature_indices,
+                                             plan->user_axes_location.is_empty () ? nullptr: feature_record_cond_idx_map,
+                                             lookup_indices);
+#endif
 }
 
 
@@ -302,7 +311,8 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
                                   script_langsys_map *langsys_map,
                                   hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
                                   hb_hashmap_t<unsigned, const OT::Feature*> *feature_substitutes_map,
-                                  bool& insert_catch_all_feature_variation_record)
+                                  hb_set_t &catch_all_record_feature_idxes,
+                                  hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>>& catch_all_record_idx_feature_map)
 {
   hb_blob_ptr_t<T> table = plan->source_table<T> ();
   hb_tag_t table_tag = table->tableTag;
@@ -313,7 +323,8 @@ _closure_glyphs_lookups_features (hb_subset_plan_t *plan,
                            &feature_indices,
                            feature_record_cond_idx_map,
                            feature_substitutes_map,
-                           insert_catch_all_feature_variation_record);
+                           catch_all_record_feature_idxes,
+                           catch_all_record_idx_feature_map);
 
   if (table_tag == HB_OT_TAG_GSUB && !(plan->flags & HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE))
     hb_ot_layout_lookups_substitute_closure (plan->source,
@@ -465,6 +476,24 @@ _math_closure (hb_subset_plan_t *plan,
   math.destroy ();
 }
 
+static inline void
+_remap_used_mark_sets (hb_subset_plan_t *plan,
+                       hb_map_t& used_mark_sets_map)
+{
+  hb_blob_ptr_t<OT::GDEF> gdef = plan->source_table<OT::GDEF> ();
+
+  if (!gdef->has_data () || !gdef->has_mark_glyph_sets ())
+  {
+    gdef.destroy ();
+    return;
+  }
+
+  hb_set_t used_mark_sets;
+  gdef->get_mark_glyph_sets ().collect_used_mark_sets (plan->_glyphset_gsub, used_mark_sets);
+  gdef.destroy ();
+
+  _remap_indexes (&used_mark_sets, &used_mark_sets_map);
+}
+
 static inline void
 _remove_invalid_gids (hb_set_t *glyphs,
@@ -578,14 +607,18 @@ _populate_unicodes_to_retain (const hb_set_t *unicodes,
   else
   {
     plan->codepoint_to_glyph->alloc (cmap_unicodes->get_population ());
-    for (hb_codepoint_t cp : *cmap_unicodes)
+    hb_codepoint_t first = HB_SET_VALUE_INVALID, last = HB_SET_VALUE_INVALID;
+    for (; cmap_unicodes->next_range (&first, &last); )
     {
-      hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
-      if (!unicodes->has (cp) && !glyphs->has (gid))
-        continue;
+      for (unsigned cp = first; cp <= last; cp++)
+      {
+        hb_codepoint_t gid = (*unicode_glyphid_map)[cp];
+        if (!unicodes->has (cp) && !glyphs->has (gid))
+          continue;
 
       plan->codepoint_to_glyph->set (cp, gid);
       plan->unicode_to_new_gid_list.push (hb_pair (cp, gid));
+      }
     }
   }
 
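The rewritten loop walks the cmap code point set range by range and then runs a plain integer loop inside each range, instead of advancing the set iterator once per code point. The same range iteration is available on the public hb_set_t API; a small sketch:

```cpp
// Sketch: iterating an hb_set_t by ranges with the public API.
#include <hb.h>
#include <stdio.h>

static void dump_ranges (const hb_set_t *set)
{
  hb_codepoint_t first = HB_SET_VALUE_INVALID, last = HB_SET_VALUE_INVALID;
  while (hb_set_next_range (set, &first, &last))
    printf ("U+%04X..U+%04X\n", first, last);
}

int main ()
{
  hb_set_t *set = hb_set_create ();
  hb_set_add_range (set, 0x0041, 0x005A); // A..Z
  hb_set_add (set, 0x20AC);               // EURO SIGN
  dump_ranges (set);
  hb_set_destroy (set);
  return 0;
}
```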
@@ -714,7 +747,8 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
                                             &plan->gsub_langsys,
                                             &plan->gsub_feature_record_cond_idx_map,
                                             &plan->gsub_feature_substitutes_map,
-                                            plan->gsub_insert_catch_all_feature_variation_rec);
+                                            plan->gsub_old_features,
+                                            plan->gsub_old_feature_idx_tag_map);
 
   if (!drop_tables->has (HB_OT_TAG_GPOS))
     _closure_glyphs_lookups_features<GPOS> (
@@ -725,7 +759,8 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
                                             &plan->gpos_langsys,
                                             &plan->gpos_feature_record_cond_idx_map,
                                             &plan->gpos_feature_substitutes_map,
-                                            plan->gpos_insert_catch_all_feature_variation_rec);
+                                            plan->gpos_old_features,
+                                            plan->gpos_old_feature_idx_tag_map);
 #endif
   _remove_invalid_gids (&plan->_glyphset_gsub, plan->source->get_num_glyphs ());
 
@@ -814,12 +849,12 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
 
   if (retain_gids)
   {
     DEBUG_MSG (SUBSET, nullptr,
                "HB_SUBSET_FLAGS_RETAIN_GIDS cannot be set if "
                "a custom glyph mapping has been provided.");
     return false;
   }
 
   hb_codepoint_t max_glyph = 0;
   hb_set_t remaining;
   for (auto old_gid : all_gids_to_retain->iter ())
@@ -871,9 +906,11 @@ _create_old_gid_to_new_gid_map (const hb_face_t *face,
     *num_glyphs = max_glyph + 1;
   }
 
+  reverse_glyph_map->alloc (reverse_glyph_map->get_population () + new_to_old_gid_list->length);
   + hb_iter (new_to_old_gid_list)
   | hb_sink (reverse_glyph_map)
   ;
+  glyph_map->alloc (glyph_map->get_population () + new_to_old_gid_list->length);
   + hb_iter (new_to_old_gid_list)
   | hb_map (&hb_codepoint_pair_t::reverse)
   | hb_sink (glyph_map)
@@ -969,7 +1006,7 @@ _update_instance_metrics_map_from_cff2 (hb_subset_plan_t *plan)
   float *hvar_store_cache = nullptr;
   if (_hmtx.has_data () && _hmtx.var_table.get_length ())
     hvar_store_cache = _hmtx.var_table->get_var_store ().create_cache ();
 
   OT::vmtx_accelerator_t _vmtx (plan->source);
   float *vvar_store_cache = nullptr;
   if (_vmtx.has_data () && _vmtx.var_table.get_length ())
@@ -1093,6 +1130,7 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
   user_axes_location = input->axes_location;
   all_axes_pinned = false;
   pinned_at_default = true;
+  has_gdef_varstore = false;
 
 #ifdef HB_EXPERIMENTAL_API
   for (auto _ : input->name_table_overrides)
@@ -1112,6 +1150,10 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
 
   attach_accelerator_data = input->attach_accelerator_data;
   force_long_loca = input->force_long_loca;
+#ifdef HB_EXPERIMENTAL_API
+  force_long_loca = force_long_loca || (flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
+#endif
 
   if (accel)
     accelerator = (hb_subset_accelerator_t*) accel;
 
@@ -1160,6 +1202,9 @@ hb_subset_plan_t::hb_subset_plan_t (hb_face_t *face,
   for (auto &v : bounds_height_vec)
     v = 0xFFFFFFFF;
 
+  if (!drop_tables.has (HB_OT_TAG_GDEF))
+    _remap_used_mark_sets (this, used_mark_sets_map);
+
   if (unlikely (in_error ()))
     return;
 
|
@ -147,6 +147,9 @@ struct hb_subset_plan_t
|
|||||||
bool gsub_insert_catch_all_feature_variation_rec;
|
bool gsub_insert_catch_all_feature_variation_rec;
|
||||||
bool gpos_insert_catch_all_feature_variation_rec;
|
bool gpos_insert_catch_all_feature_variation_rec;
|
||||||
|
|
||||||
|
// whether GDEF VarStore is retained
|
||||||
|
mutable bool has_gdef_varstore;
|
||||||
|
|
||||||
#define HB_SUBSET_PLAN_MEMBER(Type, Name) Type Name;
|
#define HB_SUBSET_PLAN_MEMBER(Type, Name) Type Name;
|
||||||
#include "hb-subset-plan-member-list.hh"
|
#include "hb-subset-plan-member-list.hh"
|
||||||
#undef HB_SUBSET_PLAN_MEMBER
|
#undef HB_SUBSET_PLAN_MEMBER
|
||||||
|
src/3rdparty/harfbuzz-ng/src/hb-subset.cc (vendored): 3 changed lines
@@ -460,9 +460,10 @@ _dependencies_satisfied (hb_subset_plan_t *plan, hb_tag_t tag,
   case HB_OT_TAG_hmtx:
   case HB_OT_TAG_vmtx:
   case HB_OT_TAG_maxp:
+  case HB_OT_TAG_OS2:
     return !plan->normalized_coords || !pending_subset_tags.has (HB_OT_TAG_glyf);
   case HB_OT_TAG_GPOS:
-    return !plan->normalized_coords || plan->all_axes_pinned || !pending_subset_tags.has (HB_OT_TAG_GDEF);
+    return plan->all_axes_pinned || !pending_subset_tags.has (HB_OT_TAG_GDEF);
   default:
     return true;
   }
src/3rdparty/harfbuzz-ng/src/hb-subset.h (vendored): 6 changed lines
@@ -73,6 +73,9 @@ typedef struct hb_subset_plan_t hb_subset_plan_t;
  * OS/2 will not be recalculated.
  * @HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE: If set don't perform glyph closure on layout
  * substitution rules (GSUB). Since: 7.2.0.
+ * @HB_SUBSET_FLAGS_IFTB_REQUIREMENTS: If set enforce requirements on the output subset
+ * to allow it to be used with incremental font transfer IFTB patches. Primarily,
+ * this forces all outline data to use long (32 bit) offsets. Since: EXPERIMENTAL
  *
  * List of boolean properties that can be configured on the subset input.
  *
@@ -90,6 +93,9 @@ typedef enum { /*< flags >*/
   HB_SUBSET_FLAGS_GLYPH_NAMES =             0x00000080u,
   HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES = 0x00000100u,
   HB_SUBSET_FLAGS_NO_LAYOUT_CLOSURE =       0x00000200u,
+#ifdef HB_EXPERIMENTAL_API
+  HB_SUBSET_FLAGS_IFTB_REQUIREMENTS =       0x00000400u,
+#endif
 } hb_subset_flags_t;
 
 /**
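HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is only exposed when HarfBuzz is built, and the header consumed, with HB_EXPERIMENTAL_API defined. A hedged sketch of how a caller might combine it with the other public subsetting entry points; the code point and the surrounding function are placeholders:

```cpp
// Sketch: running the subsetter with flags; HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is only
// available in builds with HB_EXPERIMENTAL_API defined.
#include <hb-subset.h>

static hb_blob_t *subset_for_iftb (hb_face_t *face)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return nullptr;

  hb_set_add (hb_subset_input_unicode_set (input), 0x0041); // placeholder code point

  unsigned flags = HB_SUBSET_FLAGS_RETAIN_GIDS;
#ifdef HB_EXPERIMENTAL_API
  flags |= HB_SUBSET_FLAGS_IFTB_REQUIREMENTS;               // forces long (32 bit) outline offsets
#endif
  hb_subset_input_set_flags (input, flags);

  hb_face_t *subset = hb_subset_or_fail (face, input);
  hb_subset_input_destroy (input);
  if (!subset) return nullptr;

  hb_blob_t *blob = hb_face_reference_blob (subset);
  hb_face_destroy (subset);
  return blob;
}
```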
src/3rdparty/harfbuzz-ng/src/hb-vector.hh (vendored): 7 changed lines
@@ -37,6 +37,8 @@ template <typename Type,
           bool sorted=false>
 struct hb_vector_t
 {
+  static constexpr bool realloc_move = true;
+
   typedef Type item_t;
   static constexpr unsigned item_size = hb_static_size (Type);
   using array_t = typename std::conditional<sorted, hb_sorted_array_t<Type>, hb_array_t<Type>>::type;
@@ -268,10 +270,9 @@ struct hb_vector_t
     }
     return new_array;
   }
 
-  /* Specialization for hb_vector_t<hb_{vector,array}_t<U>> to speed up. */
+  /* Specialization for types that can be moved using realloc(). */
   template <typename T = Type,
-            hb_enable_if (hb_is_same (T, hb_vector_t<typename T::item_t>) ||
-                          hb_is_same (T, hb_array_t <typename T::item_t>))>
+            hb_enable_if (T::realloc_move)>
   Type *
   realloc_vector (unsigned new_allocated, hb_priority<1>)
   {
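The new realloc_move member is an opt-in trait: any type that declares it true is relocated with realloc() instead of per-element move construction, and the hb_enable_if/hb_priority machinery above selects the matching realloc_vector overload. A standalone sketch of the same trait-gated dispatch using std::enable_if; the names are invented and this is not hb_vector_t's implementation:

```cpp
// Standalone sketch of trait-gated relocation (invented names, not hb_vector_t).
#include <cstdlib>
#include <new>
#include <type_traits>
#include <utility>

template <typename T, typename = void>
struct is_realloc_movable : std::false_type {};

template <typename T>
struct is_realloc_movable<T, std::enable_if_t<T::realloc_move>> : std::true_type {};

struct Plain { static constexpr bool realloc_move = true; int v; };

template <typename T>
T *grow (T *old_array, unsigned old_len, unsigned new_cap)
{
  if constexpr (is_realloc_movable<T>::value)
  {
    // Fast path: the type declares that a byte-wise move is safe.
    return static_cast<T *> (std::realloc (old_array, new_cap * sizeof (T)));
  }
  else
  {
    // Generic path: allocate, move-construct each element, destroy and free the old storage.
    T *new_array = static_cast<T *> (std::malloc (new_cap * sizeof (T)));
    for (unsigned i = 0; i < old_len; i++)
    {
      new (new_array + i) T (std::move (old_array[i]));
      old_array[i].~T ();
    }
    std::free (old_array);
    return new_array;
  }
}

int main ()
{
  Plain *a = grow<Plain> (nullptr, 0, 4); // takes the realloc fast path
  if (!a) return 1;
  a[0].v = 1;
  int ok = (a[0].v == 1);
  std::free (a);
  return ok ? 0 : 1;
}
```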
src/3rdparty/harfbuzz-ng/src/hb-version.h (vendored): 6 changed lines
@@ -47,20 +47,20 @@ HB_BEGIN_DECLS
  *
  * The minor component of the library version available at compile-time.
  */
-#define HB_VERSION_MINOR 2
+#define HB_VERSION_MINOR 3
 
 /**
  * HB_VERSION_MICRO:
  *
  * The micro component of the library version available at compile-time.
  */
-#define HB_VERSION_MICRO 2
+#define HB_VERSION_MICRO 0
 
 /**
  * HB_VERSION_STRING:
  *
  * A string literal containing the library version available at compile-time.
  */
-#define HB_VERSION_STRING "8.2.2"
+#define HB_VERSION_STRING "8.3.0"
 
 /**
  * HB_VERSION_ATLEAST:
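With the version macros bumped, downstream code can gate on the new release either at compile time with HB_VERSION_ATLEAST or at run time with hb_version_atleast():

```cpp
// Checking for HarfBuzz 8.3.0 at compile time and at run time.
#include <hb.h>
#include <stdio.h>

int main ()
{
#if HB_VERSION_ATLEAST (8, 3, 0)
  puts ("compiled against 8.3.0 or newer headers");
#endif
  if (hb_version_atleast (8, 3, 0))
    puts ("running against 8.3.0 or newer library");
  printf ("HB_VERSION_STRING = %s\n", HB_VERSION_STRING);
  return 0;
}
```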