[vector] Use Crap pool in push() as well

Behdad Esfahbod 2018-06-01 17:48:37 -07:00
parent 975bdd5ef5
commit f7515769fd
9 changed files with 23 additions and 69 deletions
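
For background, the core change is in hb_vector_t::push (): when the underlying resize () fails, push () now returns a pointer to the "Crap" pool (a static, writable scratch object obtained via the Crap(Type) macro) instead of nullptr. Callers can therefore write through the returned pointer unconditionally, which is what lets the null checks and goto fail_features paths in the hunks below be deleted. The following is a minimal, self-contained sketch of the pattern; crap_pool, crap<>() and tiny_vector_t are illustrative stand-ins, not HarfBuzz's actual internals:

#include <cstdint>
#include <cstdlib>

/* Illustrative stand-in for the "Crap" pool: a static, writable scratch
 * area that failed allocations can hand out instead of nullptr.  Writes
 * through it are harmless and simply thrown away. */
static uint64_t crap_pool[8];

template <typename Type>
static inline Type *crap (void)
{
  static_assert (sizeof (Type) <= sizeof (crap_pool), "Type too large for scratch pool");
  return reinterpret_cast<Type *> (crap_pool);
}

template <typename Type>
struct tiny_vector_t
{
  unsigned int len = 0;
  unsigned int allocated = 0;
  Type *arrayZ = nullptr;

  bool resize (unsigned int size)
  {
    if (size > allocated)
    {
      unsigned int new_allocated = allocated ? 2 * allocated : 8;
      if (new_allocated < size) new_allocated = size;
      Type *new_array = (Type *) realloc (arrayZ, new_allocated * sizeof (Type));
      if (!new_array) return false;            /* Allocation failed. */
      arrayZ = new_array;
      allocated = new_allocated;
    }
    len = size;
    return true;
  }

  /* Never returns nullptr: on failure, hand back the scratch pool so the
   * caller can write through the result without checking it. */
  Type *push (void)
  {
    if (!resize (len + 1))
      return crap<Type> ();
    return &arrayZ[len - 1];
  }

  Type *push (const Type &v)
  {
    Type *p = push ();
    *p = v;
    return p;
  }

  void fini (void) { free (arrayZ); arrayZ = nullptr; len = allocated = 0; }
};

A failed push leaves the vector unchanged (its length does not grow), so callers that still need to notice the failure can do so after the fact; the Uniscribe hunk below, for example, falls back to num_features = 0 when range_records stays empty.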

@@ -670,15 +670,11 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
 feature_event_t *event;
 event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = features[i].start;
 event->start = true;
 event->feature = feature;
 event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = features[i].end;
 event->start = false;
 event->feature = feature;
@@ -692,8 +688,6 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
 feature.order = num_features + 1;
 feature_event_t *event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = 0; /* This value does magic. */
 event->start = false;
 event->feature = feature;
@@ -710,8 +704,6 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
 {
 /* Save a snapshot of active features and the range. */
 range_record_t *range = range_records.push ();
-if (unlikely (!range))
-goto fail_features;
 if (active_features.len)
 {
@@ -769,10 +761,9 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
 last_index = event->index;
 }
-if (event->start) {
-active_feature_t *feature = active_features.push (event->feature);
-if (unlikely (!feature))
-goto fail_features;
+if (event->start)
+{
+active_features.push (event->feature);
 } else {
 active_feature_t *feature = active_features.find (&event->feature);
 if (feature)
@@ -780,11 +771,6 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
 }
 }
 }
-else
-{
-fail_features:
-num_features = 0;
-}
 unsigned int scratch_size;
 hb_buffer_t::scratch_buffer_t *scratch = buffer->get_scratch_buffer (&scratch_size);

@@ -211,7 +211,8 @@ struct CmapSubtableFormat4
 }
 // There must be a final entry with end_code == 0xFFFF. Check if we need to add one.
-if (segment == nullptr || segment->end_code != 0xFFFF) {
+if (segment == nullptr || segment->end_code != 0xFFFF)
+{
 segment = segments->push ();
 segment->start_code.set (0xFFFF);
 segment->end_code.set (0xFFFF);

@@ -1099,8 +1099,7 @@ struct hb_get_subtables_context_t :
 inline return_t dispatch (const T &obj)
 {
 hb_applicable_t *entry = array.push();
-if (likely (entry))
-entry->init (&obj, apply_to<T>);
+entry->init (&obj, apply_to<T>);
 return HB_VOID;
 }
 static return_t default_return_value (void) { return HB_VOID; }

@@ -67,7 +67,6 @@ void hb_ot_map_builder_t::add_feature (hb_tag_t tag, unsigned int value,
 hb_ot_map_feature_flags_t flags)
 {
 feature_info_t *info = feature_infos.push();
-if (unlikely (!info)) return;
 if (unlikely (!tag)) return;
 info->tag = tag;
 info->seq = feature_infos.len;
@@ -108,8 +107,6 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
 if (lookup_indices[i] >= table_lookup_count)
 continue;
 hb_ot_map_t::lookup_map_t *lookup = m.lookups[table_index].push ();
-if (unlikely (!lookup))
-return;
 lookup->mask = mask;
 lookup->index = lookup_indices[i];
 lookup->auto_zwnj = auto_zwnj;
@@ -124,10 +121,8 @@ hb_ot_map_builder_t::add_lookups (hb_ot_map_t &m,
 void hb_ot_map_builder_t::add_pause (unsigned int table_index, hb_ot_map_t::pause_func_t pause_func)
 {
 stage_info_t *s = stages[table_index].push ();
-if (likely (s)) {
-s->index = current_stage[table_index];
-s->pause_func = pause_func;
-}
+s->index = current_stage[table_index];
+s->pause_func = pause_func;
 current_stage[table_index]++;
 }
@@ -238,8 +233,6 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
 hb_ot_map_t::feature_map_t *map = m.features.push ();
-if (unlikely (!map))
-break;
 map->tag = info->tag;
 map->index[0] = feature_index[0];
@@ -321,10 +314,8 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
 if (stage_index < stages[table_index].len && stages[table_index][stage_index].index == stage) {
 hb_ot_map_t::stage_map_t *stage_map = m.stages[table_index].push ();
-if (likely (stage_map)) {
-stage_map->last_lookup = last_num_lookups;
-stage_map->pause_func = stages[table_index][stage_index].pause_func;
-}
+stage_map->last_lookup = last_num_lookups;
+stage_map->pause_func = stages[table_index][stage_index].pause_func;
 stage_index++;
 }

@@ -125,11 +125,7 @@ struct post
 const uint8_t *end = (uint8_t *) table + table_length;
 for (const uint8_t *data = pool; data < end && data + *data <= end; data += 1 + *data)
-{
-uint32_t *offset = index_to_offset.push (data - pool);
-if (unlikely (!offset))
-break;
-}
+index_to_offset.push (data - pool);
 }
 inline void fini (void)
 {

@@ -658,17 +658,14 @@ struct hb_vector_t
 inline Type *push (void)
 {
 if (unlikely (!resize (len + 1)))
-return nullptr;
+return &Crap(Type);
 return &arrayZ[len - 1];
 }
 inline Type *push (const Type& v)
 {
-if (unlikely (!resize (len + 1)))
-return nullptr;
-arrayZ[len - 1] = v;
-return &arrayZ[len - 1];
+Type *p = push ();
+*p = v;
+return p;
 }
 /* Allocate for size but don't adjust len. */
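
A hedged usage sketch of the caller-side simplification, reusing the tiny_vector_t stand-in from the sketch near the top of this page (feature_event_t and its fields are illustrative):

#include <cstdio>

struct feature_event_t { unsigned int index; bool start; };  /* illustrative */

int main (void)
{
  tiny_vector_t<feature_event_t> feature_events;

  /* Before this commit the caller had to check for nullptr and bail out;
   * now the write is unconditional.  On allocation failure it lands in
   * the scratch pool and the vector's length simply does not grow. */
  feature_event_t *event = feature_events.push ();
  event->index = 42;
  event->start = true;

  printf ("pushed %u event(s)\n", feature_events.len);
  feature_events.fini ();
  return 0;
}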

@@ -97,9 +97,7 @@ _populate_gids_to_retain (hb_face_t *face,
 glyphs->alloc (all_gids_to_retain->get_population ());
 gid = HB_SET_VALUE_INVALID;
 while (all_gids_to_retain->next (&gid))
-{
 glyphs->push (gid);
-}
 hb_set_destroy (all_gids_to_retain);
 glyf.fini ();

@@ -224,8 +224,6 @@ hb_subset_face_add_table (hb_face_t *face, hb_tag_t tag, hb_blob_t *blob)
 hb_subset_face_data_t *data = (hb_subset_face_data_t *) face->user_data;
 hb_subset_face_data_t::table_entry_t *entry = data->tables.push ();
-if (unlikely (!entry))
-return false;
 entry->tag = tag;
 entry->blob = hb_blob_reference (blob);

@@ -646,15 +646,11 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 feature_event_t *event;
 event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = features[i].start;
 event->start = true;
 event->feature = feature;
 event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = features[i].end;
 event->start = false;
 event->feature = feature;
@@ -668,8 +664,6 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 feature.order = num_features + 1;
 feature_event_t *event = feature_events.push ();
-if (unlikely (!event))
-goto fail_features;
 event->index = 0; /* This value does magic. */
 event->start = false;
 event->feature = feature;
@@ -686,8 +680,6 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 {
 /* Save a snapshot of active features and the range. */
 range_record_t *range = range_records.push ();
-if (unlikely (!range))
-goto fail_features;
 unsigned int offset = feature_records.len;
@@ -696,8 +688,7 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 {
 if (!j || active_features[j].rec.tagFeature != feature_records[feature_records.len - 1].tagFeature)
 {
-if (unlikely (!feature_records.push (active_features[j].rec)))
-goto fail_features;
+feature_records.push (active_features[j].rec);
 }
 else
 {
@@ -716,10 +707,12 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 last_index = event->index;
 }
-if (event->start) {
-if (unlikely (!active_features.push (event->feature)))
-goto fail_features;
-} else {
+if (event->start)
+{
+active_features.push (event->feature);
+}
+else
+{
 active_feature_t *feature = active_features.find (&event->feature);
 if (feature)
 active_features.remove (feature - active_features.arrayZ);
@@ -727,7 +720,7 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 }
 if (!range_records.len) /* No active feature found. */
-goto fail_features;
+num_features = 0;
 /* Fixup the pointers. */
 for (unsigned int i = 0; i < range_records.len; i++)
@@ -736,11 +729,6 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
 range->props.potfRecords = feature_records.arrayZ + reinterpret_cast<uintptr_t> (range->props.potfRecords);
 }
 }
-else
-{
-fail_features:
-num_features = 0;
-}
 #define FAIL(...) \
 HB_STMT_START { \