[vector] Make it act more like pointer
Add pointer cast operator and plus operator.
commit 955aa56b11 (parent 8d55e2adef)
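What the change enables, as a self-contained sketch: hb_vector_t gains an implicit conversion to a plain pointer plus an operator+, so call sites that previously spelled out v.arrayZ() can hand the vector directly to anything expecting a Type * (memcpy, Supplier, ScriptShapeOpenType, and so on). The toy_vector_t and takes_pointer names below are illustrative only, not HarfBuzz API; the operator bodies mirror the ones added to hb_vector_t at the end of this diff.

#include <cstdlib>
#include <cstring>

template <typename Type>
struct toy_vector_t
{
  unsigned int len;
  Type *array;

  Type *arrayZ (void) { return array; }
  const Type *arrayZ (void) const { return array; }

  /* Pointer-like behaviour, as in the patch: implicit decay to a pointer... */
  template <typename T> operator T * (void) { return arrayZ (); }
  template <typename T> operator const T * (void) const { return arrayZ (); }

  /* ...and offsetting via operator+, so v + i replaces v.arrayZ() + i. */
  Type *operator + (unsigned int i) { return arrayZ () + i; }
  const Type *operator + (unsigned int i) const { return arrayZ () + i; }
};

static void takes_pointer (const int *p, unsigned int n) { (void) p; (void) n; }

int main (void)
{
  toy_vector_t<int> v;
  v.len = 4;
  v.array = (int *) calloc (v.len, sizeof (int));

  takes_pointer (v, v.len);             /* implicit cast instead of v.arrayZ () */
  memcpy (v + 2, v, 2 * sizeof (int));  /* operator+ instead of v.arrayZ () + 2 */

  free (v.array);
  return 0;
}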
@@ -586,7 +586,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
       } else {
         active_feature_t *feature = active_features.find (&event->feature);
         if (feature)
-          active_features.remove (feature - active_features.arrayZ());
+          active_features.remove (feature - active_features);
       }
     }
   }

@@ -612,7 +612,7 @@ struct Supplier
   }
   inline Supplier (const hb_vector_t<Type> *v)
   {
-    head = v->arrayZ();
+    head = *v;
     len = v->len;
     stride = sizeof (Type);
   }

@@ -495,7 +495,7 @@ struct CmapSubtableLongSegmented
   {
     TRACE_SERIALIZE (this);
     if (unlikely (!c->extend_min (*this))) return_trace (false);
-    Supplier<CmapSubtableLongGroup> supplier (group_data.arrayZ(), group_data.len);
+    Supplier<CmapSubtableLongGroup> supplier (group_data, group_data.len);
     if (unlikely (!groups.serialize (c, supplier, group_data.len))) return_trace (false);
     return true;
   }

@@ -368,8 +368,8 @@ struct hb_set_t
     if (!resize (count))
       return;
     population = other->population;
-    memcpy (pages.arrayZ(), other->pages.arrayZ(), count * sizeof (pages[0]));
-    memcpy (page_map.arrayZ(), other->page_map.arrayZ(), count * sizeof (page_map[0]));
+    memcpy (pages, other->pages, count * sizeof (pages[0]));
+    memcpy (page_map, other->page_map, count * sizeof (page_map[0]));
   }

   inline bool is_equal (const hb_set_t *other) const

@@ -669,8 +669,8 @@ struct hb_set_t
       return nullptr;

     pages[map.index].init0 ();
-    memmove (page_map.arrayZ() + i + 1,
-             page_map.arrayZ() + i,
+    memmove (page_map + i + 1,
+             page_map + i,
              (page_map.len - 1 - i) * sizeof (page_map[0]));
     page_map[i] = map;
   }

@@ -77,7 +77,7 @@ _subset2 (hb_subset_plan_t *plan)
     return false;
   }
   retry:
-  hb_serialize_context_t serializer (buf.arrayZ(), buf_size);
+  hb_serialize_context_t serializer (buf, buf_size);
   hb_subset_context_t c (plan, &serializer);
   result = table->subset (&c);
   if (serializer.ran_out_of_room)

@@ -717,7 +717,7 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
       {
         active_feature_t *feature = active_features.find (&event->feature);
         if (feature)
-          active_features.remove (feature - active_features.arrayZ());
+          active_features.remove (feature - active_features);
       }
     }

@@ -728,7 +728,7 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
     for (unsigned int i = 0; i < range_records.len; i++)
     {
       range_record_t *range = &range_records[i];
-      range->props.potfRecords = feature_records.arrayZ() + reinterpret_cast<uintptr_t> (range->props.potfRecords);
+      range->props.potfRecords = feature_records + reinterpret_cast<uintptr_t> (range->props.potfRecords);
     }
   }

@@ -902,8 +902,8 @@ retry:
                   &items[i].a,
                   script_tags[i],
                   language_tag,
-                  range_char_counts.arrayZ(),
-                  range_properties.arrayZ(),
+                  range_char_counts,
+                  range_properties,
                   range_properties.len,
                   pchars + chars_offset,
                   item_chars_len,

@@ -943,8 +943,8 @@ retry:
                   &items[i].a,
                   script_tags[i],
                   language_tag,
-                  range_char_counts.arrayZ(),
-                  range_properties.arrayZ(),
+                  range_char_counts,
+                  range_properties,
                   range_properties.len,
                   pchars + chars_offset,
                   log_clusters + chars_offset,

@@ -66,6 +66,12 @@ struct hb_vector_t
     return arrayZ()[i];
   }

+  template <typename T> inline operator T * (void) { return arrayZ(); }
+  template <typename T> inline operator const T * (void) const { return arrayZ(); }
+
+  inline Type * operator + (unsigned int i) { return arrayZ() + i; }
+  inline const Type * operator + (unsigned int i) const { return arrayZ() + i; }
+
   inline Type *push (void)
   {
     if (unlikely (!resize (len + 1)))
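A note on the remove() call sites above: with the implicit pointer conversion in place, an element's index can be recovered by ordinary pointer subtraction, which is all that feature - active_features does. Continuing with the illustrative toy_vector_t from the sketch near the top of this page (not HarfBuzz API):

toy_vector_t<int> v;
v.len = 4;
v.array = (int *) calloc (v.len, sizeof (int));

int *hit = v + 3;            /* operator+ yields a real pointer into the array */
unsigned int idx = hit - v;  /* the cast operator makes the subtraction well-formed; idx == 3 */

free (v.array);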