Merge branch 'master' into docs-gtkdoc-colormath

commit 1a850abd66
@@ -28,13 +28,13 @@ jobs:
steps:
- checkout
- run: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget autoconf automake libtool pkg-config ragel freetype glib cairo icu4c graphite2
- run: export PKG_CONFIG_PATH="/usr/local/opt/icu4c/lib/pkgconfig" && ./autogen.sh --with-freetype --with-glib --with-gobject --with-cairo --with-icu --with-coretext --with-graphite2
- run: export PKG_CONFIG_PATH="/usr/local/opt/icu4c/lib/pkgconfig:/usr/local/opt/libffi/lib/pkgconfig" && ./autogen.sh --with-freetype --with-glib --with-gobject --with-cairo --with-icu --with-coretext --with-graphite2
- run: make -j4
- run: make check || .ci/fail.sh

distcheck:
docker:
- image: ubuntu:17.10
- image: ubuntu:19.04
steps:
- checkout
- run: apt update && apt install -y ninja-build binutils libtool autoconf automake make cmake gcc g++ pkg-config ragel gtk-doc-tools libfontconfig1-dev libfreetype6-dev libglib2.0-dev libcairo2-dev libicu-dev libgraphite2-dev python python-pip

@@ -203,7 +203,7 @@ jobs:

cmake-gcc:
docker:
- image: ubuntu:17.10
- image: ubuntu:19.04
steps:
- checkout
- run: apt update && apt install -y ninja-build binutils cmake gcc g++ pkg-config ragel gtk-doc-tools libfreetype6-dev libglib2.0-dev libcairo2-dev libicu-dev libgraphite2-dev python python-pip

@@ -235,22 +235,13 @@ jobs:
- run: CFLAGS="-Wno-attributes" CXXFLAGS="-Wno-attributes" ./autogen.sh --prefix=/usr/local/djgpp --host=i586-pc-msdosdjgpp
- run: make -j32

crosscompile-notest-freebsd9:
docker:
- image: donbowman/freebsd-cross-build
steps:
- checkout
- run: apt update && apt install -y pkg-config ragel
- run: ./autogen.sh --prefix=/freebsd --host=x86_64-pc-freebsd9
- run: make -j32

crosscompile-notest-psvita:
docker:
- image: dockcross/base
steps:
- checkout
- run: git clone https://github.com/vitasdk/vdpm && cd vdpm && ./bootstrap-vitasdk.sh
- run: echo "#""!""/bin/true" > /usr/bin/ragel && chmod +x /usr/bin/ragel
- run: echo '#!/bin/true' > /usr/bin/ragel && chmod +x /usr/bin/ragel
- run: ./autogen.sh --prefix=/usr/local/vitasdk/arm-vita-eabi --host=arm-vita-eabi
- run: make -j32

@@ -327,7 +318,6 @@ workflows:
# they can't be test thus are without tests
## autotools
- crosscompile-notest-djgpp
- crosscompile-notest-freebsd9
- crosscompile-notest-psvita

## cmake
@@ -9,7 +9,7 @@ insert_final_newline = true
[*.{c,cc,h,hh}]
tab_width = 8
indent_size = 2
indent_style = space
indent_style = tab # should be space

[*.{py,sh}]
indent_style = tab
AUTHORS (3 changes)
@@ -1,11 +1,14 @@
Behdad Esfahbod
David Corbett
David Turner
Ebrahim Byagowi
Garret Rieger
Jonathan Kew
Khaled Hosny
Lars Knoll
Martin Hosken
Owen Taylor
Roderick Sheeter
Roozbeh Pournader
Simon Hausmann
Werner Lemberg
@@ -844,7 +844,7 @@ endif ()

if (HB_BUILD_TESTS)
## src/ executables
foreach (prog main test test-would-substitute test-size-params test-buffer-serialize hb-ot-tag test-unicode-ranges)
foreach (prog main test test-gsub-would-substitute test-gpos-size-params test-buffer-serialize hb-ot-tag test-unicode-ranges)
set (prog_name ${prog})
if (${prog_name} STREQUAL "test")
# test can not be used as a valid executable name on cmake, lets special case it
COPYING (3 changes)
@@ -2,7 +2,8 @@ HarfBuzz is licensed under the so-called "Old MIT" license. Details follow.
For parts of HarfBuzz that are licensed under different licenses see individual
files names COPYING in subdirectories where applicable.

Copyright © 2010,2011,2012 Google, Inc.
Copyright © 2010,2011,2012,2013,2014,2015,2016,2017,2018,2019 Google, Inc.
Copyright © 2019 Facebook, Inc.
Copyright © 2012 Mozilla Foundation
Copyright © 2011 Codethink Limited
Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies)
@@ -13,6 +13,10 @@ For bug reports, mailing list, and other information please visit:

http://harfbuzz.org/

For license information, see the file COPYING.
For license information, see [COPYING](COPYING).

For build information, see [BUILD.md](BUILD.md).

For test execution, see [TESTING.md](TESTING.md).

Documentation: https://harfbuzz.github.io
TESTING.md (new file, 64 lines)
@@ -0,0 +1,64 @@
## Build & Run

Depending on what area you are working in, change or add `HB_DEBUG_<whatever>`.
Values are defined in `hb-debug.hh`.

```shell
# quick sanity check
time (make -j4 CPPFLAGS='-DHB_DEBUG_SUBSET=100' \
  && (make -j4 -C test/api check || cat test/api/test-suite.log))

# slower sanity check
time (make -j4 CPPFLAGS='-DHB_DEBUG_SUBSET=100' \
  && make -j4 -C src check \
  && make -j4 -C test/api check \
  && make -j4 -C test/subset check)

# confirm you didn't break anything else
time (make -j4 CPPFLAGS='-DHB_DEBUG_SUBSET=100' \
  && make -j4 check)

# often catches files you didn't add, e.g. test fonts to EXTRA_DIST
make distcheck
```

### Run tests with asan

```shell
./configure CC=clang CXX=clang++ CPPFLAGS=-fsanitize=address LDFLAGS=-fsanitize=address
# make/run tests as usual
```

### Debug with GDB

```
cd ./util
../libtool --mode=execute gdb --args ./hb-subset ...
```

### Enable Debug Logging

```shell
# make clean if you previously built w/o debug logging
make CPPFLAGS=-DHB_DEBUG_SUBSET=100
```

## Build and Test via CMake

Note: You'll need to first install ninja-build via apt-get.

```shell
cd harfbuzz
mkdir build
cmake -DHB_CHECK=ON -Bbuild -H. -GNinja && ninja -Cbuild && CTEST_OUTPUT_ON_FAILURE=1 ninja -Cbuild test
```

## Test with the Fuzzer

```shell
# push your changes to a branch on googlefonts/harfbuzz
# In a local copy of oss-fuzz, edit projects/harfbuzz/Dockerfile
# Change the git clone to pull your branch
sudo python infra/helper.py build_image harfbuzz
sudo python infra/helper.py build_fuzzers --sanitizer address harfbuzz
sudo python infra/helper.py run_fuzzer harfbuzz hb-subset-fuzzer
```
THANKS (2 changes)
@@ -1,6 +1,6 @@
Bradley Grainger
Khaled Hosny
Kenichi Ishibashi
Ivan Kuckir <https://photopea.com/>
Ryan Lortie
Jeff Muizelaar
suzuki toshiya
@@ -9,7 +9,7 @@ AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_SRCDIR([src/harfbuzz.pc.in])
AC_CONFIG_HEADERS([config.h])

AM_INIT_AUTOMAKE([1.13.0 gnits tar-ustar dist-bzip2 no-dist-gzip -Wall no-define color-tests -Wno-portability])
AM_INIT_AUTOMAKE([1.13.0 gnits tar-ustar dist-xz no-dist-gzip -Wall no-define color-tests -Wno-portability])
AM_SILENT_RULES([yes])
AX_CODE_COVERAGE
@@ -33,7 +33,7 @@ SCAN_OPTIONS=--rebuild-types --deprecated-guards="HB_DISABLE_DEPRECATED" \

# Header files or dirs to ignore when scanning. Use base file/dir names
# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h private_code
IGNORE_HFILES=`cd $(top_srcdir)/src; find . -path './hb-*/*.h' | sed 's@^.*/@@'`
IGNORE_HFILES=`cd $(top_srcdir)/src; find . -path './*/*.h' | sed 's@^.*/@@'`
if HAVE_GOBJECT
else
IGNORE_HFILES+=hb-gobject.h hb-gobject-enums.h hb-gobject-structs.h
@@ -76,6 +76,7 @@ content_files= \
usermanual-install-harfbuzz.xml \
usermanual-getting-started.xml \
usermanual-shaping-concepts.xml \
usermanual-object-model.xml \
usermanual-buffers-language-script-and-direction.xml \
usermanual-fonts-and-faces.xml \
usermanual-clusters.xml \
@@ -20,11 +20,7 @@

<para>
The canonical source-code tree is available at
<ulink
url="https://github.com/harfbuzz/harfbuzz">github.com/harfbuzz/harfbuzz</ulink>
and is also available at
<ulink
url="http://cgit.freedesktop.org/harfbuzz/">cgit.freedesktop.org/harfbuzz</ulink>.
<ulink url="https://github.com/harfbuzz/harfbuzz">github.com/harfbuzz/harfbuzz</ulink>.
See <xref linkend="download" endterm="download.title"/> for
release tarballs.
</para>
@@ -37,6 +33,7 @@
<xi:include href="usermanual-install-harfbuzz.xml"/>
<xi:include href="usermanual-getting-started.xml"/>
<xi:include href="usermanual-shaping-concepts.xml"/>
<xi:include href="usermanual-object-model.xml"/>
<xi:include href="usermanual-buffers-language-script-and-direction.xml"/>
<xi:include href="usermanual-fonts-and-faces.xml"/>
<xi:include href="usermanual-clusters.xml"/>
docs/usermanual-object-model.xml (new file, 258 lines)
@@ -0,0 +1,258 @@
<?xml version="1.0"?>
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.3//EN"
"http://www.oasis-open.org/docbook/xml/4.3/docbookx.dtd" [
<!ENTITY % local.common.attrib "xmlns:xi CDATA #FIXED 'http://www.w3.org/2003/XInclude'">
<!ENTITY version SYSTEM "version.xml">
]>
<chapter id="object-model">
<title>The HarfBuzz object model</title>
<section id="object-model-intro">
<title>An overview of data types in HarfBuzz</title>
<para>
HarfBuzz features two kinds of data types: non-opaque,
pass-by-value types and opaque, heap-allocated types. This kind
of separation is common in C libraries that have to provide
API/ABI compatibility (almost) indefinitely.
</para>
<para>
<emphasis>Value types:</emphasis> The non-opaque, pass-by-value
types include integer types, enums, and small structs. Exposing
a struct in the public API makes it impossible to expand the
struct in the future. As such, exposing structs is reserved for
cases where it’s extremely inefficient to do otherwise.
</para>
<para>
In HarfBuzz, several structs, like <literal>hb_glyph_info_t</literal> and
<literal>hb_glyph_position_t</literal>, fall into that efficiency-sensitive
category and are non-opaque.
</para>
<para>
For all non-opaque structs where future extensibility may be
necessary, reserved members are included to hold space for
possible future members. As such, it’s important to provide
<function>equal()</function> and <function>hash()</function>
methods for such structs, allowing users of the API to deal
effectively with the type without having to
adapt their code to future changes.
</para>
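<para>
One concrete case is <literal>hb_segment_properties_t</literal>,
which (in addition to its reserved members) ships with
<function>hb_segment_properties_equal()</function> and
<function>hb_segment_properties_hash()</function>. A minimal
sketch, with <literal>buf1</literal> and <literal>buf2</literal>
standing in for two existing buffers:
</para>
<programlisting language="C">
hb_segment_properties_t a, b;
hb_buffer_get_segment_properties (buf1, &amp;a);
hb_buffer_get_segment_properties (buf2, &amp;b);

if (hb_segment_properties_equal (&amp;a, &amp;b))
  printf ("same properties; hash is %u\n",
          hb_segment_properties_hash (&amp;a));
</programlisting>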
<para>
Important value types provided by HarfBuzz include the structs
for working with Unicode code points, glyphs, and tags for font
tables and features, as well as the enums for many Unicode and
OpenType properties.
</para>
</section>

<section id="object-model-object-types">
<title>Objects in HarfBuzz</title>
<para>
<emphasis>Object types:</emphasis> Opaque struct types are used
for what HarfBuzz loosely calls "objects." This doesn’t have
much to do with the terminology from object-oriented programming
(OOP), although some of the concepts are similar.
</para>
<para>
In HarfBuzz, all object types provide certain
lifecycle-management APIs. Objects are reference-counted, and
constructed with various <function>create()</function> methods, referenced via
<function>reference()</function> and dereferenced using
<function>destroy()</function>.
</para>
<para>
For example,
the <literal>hb_buffer_t</literal> object has
<function>hb_buffer_create()</function> as its constructor,
<function>hb_buffer_reference()</function> to reference, and
<function>hb_buffer_destroy()</function> to dereference.
</para>
<para>
After construction, each object's properties are accessible only
through the setter and getter functions described in the API
Reference manual.
</para>
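<para>
Putting those pieces together for <literal>hb_buffer_t</literal>
(a minimal sketch; <function>hb_buffer_set_direction()</function>
is just one example of such a setter):
</para>
<programlisting language="C">
hb_buffer_t *buf = hb_buffer_create ();          /* reference count is 1 */
hb_buffer_t *alias = hb_buffer_reference (buf);  /* reference count is 2 */

hb_buffer_set_direction (buf, HB_DIRECTION_LTR); /* properties go through setters */

hb_buffer_destroy (alias);                       /* reference count back to 1 */
hb_buffer_destroy (buf);                         /* reference count 0; buffer is freed */
</programlisting>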
<para>
Key object types provided by HarfBuzz include:
</para>
<itemizedlist spacing="compact">
<listitem>
<para>
<emphasis>blobs</emphasis>, which act as low-level wrappers around binary
data. Blobs are typically used to hold the contents of a
binary font file.
</para>
</listitem>
<listitem>
<para>
<emphasis>faces</emphasis>, which represent typefaces from a
font file, but without specific parameters (such as size) set.
</para>
</listitem>
<listitem>
<para>
<emphasis>fonts</emphasis>, which represent instances of a
face with all of their parameters specified.
</para>
</listitem>
<listitem>
<para>
<emphasis>buffers</emphasis>, which hold Unicode code points
for characters (before shaping) and the shaped glyph output
(after shaping).
</para>
</listitem>
<listitem>
<para>
<emphasis>shape plans</emphasis>, which store the settings
that HarfBuzz will use when shaping a particular text
segment. Shape plans are not generally used by client
programs directly, but as we will see in a later chapter,
they are still valuable to understand.
</para>
</listitem>
</itemizedlist>

</section>

<section id="object-model-lifecycle">
<title>Object lifecycle management</title>
<para>
Each object type in HarfBuzz provides a
<function>create()</function> method. Some object types provide
additional variants of <function>create()</function> to handle
special cases or to speed up common tasks; those variants are
documented in the API reference. For example,
<function>hb_blob_create_from_file()</function> constructs a new
blob directly from the contents of a file.
</para>
<para>
All objects are created with an initial reference count of
<literal>1</literal>. Client programs can increase the reference
count on an object by calling its
<function>reference()</function> method. Whenever a client
program is finished with an object, it should call its
corresponding <function>destroy()</function> method. The destroy
method will decrease the reference count on the object and,
whenever the reference count reaches zero, it will also destroy
the object and free all of the associated memory.
</para>
<para>
All of HarfBuzz's object-lifecycle-management APIs are
thread-safe (unless you compiled HarfBuzz from source with the
<literal>HB_NO_MT</literal> configuration flag), even when the
object as a whole is not thread-safe.
It is also permissible to <function>reference()</function> or to
<function>destroy()</function> the <literal>NULL</literal>
value.
</para>
<para>
Some objects are thread-safe after they have been constructed
and set up. The general pattern is to
<function>create()</function> the object, make a few
<function>set_*()</function> calls to set up the
object, and then use it without further modification.
</para>
<para>
To ensure that such an object is not modified, client programs
can explicitly mark an object as immutable. HarfBuzz provides
<function>make_immutable()</function> methods to mark an object
as immutable and <function>is_immutable()</function> methods to
test whether or not an object is immutable. Attempts to use
setter functions on immutable objects will fail silently; see the API
Reference manual for specifics.
</para>
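<para>
A short sketch of that pattern, using a face object
(<literal>blob</literal> stands for an existing
<literal>hb_blob_t</literal>):
</para>
<programlisting language="C">
hb_face_t *face = hb_face_create (blob, 0);
/* ... any set_*() calls needed to finish setting up the face ... */
hb_face_make_immutable (face);

/* Setter calls now fail silently, and the face can be shared freely. */
hb_bool_t frozen = hb_face_is_immutable (face);  /* returns true */
</programlisting>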
<para>
Note also that there are no "make mutable" methods. If client
programs need to alter an object previously marked as immutable,
they will need to make a duplicate of the original.
</para>
<para>
Finally, object constructors (and, indeed, as much of the
shaping API as possible) will never return
<literal>NULL</literal>. Instead, if there is an allocation
error, each constructor will return an “empty” object
singleton.
</para>
<para>
These empty-object singletons are inert and safe (although
typically useless) to pass around. This design choice avoids
having to check for <literal>NULL</literal> pointers all
throughout the code.
</para>
<para>
In addition, this “empty” object singleton can also be accessed
using the <function>get_empty()</function> method of the object
type in question.
</para>
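<para>
As a sketch, a client that wants to detect the allocation-failure
fallback described above can compare against that singleton:
</para>
<programlisting language="C">
hb_buffer_t *buf = hb_buffer_create ();
if (buf == hb_buffer_get_empty ())
  {
    /* Allocation failed: buf is the inert empty buffer.
     * It is still safe to pass around and to destroy. */
  }
</programlisting>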
</section>

<section id="object-model-user-data">
<title>User data</title>
<para>
To better integrate with client programs, HarfBuzz's objects
offer a "user data" mechanism that can be used to attach
arbitrary data to the object. User-data attachment can be
useful for tying the lifecycles of various pieces of data
together, or for creating language bindings.
</para>
<para>
Each object type has a <function>set_user_data()</function>
method and a <function>get_user_data()</function> method. The
<function>set_user_data()</function> methods take a client-provided
<literal>key</literal> and a pointer,
<literal>user_data</literal>, pointing to the data itself. Once
the key-data pair has been attached to the object, the
<function>get_user_data()</function> method can be called with
the key, returning the <literal>user_data</literal> pointer.
</para>
<para>
The <function>set_user_data()</function> methods also support an
optional <function>destroy</function> callback. Client programs
can set the <function>destroy</function> callback and receive
notification from HarfBuzz whenever the object is destructed.
</para>
<para>
Finally, each <function>set_user_data()</function> method allows
the client program to set a <literal>replace</literal> Boolean
indicating whether or not the function call should replace any
existing <literal>user_data</literal>
associated with the specified key.
</para>
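<para>
A minimal sketch with a blob object
(<literal>blob</literal>, <literal>my_data</literal> and
<function>my_data_free()</function> are placeholders for
client-side pieces; the key is simply the address of a static
<literal>hb_user_data_key_t</literal>):
</para>
<programlisting language="C">
static hb_user_data_key_t my_key;

hb_blob_set_user_data (blob, &amp;my_key,
                       my_data,       /* user_data pointer */
                       my_data_free,  /* destroy callback, may be NULL */
                       1 /* replace */);

void *stored = hb_blob_get_user_data (blob, &amp;my_key);
</programlisting>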
</section>

<section id="object-model-blobs">
<title>Blobs</title>
<para>
While most of HarfBuzz's object types are specific to the
shaping process, <emphasis>blobs</emphasis> are somewhat
different.
</para>
<para>
Blobs are an abstraction designed to negotiate lifecycle and
permissions for raw pieces of data. For example, when you load
the raw font data into memory and want to pass it to HarfBuzz,
you do so in a <literal>hb_blob_t</literal> wrapper.
</para>
<para>
This allows you to take advantage of HarfBuzz's
reference-counting and <function>destroy</function>
callbacks. If you allocated the memory for the data using
<function>malloc()</function>, you would create the blob using
</para>
<programlisting language="C">
hb_blob_create (data, length, HB_MEMORY_MODE_WRITABLE, NULL, free)
</programlisting>
<para>
That way, HarfBuzz will call <function>free()</function> on the
allocated memory whenever the blob drops its last reference and
is deconstructed. Consequently, the user code can stop worrying
about freeing memory and let the reference-counting machinery
take care of that.
</para>
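<para>
The same machinery carries through the typical loading sequence:
wrap the file in a blob, create a face from the blob, then a font
from the face, dropping each reference as soon as the next object
holds its own (a sketch; the file name is a placeholder and error
handling is omitted):
</para>
<programlisting language="C">
hb_blob_t *blob = hb_blob_create_from_file ("font.ttf");
hb_face_t *face = hb_face_create (blob, 0 /* face index */);
hb_blob_destroy (blob);   /* the face keeps its own reference to the data */

hb_font_t *font = hb_font_create (face);
hb_face_destroy (face);   /* likewise, the font references the face */

/* ... shape text with the font ... */

hb_font_destroy (font);
</programlisting>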
</section>

</chapter>
@@ -151,9 +151,9 @@
</para>
<para>
For example, in Tamil, when the letter "TTA" (ட)
letter is followed by "U" (உ), the pair
letter is followed by the vowel sign "U" (ு), the pair
must be replaced by the single glyph "டு". The
sequence of Unicode characters "டஉ" needs to be
sequence of Unicode characters "ட,ு" needs to be
substituted with a single "டு" glyph from the
font.
</para>
@ -310,9 +310,9 @@ noinst_PROGRAMS = \
|
||||
main \
|
||||
test \
|
||||
test-buffer-serialize \
|
||||
test-name-table \
|
||||
test-size-params \
|
||||
test-would-substitute \
|
||||
test-ot-name \
|
||||
test-gpos-size-params \
|
||||
test-gsub-would-substitute \
|
||||
$(NULL)
|
||||
bin_PROGRAMS =
|
||||
|
||||
@ -328,17 +328,17 @@ test_buffer_serialize_SOURCES = test-buffer-serialize.cc
|
||||
test_buffer_serialize_CPPFLAGS = $(HBCFLAGS)
|
||||
test_buffer_serialize_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
|
||||
test_name_table_SOURCES = test-name-table.cc
|
||||
test_name_table_CPPFLAGS = $(HBCFLAGS)
|
||||
test_name_table_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
test_ot_name_SOURCES = test-ot-name.cc
|
||||
test_ot_name_CPPFLAGS = $(HBCFLAGS)
|
||||
test_ot_name_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
|
||||
test_size_params_SOURCES = test-size-params.cc
|
||||
test_size_params_CPPFLAGS = $(HBCFLAGS)
|
||||
test_size_params_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
test_gpos_size_params_SOURCES = test-gpos-size-params.cc
|
||||
test_gpos_size_params_CPPFLAGS = $(HBCFLAGS)
|
||||
test_gpos_size_params_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
|
||||
test_would_substitute_SOURCES = test-would-substitute.cc
|
||||
test_would_substitute_CPPFLAGS = $(HBCFLAGS) $(FREETYPE_CFLAGS)
|
||||
test_would_substitute_LDADD = libharfbuzz.la $(HBLIBS) $(FREETYPE_LIBS)
|
||||
test_gsub_would_substitute_SOURCES = test-gsub-would-substitute.cc
|
||||
test_gsub_would_substitute_CPPFLAGS = $(HBCFLAGS) $(FREETYPE_CFLAGS)
|
||||
test_gsub_would_substitute_LDADD = libharfbuzz.la $(HBLIBS) $(FREETYPE_LIBS)
|
||||
|
||||
if HAVE_FREETYPE
|
||||
if HAVE_CAIRO_FT
|
||||
@ -384,7 +384,7 @@ dump_use_data_SOURCES = dump-use-data.cc hb-ot-shape-complex-use-table.cc
|
||||
dump_use_data_CPPFLAGS = $(HBCFLAGS)
|
||||
dump_use_data_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
|
||||
COMPILED_TESTS = test-algs test-iter test-ot-tag test-unicode-ranges
|
||||
COMPILED_TESTS = test-algs test-iter test-meta test-ot-tag test-unicode-ranges
|
||||
COMPILED_TESTS_CPPFLAGS = $(HBCFLAGS) -DMAIN -UNDEBUG
|
||||
COMPILED_TESTS_LDADD = libharfbuzz.la $(HBLIBS)
|
||||
check_PROGRAMS += $(COMPILED_TESTS)
|
||||
@ -398,6 +398,10 @@ test_iter_SOURCES = test-iter.cc hb-static.cc
|
||||
test_iter_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
|
||||
test_iter_LDADD = $(COMPILED_TESTS_LDADD)
|
||||
|
||||
test_meta_SOURCES = test-meta.cc hb-static.cc
|
||||
test_meta_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
|
||||
test_meta_LDADD = $(COMPILED_TESTS_LDADD)
|
||||
|
||||
test_ot_tag_SOURCES = hb-ot-tag.cc
|
||||
test_ot_tag_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
|
||||
test_ot_tag_LDADD = $(COMPILED_TESTS_LDADD)
|
||||
|
@ -31,6 +31,7 @@ HB_BASE_sources = \
|
||||
hb-cff1-interp-cs.hh \
|
||||
hb-cff2-interp-cs.hh \
|
||||
hb-common.cc \
|
||||
hb-config.hh \
|
||||
hb-debug.hh \
|
||||
hb-dispatch.hh \
|
||||
hb-face.cc \
|
||||
@ -84,7 +85,7 @@ HB_BASE_sources = \
|
||||
hb-ot-math-table.hh \
|
||||
hb-ot-math.cc \
|
||||
hb-ot-maxp-table.hh \
|
||||
hb-ot-name-language.cc \
|
||||
hb-ot-name-language-static.hh \
|
||||
hb-ot-name-language.hh \
|
||||
hb-ot-name-table.hh \
|
||||
hb-ot-name.cc \
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
# Generates the code for a sorted unicode range array as used in hb-ot-os2-unicode-ranges.hh
|
||||
# Input is a tab seperated list of unicode ranges from the otspec
|
||||
# (https://docs.microsoft.com/en-us/typography/opentype/spec/os2#ulunicoderange1).
|
||||
# (https://docs.microsoft.com/en-us/typography/opentype/spec/os2#ur).
|
||||
|
||||
from __future__ import print_function, division, absolute_import
|
||||
|
||||
|
@ -895,20 +895,18 @@ def language_name_intersection (a, b):
|
||||
def get_matching_language_name (intersection, candidates):
|
||||
return next (iter (c for c in candidates if not intersection.isdisjoint (get_variant_set (c))))
|
||||
|
||||
maximum_tags = 0
|
||||
def same_tag (bcp_47_tag, ot_tags):
|
||||
return len (bcp_47_tag) == 3 and len (ot_tags) == 1 and bcp_47_tag == ot_tags[0].lower ()
|
||||
|
||||
for language, tags in sorted (ot.from_bcp_47.items ()):
|
||||
if language == '' or '-' in language:
|
||||
continue
|
||||
print (' {\"%s\",\t{' % language, end='')
|
||||
maximum_tags = max (maximum_tags, len (tags))
|
||||
tag_count = len (tags)
|
||||
commented_out = same_tag (language, tags)
|
||||
for i, tag in enumerate (tags, start=1):
|
||||
if i > 1:
|
||||
print ('\t\t ', end='')
|
||||
print (hb_tag (tag), end='')
|
||||
if i == tag_count:
|
||||
print ('}}', end='')
|
||||
print (',\t/* ', end='')
|
||||
print ('%s{\"%s\",\t%s},' % ('/*' if commented_out else ' ', language, hb_tag (tag)), end='')
|
||||
if commented_out:
|
||||
print ('*/', end='')
|
||||
print ('\t/* ', end='')
|
||||
bcp_47_name = bcp_47.names.get (language, '')
|
||||
bcp_47_name_candidates = bcp_47_name.split ('\n')
|
||||
intersection = language_name_intersection (bcp_47_name, ot.names[tag])
|
||||
@ -923,8 +921,6 @@ for language, tags in sorted (ot.from_bcp_47.items ()):
|
||||
|
||||
print ('};')
|
||||
print ()
|
||||
print ('static_assert (HB_OT_MAX_TAGS_PER_LANGUAGE == %iu, "");' % maximum_tags)
|
||||
print ()
|
||||
|
||||
print ('/**')
|
||||
print (' * hb_ot_tags_from_complex_language:')
|
||||
@ -1051,7 +1047,8 @@ print (' * @tag: A language tag.')
|
||||
print (' *')
|
||||
print (' * Converts @tag to a BCP 47 language tag if it is ambiguous (it corresponds to')
|
||||
print (' * many language tags) and the best tag is not the alphabetically first, or if')
|
||||
print (' * the best tag consists of multiple subtags.')
|
||||
print (' * the best tag consists of multiple subtags, or if the best tag does not appear')
|
||||
print (' * in #ot_languages.')
|
||||
print (' *')
|
||||
print (' * Return value: The #hb_language_t corresponding to the BCP 47 language tag,')
|
||||
print (' * or #HB_LANGUAGE_INVALID if @tag is not ambiguous.')
|
||||
@ -1102,7 +1099,8 @@ def verify_disambiguation_dict ():
|
||||
'%s is not a valid disambiguation for %s' % (disambiguation[ot_tag], ot_tag))
|
||||
elif ot_tag not in disambiguation:
|
||||
disambiguation[ot_tag] = macrolanguages[0]
|
||||
if disambiguation[ot_tag] == sorted (primary_tags)[0] and '-' not in disambiguation[ot_tag]:
|
||||
different_primary_tags = sorted (t for t in primary_tags if not same_tag (t, ot.from_bcp_47.get (t)))
|
||||
if different_primary_tags and disambiguation[ot_tag] == different_primary_tags[0] and '-' not in disambiguation[ot_tag]:
|
||||
del disambiguation[ot_tag]
|
||||
for ot_tag in disambiguation.keys ():
|
||||
expect (ot_tag in ot.to_bcp_47, 'unknown OT tag: %s' % ot_tag)
|
||||
|
@ -47,8 +47,22 @@ defaults = ('Other', 'Not_Applicable', 'Cn', 'No_Block')
|
||||
|
||||
# TODO Characters that are not in Unicode Indic files, but used in USE
|
||||
data[0][0x034F] = defaults[0]
|
||||
data[0][0x1B61] = defaults[0]
|
||||
data[0][0x1B63] = defaults[0]
|
||||
data[0][0x1B64] = defaults[0]
|
||||
data[0][0x1B65] = defaults[0]
|
||||
data[0][0x1B66] = defaults[0]
|
||||
data[0][0x1B67] = defaults[0]
|
||||
data[0][0x1B69] = defaults[0]
|
||||
data[0][0x1B6A] = defaults[0]
|
||||
data[0][0x2060] = defaults[0]
|
||||
# TODO https://github.com/roozbehp/unicode-data/issues/9
|
||||
# TODO https://github.com/harfbuzz/harfbuzz/pull/1685
|
||||
data[0][0x1B5B] = 'Consonant_Placeholder'
|
||||
data[0][0x1B5C] = 'Consonant_Placeholder'
|
||||
data[0][0x1B5F] = 'Consonant_Placeholder'
|
||||
data[0][0x1B62] = 'Consonant_Placeholder'
|
||||
data[0][0x1B68] = 'Consonant_Placeholder'
|
||||
# TODO https://github.com/harfbuzz/harfbuzz/issues/1035
|
||||
data[0][0x11C44] = 'Consonant_Placeholder'
|
||||
data[0][0x11C45] = 'Consonant_Placeholder'
|
||||
# TODO https://github.com/harfbuzz/harfbuzz/pull/1399
|
||||
@ -171,7 +185,7 @@ def is_BASE(U, UISC, UGC):
|
||||
def is_BASE_IND(U, UISC, UGC):
|
||||
#SPEC-DRAFT return (UISC in [Consonant_Dead, Modifying_Letter] or UGC == Po)
|
||||
return (UISC in [Consonant_Dead, Modifying_Letter] or
|
||||
(UGC == Po and not U in [0x104B, 0x104E, 0x2022, 0x111C8, 0x11A3F, 0x11A45, 0x11C44, 0x11C45]) or
|
||||
(UGC == Po and not U in [0x104B, 0x104E, 0x1B5B, 0x1B5C, 0x1B5F, 0x2022, 0x111C8, 0x11A3F, 0x11A45, 0x11C44, 0x11C45]) or
|
||||
False # SPEC-DRAFT-OUTDATED! U == 0x002D
|
||||
)
|
||||
def is_BASE_NUM(U, UISC, UGC):
|
||||
@ -216,6 +230,7 @@ def is_Word_Joiner(U, UISC, UGC):
|
||||
def is_OTHER(U, UISC, UGC):
|
||||
#SPEC-OUTDATED return UGC == Zs # or any other SCRIPT_COMMON characters
|
||||
return (UISC == Other
|
||||
and not is_SYM(U, UISC, UGC)
|
||||
and not is_SYM_MOD(U, UISC, UGC)
|
||||
and not is_CGJ(U, UISC, UGC)
|
||||
and not is_Word_Joiner(U, UISC, UGC)
|
||||
@ -228,17 +243,17 @@ def is_REPHA(U, UISC, UGC):
|
||||
def is_SYM(U, UISC, UGC):
|
||||
if U == 0x25CC: return False #SPEC-DRAFT
|
||||
#SPEC-DRAFT return UGC in [So, Sc] or UISC == Symbol_Letter
|
||||
return UGC in [So, Sc]
|
||||
return UGC in [So, Sc] and U not in [0x1B62, 0x1B68]
|
||||
def is_SYM_MOD(U, UISC, UGC):
|
||||
return U in [0x1B6B, 0x1B6C, 0x1B6D, 0x1B6E, 0x1B6F, 0x1B70, 0x1B71, 0x1B72, 0x1B73]
|
||||
def is_VARIATION_SELECTOR(U, UISC, UGC):
|
||||
return 0xFE00 <= U <= 0xFE0F
|
||||
def is_VOWEL(U, UISC, UGC):
|
||||
# https://github.com/roozbehp/unicode-data/issues/6
|
||||
# https://github.com/harfbuzz/harfbuzz/issues/376
|
||||
return (UISC == Pure_Killer or
|
||||
(UGC != Lo and UISC in [Vowel, Vowel_Dependent] and U not in [0xAA29]))
|
||||
def is_VOWEL_MOD(U, UISC, UGC):
|
||||
# https://github.com/roozbehp/unicode-data/issues/6
|
||||
# https://github.com/harfbuzz/harfbuzz/issues/376
|
||||
return (UISC in [Tone_Mark, Cantillation_Mark, Register_Shifter, Visarga] or
|
||||
(UGC != Lo and (UISC == Bindu or U in [0xAA29])))
|
||||
|
||||
|
@ -180,6 +180,9 @@ print ('_hb_preprocess_text_vowel_constraints (const hb_ot_shape_plan_t *plan HB
|
||||
print ('\t\t\t\t hb_buffer_t *buffer,')
|
||||
print ('\t\t\t\t hb_font_t *font HB_UNUSED)')
|
||||
print ('{')
|
||||
print ('#if defined(HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS)')
|
||||
print (' return;')
|
||||
print ('#endif')
|
||||
print (' if (buffer->flags & HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE)')
|
||||
print (' return;')
|
||||
print ()
|
||||
|
@ -83,7 +83,7 @@ struct ankr
|
||||
protected:
|
||||
HBUINT16 version; /* Version number (set to zero) */
|
||||
HBUINT16 flags; /* Flags (currently unused; set to zero) */
|
||||
LOffsetTo<Lookup<NNOffsetTo<GlyphAnchors> > >
|
||||
LOffsetTo<Lookup<NNOffsetTo<GlyphAnchors>>>
|
||||
lookupTable; /* Offset to the table's lookup table */
|
||||
LNNOffsetTo<HBUINT8>
|
||||
anchorData; /* Offset to the glyph data table */
|
||||
|
@ -125,7 +125,7 @@ struct LookupFormat2
|
||||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 2 */
|
||||
VarSizedBinSearchArrayOf<LookupSegmentSingle<T> >
|
||||
VarSizedBinSearchArrayOf<LookupSegmentSingle<T>>
|
||||
segments; /* The actual segments. These must already be sorted,
|
||||
* according to the first word in each one (the last
|
||||
* glyph in each segment). */
|
||||
@ -153,18 +153,18 @@ struct LookupSegmentArray
|
||||
first <= last &&
|
||||
valuesZ.sanitize (c, base, last - first + 1));
|
||||
}
|
||||
template <typename T2>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T2 user_data) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this) &&
|
||||
first <= last &&
|
||||
valuesZ.sanitize (c, base, last - first + 1, user_data));
|
||||
valuesZ.sanitize (c, base, last - first + 1, hb_forward<Ts> (ds)...));
|
||||
}
|
||||
|
||||
GlyphID last; /* Last GlyphID in this segment */
|
||||
GlyphID first; /* First GlyphID in this segment */
|
||||
NNOffsetTo<UnsizedArrayOf<T> >
|
||||
NNOffsetTo<UnsizedArrayOf<T>>
|
||||
valuesZ; /* A 16-bit offset from the start of
|
||||
* the table to the data. */
|
||||
public:
|
||||
@ -196,7 +196,7 @@ struct LookupFormat4
|
||||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 4 */
|
||||
VarSizedBinSearchArrayOf<LookupSegmentArray<T> >
|
||||
VarSizedBinSearchArrayOf<LookupSegmentArray<T>>
|
||||
segments; /* The actual segments. These must already be sorted,
|
||||
* according to the first word in each one (the last
|
||||
* glyph in each segment). */
|
||||
@ -253,7 +253,7 @@ struct LookupFormat6
|
||||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 6 */
|
||||
VarSizedBinSearchArrayOf<LookupSingle<T> >
|
||||
VarSizedBinSearchArrayOf<LookupSingle<T>>
|
||||
entries; /* The actual entries, sorted by glyph index. */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (8, entries);
|
||||
@ -419,7 +419,7 @@ struct Lookup
|
||||
/* Ugly hand-coded null objects for template Lookup<> :(. */
|
||||
extern HB_INTERNAL const unsigned char _hb_Null_AAT_Lookup[2];
|
||||
template <typename T>
|
||||
struct Null<AAT::Lookup<T> > {
|
||||
struct Null<AAT::Lookup<T>> {
|
||||
static AAT::Lookup<T> const & get_null ()
|
||||
{ return *reinterpret_cast<const AAT::Lookup<T> *> (_hb_Null_AAT_Lookup); }
|
||||
};
|
||||
@ -510,7 +510,7 @@ struct StateTable
|
||||
const Entry<Extra> &get_entry (int state, unsigned int klass) const
|
||||
{
|
||||
if (unlikely (klass >= nClasses))
|
||||
klass = StateTable<Types, Entry<Extra> >::CLASS_OUT_OF_BOUNDS;
|
||||
klass = StateTable<Types, Entry<Extra>>::CLASS_OUT_OF_BOUNDS;
|
||||
|
||||
const HBUSHORT *states = (this+stateArrayTable).arrayZ;
|
||||
const Entry<Extra> *entries = (this+entryTable).arrayZ;
|
||||
@ -576,7 +576,7 @@ struct StateTable
|
||||
if (unlikely (stop > states))
|
||||
return_trace (false);
|
||||
for (const HBUSHORT *p = states; stop < p; p--)
|
||||
num_entries = MAX<unsigned int> (num_entries, *(p - 1) + 1);
|
||||
num_entries = hb_max (num_entries, *(p - 1) + 1);
|
||||
state_neg = min_state;
|
||||
}
|
||||
}
|
||||
@ -597,7 +597,7 @@ struct StateTable
|
||||
if (unlikely (stop < states))
|
||||
return_trace (false);
|
||||
for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
|
||||
num_entries = MAX<unsigned int> (num_entries, *p + 1);
|
||||
num_entries = hb_max (num_entries, *p + 1);
|
||||
state_pos = max_state + 1;
|
||||
}
|
||||
}
|
||||
@ -611,8 +611,8 @@ struct StateTable
|
||||
for (const Entry<Extra> *p = &entries[entry]; p < stop; p++)
|
||||
{
|
||||
int newState = new_state (p->newState);
|
||||
min_state = MIN (min_state, newState);
|
||||
max_state = MAX (max_state, newState);
|
||||
min_state = hb_min (min_state, newState);
|
||||
max_state = hb_max (max_state, newState);
|
||||
}
|
||||
entry = num_entries;
|
||||
}
|
||||
@ -631,7 +631,7 @@ struct StateTable
|
||||
classTable; /* Offset to the class table. */
|
||||
NNOffsetTo<UnsizedArrayOf<HBUSHORT>, HBUINT>
|
||||
stateArrayTable;/* Offset to the state array. */
|
||||
NNOffsetTo<UnsizedArrayOf<Entry<Extra> >, HBUINT>
|
||||
NNOffsetTo<UnsizedArrayOf<Entry<Extra>>, HBUINT>
|
||||
entryTable; /* Offset to the entry array. */
|
||||
|
||||
public:
|
||||
|
@ -165,7 +165,7 @@ struct feat
|
||||
unsigned int feature_count = featureNameCount;
|
||||
if (count && *count)
|
||||
{
|
||||
unsigned int len = MIN (feature_count - start_offset, *count);
|
||||
unsigned int len = hb_min (feature_count - start_offset, *count);
|
||||
for (unsigned int i = 0; i < len; i++)
|
||||
features[i] = namesZ[i + start_offset].get_feature_type ();
|
||||
*count = len;
|
||||
|
@ -309,7 +309,7 @@ struct WidthDeltaPair
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (24);
|
||||
};
|
||||
|
||||
|
||||
typedef OT::LArrayOf<WidthDeltaPair> WidthDeltaCluster;
|
||||
|
||||
struct JustificationCategory
|
||||
@ -371,7 +371,7 @@ struct JustificationHeader
|
||||
* of postcompensation subtable (set to zero if none).
|
||||
*
|
||||
* The postcompensation subtable, if present in the font. */
|
||||
Lookup<OffsetTo<WidthDeltaCluster> >
|
||||
Lookup<OffsetTo<WidthDeltaCluster>>
|
||||
lookupTable; /* Lookup table associating glyphs with width delta
|
||||
* clusters. See the description of Width Delta Clusters
|
||||
* table for details on how to interpret the lookup values. */
|
||||
|
@ -251,7 +251,7 @@ struct KerxSubTableFormat1
|
||||
|
||||
if (Format1EntryT::performAction (entry) && depth)
|
||||
{
|
||||
unsigned int tuple_count = MAX (1u, table->header.tuple_count ());
|
||||
unsigned int tuple_count = hb_max (1u, table->header.tuple_count ());
|
||||
|
||||
unsigned int kern_idx = Format1EntryT::kernActionIndex (entry);
|
||||
kern_idx = Types::byteOffsetToIndex (kern_idx, &table->machine, kernAction.arrayZ);
|
||||
@ -712,18 +712,18 @@ struct KerxSubTableFormat6
|
||||
{
|
||||
struct Long
|
||||
{
|
||||
LNNOffsetTo<Lookup<HBUINT32> > rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT32> > columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD32> > array;
|
||||
LNNOffsetTo<Lookup<HBUINT32>> rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT32>> columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD32>> array;
|
||||
} l;
|
||||
struct Short
|
||||
{
|
||||
LNNOffsetTo<Lookup<HBUINT16> > rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT16> > columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD> > array;
|
||||
LNNOffsetTo<Lookup<HBUINT16>> rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT16>> columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD>> array;
|
||||
} s;
|
||||
} u;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD> > vector;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD>> vector;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 24);
|
||||
};
|
||||
@ -771,17 +771,17 @@ struct KerxSubTable
|
||||
unsigned int get_size () const { return u.header.length; }
|
||||
unsigned int get_type () const { return u.header.coverage & u.header.SubtableType; }
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
TRACE_DISPATCH (this, subtable_type);
|
||||
switch (subtable_type) {
|
||||
case 0: return_trace (c->dispatch (u.format0));
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 4: return_trace (c->dispatch (u.format4));
|
||||
case 6: return_trace (c->dispatch (u.format6));
|
||||
case 0: return_trace (c->dispatch (u.format0, hb_forward<Ts> (ds)...));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
|
||||
case 4: return_trace (c->dispatch (u.format4, hb_forward<Ts> (ds)...));
|
||||
case 6: return_trace (c->dispatch (u.format6, hb_forward<Ts> (ds)...));
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
|
@ -81,7 +81,7 @@ struct lcar
|
||||
protected:
|
||||
FixedVersion<>version; /* Version number of the ligature caret table */
|
||||
HBUINT16 format; /* Format of the ligature caret table. */
|
||||
Lookup<OffsetTo<LigCaretClassEntry> >
|
||||
Lookup<OffsetTo<LigCaretClassEntry>>
|
||||
lookup; /* data Lookup table associating glyphs */
|
||||
|
||||
public:
|
||||
|
@ -88,7 +88,7 @@ struct RearrangementSubtable
|
||||
start = buffer->idx;
|
||||
|
||||
if (flags & MarkLast)
|
||||
end = MIN (buffer->idx + 1, buffer->len);
|
||||
end = hb_min (buffer->idx + 1, buffer->len);
|
||||
|
||||
if ((flags & Verb) && start < end)
|
||||
{
|
||||
@ -117,14 +117,14 @@ struct RearrangementSubtable
|
||||
};
|
||||
|
||||
unsigned int m = map[flags & Verb];
|
||||
unsigned int l = MIN<unsigned int> (2, m >> 4);
|
||||
unsigned int r = MIN<unsigned int> (2, m & 0x0F);
|
||||
unsigned int l = hb_min (2u, m >> 4);
|
||||
unsigned int r = hb_min (2u, m & 0x0F);
|
||||
bool reverse_l = 3 == (m >> 4);
|
||||
bool reverse_r = 3 == (m & 0x0F);
|
||||
|
||||
if (end - start >= l + r)
|
||||
{
|
||||
buffer->merge_clusters (start, MIN (buffer->idx + 1, buffer->len));
|
||||
buffer->merge_clusters (start, hb_min (buffer->idx + 1, buffer->len));
|
||||
buffer->merge_clusters (start, end);
|
||||
|
||||
hb_glyph_info_t *info = buffer->info;
|
||||
@ -261,13 +261,13 @@ struct ContextualSubtable
|
||||
}
|
||||
if (replacement)
|
||||
{
|
||||
buffer->unsafe_to_break (mark, MIN (buffer->idx + 1, buffer->len));
|
||||
buffer->unsafe_to_break (mark, hb_min (buffer->idx + 1, buffer->len));
|
||||
buffer->info[mark].codepoint = *replacement;
|
||||
ret = true;
|
||||
}
|
||||
|
||||
replacement = nullptr;
|
||||
unsigned int idx = MIN (buffer->idx, buffer->len - 1);
|
||||
unsigned int idx = hb_min (buffer->idx, buffer->len - 1);
|
||||
if (Types::extended)
|
||||
{
|
||||
if (entry.data.currentIndex != 0xFFFF)
|
||||
@ -337,9 +337,9 @@ struct ContextualSubtable
|
||||
const EntryData &data = entries[i].data;
|
||||
|
||||
if (data.markIndex != 0xFFFF)
|
||||
num_lookups = MAX<unsigned int> (num_lookups, 1 + data.markIndex);
|
||||
num_lookups = hb_max (num_lookups, 1 + data.markIndex);
|
||||
if (data.currentIndex != 0xFFFF)
|
||||
num_lookups = MAX<unsigned int> (num_lookups, 1 + data.currentIndex);
|
||||
num_lookups = hb_max (num_lookups, 1 + data.currentIndex);
|
||||
}
|
||||
|
||||
return_trace (substitutionTables.sanitize (c, this, num_lookups));
|
||||
@ -744,7 +744,7 @@ struct InsertionSubtable
|
||||
|
||||
buffer->move_to (end + count);
|
||||
|
||||
buffer->unsafe_to_break_from_outbuffer (mark, MIN (buffer->idx + 1, buffer->len));
|
||||
buffer->unsafe_to_break_from_outbuffer (mark, hb_min (buffer->idx + 1, buffer->len));
|
||||
}
|
||||
|
||||
if (flags & SetMark)
|
||||
@ -883,17 +883,17 @@ struct ChainSubtable
|
||||
Insertion = 5
|
||||
};
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
TRACE_DISPATCH (this, subtable_type);
|
||||
switch (subtable_type) {
|
||||
case Rearrangement: return_trace (c->dispatch (u.rearrangement));
|
||||
case Contextual: return_trace (c->dispatch (u.contextual));
|
||||
case Ligature: return_trace (c->dispatch (u.ligature));
|
||||
case Noncontextual: return_trace (c->dispatch (u.noncontextual));
|
||||
case Insertion: return_trace (c->dispatch (u.insertion));
|
||||
case Rearrangement: return_trace (c->dispatch (u.rearrangement, hb_forward<Ts> (ds)...));
|
||||
case Contextual: return_trace (c->dispatch (u.contextual, hb_forward<Ts> (ds)...));
|
||||
case Ligature: return_trace (c->dispatch (u.ligature, hb_forward<Ts> (ds)...));
|
||||
case Noncontextual: return_trace (c->dispatch (u.noncontextual, hb_forward<Ts> (ds)...));
|
||||
case Insertion: return_trace (c->dispatch (u.insertion, hb_forward<Ts> (ds)...));
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -969,7 +969,7 @@ struct Chain
|
||||
void apply (hb_aat_apply_context_t *c,
|
||||
hb_mask_t flags) const
|
||||
{
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types> > (featureZ.as_array (featureCount));
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount));
|
||||
unsigned int count = subtableCount;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
@ -1031,7 +1031,7 @@ struct Chain
|
||||
if (unlikely (!c->buffer->successful)) return;
|
||||
|
||||
skip:
|
||||
subtable = &StructAfter<ChainSubtable<Types> > (*subtable);
|
||||
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
|
||||
c->set_lookup_index (c->lookup_index + 1);
|
||||
}
|
||||
}
|
||||
@ -1049,13 +1049,13 @@ struct Chain
|
||||
if (!c->check_array (featureZ.arrayZ, featureCount))
|
||||
return_trace (false);
|
||||
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types> > (featureZ.as_array (featureCount));
|
||||
const ChainSubtable<Types> *subtable = &StructAfter<ChainSubtable<Types>> (featureZ.as_array (featureCount));
|
||||
unsigned int count = subtableCount;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
if (!subtable->sanitize (c))
|
||||
return_trace (false);
|
||||
subtable = &StructAfter<ChainSubtable<Types> > (*subtable);
|
||||
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
@ -1095,7 +1095,7 @@ struct mortmorx
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
map->chain_flags.push (chain->compile_flags (mapper));
|
||||
chain = &StructAfter<Chain<Types> > (*chain);
|
||||
chain = &StructAfter<Chain<Types>> (*chain);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1109,7 +1109,7 @@ struct mortmorx
|
||||
{
|
||||
chain->apply (c, c->plan->aat_map.chain_flags[i]);
|
||||
if (unlikely (!c->buffer->successful)) return;
|
||||
chain = &StructAfter<Chain<Types> > (*chain);
|
||||
chain = &StructAfter<Chain<Types>> (*chain);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1125,7 +1125,7 @@ struct mortmorx
|
||||
{
|
||||
if (!chain->sanitize (c, version))
|
||||
return_trace (false);
|
||||
chain = &StructAfter<Chain<Types> > (*chain);
|
||||
chain = &StructAfter<Chain<Types>> (*chain);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
|
@ -66,7 +66,7 @@ struct TrackTableEntry
|
||||
NameID trackNameID; /* The 'name' table index for this track.
|
||||
* (a short word or phrase like "loose"
|
||||
* or "very tight") */
|
||||
NNOffsetTo<UnsizedArrayOf<FWORD> >
|
||||
NNOffsetTo<UnsizedArrayOf<FWORD>>
|
||||
valuesZ; /* Offset from start of tracking table to
|
||||
* per-size tracking values for this track. */
|
||||
|
||||
@ -133,8 +133,8 @@ struct TrackData
|
||||
if (size_table[size_index].to_float () >= csspx)
|
||||
break;
|
||||
|
||||
return round (interpolate_at (size_index ? size_index - 1 : 0, csspx,
|
||||
*trackTableEntry, base));
|
||||
return roundf (interpolate_at (size_index ? size_index - 1 : 0, csspx,
|
||||
*trackTableEntry, base));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
|
@ -135,6 +135,10 @@ static const hb_aat_feature_mapping_t feature_mappings[] =
|
||||
const hb_aat_feature_mapping_t *
|
||||
hb_aat_layout_find_feature_mapping (hb_tag_t tag)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return nullptr;
|
||||
#endif
|
||||
|
||||
return (const hb_aat_feature_mapping_t *) bsearch (&tag,
|
||||
feature_mappings,
|
||||
ARRAY_LENGTH (feature_mappings),
|
||||
@ -147,6 +151,8 @@ hb_aat_layout_find_feature_mapping (hb_tag_t tag)
|
||||
* hb_aat_apply_context_t
|
||||
*/
|
||||
|
||||
/* Note: This context is used for kerning, even without AAT. */
|
||||
|
||||
AAT::hb_aat_apply_context_t::hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
|
||||
hb_font_t *font_,
|
||||
hb_buffer_t *buffer_,
|
||||
@ -183,6 +189,10 @@ void
|
||||
hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
|
||||
hb_aat_map_t *map)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
const AAT::morx& morx = *mapper->face->table.morx;
|
||||
if (morx.has_data ())
|
||||
{
|
||||
@ -209,6 +219,10 @@ hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
|
||||
hb_bool_t
|
||||
hb_aat_layout_has_substitution (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return false;
|
||||
#endif
|
||||
|
||||
return face->table.morx->has_data () ||
|
||||
face->table.mort->has_data ();
|
||||
}
|
||||
@ -218,6 +232,10 @@ hb_aat_layout_substitute (const hb_ot_shape_plan_t *plan,
|
||||
hb_font_t *font,
|
||||
hb_buffer_t *buffer)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
hb_blob_t *morx_blob = font->face->table.morx.get_blob ();
|
||||
const AAT::morx& morx = *morx_blob->as<AAT::morx> ();
|
||||
if (morx.has_data ())
|
||||
@ -240,6 +258,10 @@ hb_aat_layout_substitute (const hb_ot_shape_plan_t *plan,
|
||||
void
|
||||
hb_aat_layout_zero_width_deleted_glyphs (hb_buffer_t *buffer)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
unsigned int count = buffer->len;
|
||||
hb_glyph_info_t *info = buffer->info;
|
||||
hb_glyph_position_t *pos = buffer->pos;
|
||||
@ -257,6 +279,10 @@ is_deleted_glyph (const hb_glyph_info_t *info)
|
||||
void
|
||||
hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
hb_ot_layout_delete_glyphs_inplace (buffer, is_deleted_glyph);
|
||||
}
|
||||
|
||||
@ -270,6 +296,10 @@ hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
|
||||
hb_bool_t
|
||||
hb_aat_layout_has_positioning (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return false;
|
||||
#endif
|
||||
|
||||
return face->table.kerx->has_data ();
|
||||
}
|
||||
|
||||
@ -278,6 +308,10 @@ hb_aat_layout_position (const hb_ot_shape_plan_t *plan,
|
||||
hb_font_t *font,
|
||||
hb_buffer_t *buffer)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
hb_blob_t *kerx_blob = font->face->table.kerx.get_blob ();
|
||||
const AAT::kerx& kerx = *kerx_blob->as<AAT::kerx> ();
|
||||
|
||||
@ -297,6 +331,10 @@ hb_aat_layout_position (const hb_ot_shape_plan_t *plan,
|
||||
hb_bool_t
|
||||
hb_aat_layout_has_tracking (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return false;
|
||||
#endif
|
||||
|
||||
return face->table.trak->has_data ();
|
||||
}
|
||||
|
||||
@ -305,20 +343,16 @@ hb_aat_layout_track (const hb_ot_shape_plan_t *plan,
|
||||
hb_font_t *font,
|
||||
hb_buffer_t *buffer)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
const AAT::trak& trak = *font->face->table.trak;
|
||||
|
||||
AAT::hb_aat_apply_context_t c (plan, font, buffer);
|
||||
trak.apply (&c);
|
||||
}
|
||||
|
||||
|
||||
hb_language_t
|
||||
_hb_aat_language_get (hb_face_t *face,
|
||||
unsigned int i)
|
||||
{
|
||||
return face->table.ltag->get_language (i);
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_aat_layout_get_feature_types:
|
||||
* @face: a face object
|
||||
@ -336,6 +370,12 @@ hb_aat_layout_get_feature_types (hb_face_t *face,
|
||||
unsigned int *feature_count, /* IN/OUT. May be NULL. */
|
||||
hb_aat_layout_feature_type_t *features /* OUT. May be NULL. */)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
if (feature_count)
|
||||
*feature_count = 0;
|
||||
return 0;
|
||||
#endif
|
||||
|
||||
return face->table.feat->get_feature_types (start_offset, feature_count, features);
|
||||
}
|
||||
|
||||
@ -352,6 +392,10 @@ hb_ot_name_id_t
|
||||
hb_aat_layout_feature_type_get_name_id (hb_face_t *face,
|
||||
hb_aat_layout_feature_type_t feature_type)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return HB_OT_NAME_ID_INVALID;
|
||||
#endif
|
||||
|
||||
return face->table.feat->get_feature_name_id (feature_type);
|
||||
}
|
||||
|
||||
@ -380,5 +424,11 @@ hb_aat_layout_feature_type_get_selector_infos (hb_face_t
|
||||
hb_aat_layout_feature_selector_info_t *selectors, /* OUT. May be NULL. */
|
||||
unsigned int *default_index /* OUT. May be NULL. */)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
if (selector_count)
|
||||
*selector_count = 0;
|
||||
return 0;
|
||||
#endif
|
||||
|
||||
return face->table.feat->get_selector_infos (feature_type, start_offset, selector_count, selectors, default_index);
|
||||
}
|
||||
|
@ -30,7 +30,7 @@
|
||||
#include "hb.hh"
|
||||
|
||||
#include "hb-ot-shape.hh"
|
||||
|
||||
#include "hb-aat-ltag-table.hh"
|
||||
|
||||
struct hb_aat_feature_mapping_t
|
||||
{
|
||||
@ -39,7 +39,7 @@ struct hb_aat_feature_mapping_t
|
||||
hb_aat_layout_feature_selector_t selectorToEnable;
|
||||
hb_aat_layout_feature_selector_t selectorToDisable;
|
||||
|
||||
static int cmp (const void *key_, const void *entry_)
|
||||
HB_INTERNAL static int cmp (const void *key_, const void *entry_)
|
||||
{
|
||||
hb_tag_t key = * (unsigned int *) key_;
|
||||
const hb_aat_feature_mapping_t * entry = (const hb_aat_feature_mapping_t *) entry_;
|
||||
@ -77,9 +77,13 @@ hb_aat_layout_track (const hb_ot_shape_plan_t *plan,
|
||||
hb_font_t *font,
|
||||
hb_buffer_t *buffer);
|
||||
|
||||
HB_INTERNAL hb_language_t
|
||||
|
||||
inline hb_language_t
|
||||
_hb_aat_language_get (hb_face_t *face,
|
||||
unsigned int i);
|
||||
unsigned int i)
|
||||
{
|
||||
return face->table.ltag->get_language (i);
|
||||
}
|
||||
|
||||
|
||||
#endif /* HB_AAT_LAYOUT_HH */
|
||||
|
@ -50,7 +50,7 @@ struct FTStringRange
|
||||
}
|
||||
|
||||
protected:
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT8> >
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
tag; /* Offset from the start of the table to
|
||||
* the beginning of the string */
|
||||
HBUINT16 length; /* String length (in bytes) */
|
||||
|
@ -34,6 +34,10 @@
|
||||
void hb_aat_map_builder_t::add_feature (hb_tag_t tag,
|
||||
unsigned int value)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
if (tag == HB_TAG ('a','a','l','t'))
|
||||
{
|
||||
feature_info_t *info = features.push();
|
||||
@ -53,6 +57,10 @@ void hb_aat_map_builder_t::add_feature (hb_tag_t tag,
|
||||
void
|
||||
hb_aat_map_builder_t::compile (hb_aat_map_t &m)
|
||||
{
|
||||
#ifdef HB_NO_SHAPE_AAT
|
||||
return;
|
||||
#endif
|
||||
|
||||
/* Sort features and merge duplicates */
|
||||
if (features.length)
|
||||
{
|
||||
|
@ -66,7 +66,7 @@ struct hb_aat_map_builder_t
|
||||
hb_aat_layout_feature_selector_t setting;
|
||||
unsigned seq; /* For stable sorting only. */
|
||||
|
||||
static int cmp (const void *pa, const void *pb)
|
||||
HB_INTERNAL static int cmp (const void *pa, const void *pb)
|
||||
{
|
||||
const feature_info_t *a = (const feature_info_t *) pa;
|
||||
const feature_info_t *b = (const feature_info_t *) pb;
|
||||
|
278
src/hb-algs.hh
278
src/hb-algs.hh
@ -1,5 +1,6 @@
/*
* Copyright © 2017 Google, Inc.
* Copyright © 2019 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*

@ -22,6 +23,7 @@
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
* Facebook Author(s): Behdad Esfahbod
*/
#ifndef HB_ALGS_HH

@ -32,41 +34,178 @@
#include "hb-null.hh"
static const struct
struct
{
/* Don't know how to set priority of following. Doesn't work right now. */
//template <typename T>
//uint32_t operator () (const T& v) const
//{ return hb_deref_pointer (v).hash (); }
/* Instead, the following ugly solution: */
template <typename T,
hb_enable_if (!hb_is_integer (hb_remove_const (hb_remove_reference (T))) && !hb_is_pointer (T))>
uint32_t operator () (T&& v) const { return v.hash (); }
template <typename T>
uint32_t operator () (const T *v) const
{ return operator() (*v); }
template <typename T,
hb_enable_if (hb_is_integer (T))>
uint32_t operator () (T v) const
{
/* Knuth's multiplicative method: */
return (uint32_t) v * 2654435761u;
}
} hb_hash HB_UNUSED;
static const struct
/* Note. This is dangerous in that if it's passed an rvalue, it returns rvalue-reference. */
template <typename T> auto
operator () (T&& v) const HB_AUTO_RETURN ( hb_forward<T> (v) )
}
HB_FUNCOBJ (hb_identity);
struct
{
template <typename T> T
operator () (const T& v) const { return v; }
} hb_identity HB_UNUSED;
/* Like identity(), but only retains lvalue-references. Rvalues are returned as rvalues. */
template <typename T> T&
operator () (T& v) const { return v; }
static const struct
template <typename T> hb_remove_reference<T>
operator () (T&& v) const { return v; }
}
HB_FUNCOBJ (hb_lidentity);
struct
{
/* Like identity(), but always returns rvalue. */
template <typename T> hb_remove_reference<T>
operator () (T&& v) const { return v; }
}
HB_FUNCOBJ (hb_ridentity);
struct
{
template <typename T> bool
operator () (const T& v) const { return bool (v); }
} hb_bool HB_UNUSED;
operator () (T&& v) const { return bool (hb_forward<T> (v)); }
}
HB_FUNCOBJ (hb_bool);
struct
{
private:
template <typename T> auto
impl (const T& v, hb_priority<1>) const HB_RETURN (uint32_t, hb_deref (v).hash ())
template <typename T,
hb_enable_if (hb_is_integral (T))> auto
impl (const T& v, hb_priority<0>) const HB_AUTO_RETURN
(
/* Knuth's multiplicative method: */
(uint32_t) v * 2654435761u
)
public:
template <typename T> auto
operator () (const T& v) const HB_RETURN (uint32_t, impl (v, hb_prioritize))
}
HB_FUNCOBJ (hb_hash);
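The integral branch above is Knuth's multiplicative hash, and overload priority picks a type's own hash () when it has one. A minimal standalone sketch of that priority-dispatch idea (illustrative names only, not HarfBuzz API):

// Sketch of priority-based overload selection plus Knuth's multiplicative hash.
#include <cstdint>
#include <cstdio>

template <unsigned N> struct priority : priority<N - 1> {};
template <>           struct priority<0> {};

struct my_hash_t
{
  // Preferred overload: the type provides its own hash ().
  template <typename T>
  auto impl (const T &v, priority<1>) const -> decltype (v.hash ())
  { return v.hash (); }

  // Fallback for integers: Knuth's multiplicative method.
  template <typename T>
  uint32_t impl (const T &v, priority<0>) const
  { return (uint32_t) v * 2654435761u; }

  template <typename T>
  uint32_t operator () (const T &v) const { return impl (v, priority<1> ()); }
};

struct point_t { int x, y; uint32_t hash () const { return x * 31 + y; } };

int main ()
{
  my_hash_t h;
  printf ("%u %u\n", h (42), h (point_t {1, 2}));  // integer path vs. custom hash ()
  return 0;
}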
struct
{
private:
/* Pointer-to-member-function. */
template <typename Appl, typename T, typename ...Ts> auto
impl (Appl&& a, hb_priority<2>, T &&v, Ts&&... ds) const HB_AUTO_RETURN
((hb_deref (hb_forward<T> (v)).*hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
/* Pointer-to-member. */
template <typename Appl, typename T> auto
impl (Appl&& a, hb_priority<1>, T &&v) const HB_AUTO_RETURN
((hb_deref (hb_forward<T> (v))).*hb_forward<Appl> (a))
/* Operator(). */
template <typename Appl, typename ...Ts> auto
impl (Appl&& a, hb_priority<0>, Ts&&... ds) const HB_AUTO_RETURN
(hb_deref (hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
public:
template <typename Appl, typename ...Ts> auto
operator () (Appl&& a, Ts&&... ds) const HB_AUTO_RETURN
(
impl (hb_forward<Appl> (a),
hb_prioritize,
hb_forward<Ts> (ds)...)
)
}
HB_FUNCOBJ (hb_invoke);
struct
{
private:
template <typename Pred, typename Val> auto
impl (Pred&& p, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
(hb_deref (hb_forward<Pred> (p)).has (hb_forward<Val> (v)))
template <typename Pred, typename Val> auto
impl (Pred&& p, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
(
hb_invoke (hb_forward<Pred> (p),
hb_forward<Val> (v))
)
public:
template <typename Pred, typename Val> auto
operator () (Pred&& p, Val &&v) const HB_RETURN (bool,
impl (hb_forward<Pred> (p),
hb_forward<Val> (v),
hb_prioritize)
)
}
HB_FUNCOBJ (hb_has);
struct
{
private:
template <typename Pred, typename Val> auto
impl (Pred&& p, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
(
hb_has (hb_forward<Pred> (p),
hb_forward<Val> (v))
)
template <typename Pred, typename Val> auto
impl (Pred&& p, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
(
hb_forward<Pred> (p) == hb_forward<Val> (v)
)
public:
template <typename Pred, typename Val> auto
operator () (Pred&& p, Val &&v) const HB_RETURN (bool,
impl (hb_forward<Pred> (p),
hb_forward<Val> (v),
hb_prioritize)
)
}
HB_FUNCOBJ (hb_match);
struct
{
private:
template <typename Proj, typename Val> auto
impl (Proj&& f, Val &&v, hb_priority<2>) const HB_AUTO_RETURN
(hb_deref (hb_forward<Proj> (f)).get (hb_forward<Val> (v)))
template <typename Proj, typename Val> auto
impl (Proj&& f, Val &&v, hb_priority<1>) const HB_AUTO_RETURN
(
hb_invoke (hb_forward<Proj> (f),
hb_forward<Val> (v))
)
template <typename Proj, typename Val> auto
impl (Proj&& f, Val &&v, hb_priority<0>) const HB_AUTO_RETURN
(
hb_forward<Proj> (f)[hb_forward<Val> (v)]
)
public:
template <typename Proj, typename Val> auto
operator () (Proj&& f, Val &&v) const HB_AUTO_RETURN
(
impl (hb_forward<Proj> (f),
hb_forward<Val> (v),
hb_prioritize)
)
}
HB_FUNCOBJ (hb_get);
template <typename T1, typename T2>
struct hb_pair_t

@ -78,36 +217,73 @@ struct hb_pair_t
hb_pair_t (T1 a, T2 b) : first (a), second (b) {}
hb_pair_t (const pair_t& o) : first (o.first), second (o.second) {}
template <typename Q1, typename Q2,
hb_enable_if (hb_is_convertible (T1, Q1) &&
hb_is_convertible (T2, T2))>
operator hb_pair_t<Q1, Q2> () { return hb_pair_t<Q1, Q2> (first, second); }
hb_pair_t<T1, T2> reverse () const
{ return hb_pair_t<T1, T2> (second, first); }
bool operator == (const pair_t& o) const { return first == o.first && second == o.second; }
T1 first;
T2 second;
};
#define hb_pair_t(T1,T2) hb_pair_t<T1, T2>
template <typename T1, typename T2> static inline hb_pair_t<T1, T2>
hb_pair (T1&& a, T2&& b) { return hb_pair_t<T1, T2> (a, b); }
static const struct
struct
{
template <typename Pair> decltype (hb_declval (Pair).first)
operator () (const Pair& pair) const { return pair.first; }
} hb_first HB_UNUSED;
template <typename Pair> auto
operator () (const Pair& pair) const HB_AUTO_RETURN (pair.first)
}
HB_FUNCOBJ (hb_first);
static const struct
struct
{
template <typename Pair> decltype (hb_declval (Pair).second)
operator () (const Pair& pair) const { return pair.second; }
} hb_second HB_UNUSED;
template <typename Pair> auto
operator () (const Pair& pair) const HB_AUTO_RETURN (pair.second)
}
HB_FUNCOBJ (hb_second);
static const struct
/* Note. In min/max impl, we can use hb_type_identity<T> for second argument.
* However, that would silently convert between different-signedness integers.
* Instead we accept two different types, such that compiler can err if
* comparing integers of different signedness. */
struct
{
template <typename T, typename T2> T
operator () (const T& a, const T2& b) const { return a <= b ? a : b; }
} hb_min HB_UNUSED;
static const struct
private:
template <typename T, typename T2> auto
impl (T&& a, T2&& b) const HB_AUTO_RETURN
(hb_forward<T> (a) <= hb_forward<T2> (b) ? hb_forward<T> (a) : hb_forward<T2> (b))
public:
template <typename T> auto
operator () (T&& a) const HB_AUTO_RETURN (hb_forward<T> (a))
template <typename T, typename... Ts> auto
operator () (T&& a, Ts&& ...ds) const HB_AUTO_RETURN
(impl (hb_forward<T> (a), (*this) (hb_forward<Ts> (ds)...)))
}
HB_FUNCOBJ (hb_min);
struct
{
template <typename T, typename T2> T
operator () (const T& a, const T2& b) const { return a >= b ? a : b; }
} hb_max HB_UNUSED;
private:
template <typename T, typename T2> auto
impl (T&& a, T2&& b) const HB_AUTO_RETURN
(hb_forward<T> (a) >= hb_forward<T2> (b) ? hb_forward<T> (a) : hb_forward<T2> (b))
public:
template <typename T> auto
operator () (T&& a) const HB_AUTO_RETURN (hb_forward<T> (a))
template <typename T, typename... Ts> auto
operator () (T&& a, Ts&& ...ds) const HB_AUTO_RETURN
(impl (hb_forward<T> (a), (*this) (hb_forward<Ts> (ds)...)))
}
HB_FUNCOBJ (hb_max);
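hb_min and hb_max above fold any number of arguments through a private two-argument impl. A simplified standalone equivalent (homogeneous types only, illustrative and not the HarfBuzz functors):

// Sketch of the variadic-min idea: fold arguments through a two-argument step.
#include <cstdio>

struct min_t
{
  template <typename T>
  T operator () (T a) const { return a; }

  template <typename T, typename... Ts>
  T operator () (T a, Ts... ds) const
  {
    T rest = (*this) (ds...);      // recurse over the remaining arguments
    return a <= rest ? a : rest;
  }
};

int main ()
{
  min_t my_min;                       // stand-in for hb_min
  printf ("%d\n", my_min (7, 3, 9));  // prints 3
  return 0;
}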
/*

@ -319,14 +495,6 @@ static inline unsigned char TOUPPER (unsigned char c)
static inline unsigned char TOLOWER (unsigned char c)
{ return (c >= 'A' && c <= 'Z') ? c - 'A' + 'a' : c; }
#undef MIN
template <typename Type>
static inline Type MIN (const Type &a, const Type &b) { return a < b ? a : b; }
#undef MAX
template <typename Type>
static inline Type MAX (const Type &a, const Type &b) { return a > b ? a : b; }
static inline unsigned int DIV_CEIL (const unsigned int a, unsigned int b)
{ return (a + (b - 1)) / b; }

@ -578,7 +746,7 @@ hb_codepoint_parse (const char *s, unsigned int len, int base, hb_codepoint_t *o
{
/* Pain because we don't know whether s is nul-terminated. */
char buf[64];
len = MIN (ARRAY_LENGTH (buf) - 1, len);
len = hb_min (ARRAY_LENGTH (buf) - 1, len);
strncpy (buf, s, len);
buf[len] = '\0';
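The pattern above (copy at most sizeof buf - 1 bytes, NUL-terminate, then parse) is how a non-NUL-terminated substring is handed to C parsing routines. A small self-contained sketch of the same idea, using only the standard library:

// Copy-then-parse sketch for input that is not NUL-terminated (illustrative).
#include <cstdio>
#include <cstdlib>
#include <cstring>

static bool parse_uint_n (const char *s, unsigned len, unsigned *out)
{
  char buf[32];
  len = len < sizeof (buf) - 1 ? len : sizeof (buf) - 1;  // clamp, like hb_min above
  strncpy (buf, s, len);
  buf[len] = '\0';                       // now safe to hand to strtoul
  char *end;
  *out = (unsigned) strtoul (buf, &end, 10);
  return end > buf;
}

int main ()
{
  unsigned v;
  if (parse_uint_n ("1234xyz", 4, &v))   // only the first 4 bytes are considered
    printf ("%u\n", v);                  // 1234
  return 0;
}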
@ -42,19 +42,21 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
/*
* Constructors.
*/
hb_array_t () : arrayZ (nullptr), length (0) {}
hb_array_t (const hb_array_t<Type> &o) : arrayZ (o.arrayZ), length (o.length) {}
template <typename U = Type, hb_enable_if (hb_is_const (U))>
hb_array_t (const hb_array_t<hb_remove_const (Type)> &o) : arrayZ (o.arrayZ), length (o.length) {}
hb_array_t () : arrayZ (nullptr), length (0), backwards_length (0) {}
hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_), backwards_length (0) {}
template <unsigned int length_>
hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_), backwards_length (0) {}
hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
template <unsigned int length_> hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_array_t (const hb_array_t<U> &o) :
hb_iter_with_fallback_t<hb_array_t<Type>, Type&> (),
arrayZ (o.arrayZ), length (o.length), backwards_length (o.backwards_length) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_array_t& operator = (const hb_array_t<U> &o)
{ arrayZ = o.arrayZ; length = o.length; backwards_length = o.backwards_length; return *this; }
template <typename U = Type, hb_enable_if (hb_is_const (U))>
hb_array_t& operator = (const hb_array_t<hb_remove_const (Type)> &o)
{ arrayZ = o.arrayZ; length = o.length; return *this; }
hb_array_t& operator = (const hb_array_t &o)
{ arrayZ = o.arrayZ; length = o.length; return *this; }
/*
* Iterator implementation.
*/

@ -70,15 +72,20 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
if (unlikely (n > length))
n = length;
length -= n;
backwards_length += n;
arrayZ += n;
}
void __rewind__ (unsigned n)
{
if (unlikely (n > length))
n = length;
length -= n;
if (unlikely (n > backwards_length))
n = backwards_length;
length += n;
backwards_length -= n;
arrayZ -= n;
}
unsigned __len__ () const { return length; }
bool operator != (const hb_array_t& o) const
{ return arrayZ != o.arrayZ || length != o.length || backwards_length != o.backwards_length; }
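backwards_length tracks how far the array head has already been advanced, so __rewind__ can move back without walking past the original start. A rough illustration of that bookkeeping with plain pointers (a stand-in, not the real hb_array_t):

// Forward/rewind bookkeeping sketch, mirroring the logic above (illustrative).
#include <algorithm>
#include <cstdio>

struct span_t
{
  const int *arrayZ;
  unsigned length;
  unsigned backwards_length;   // how many items we have skipped over

  void forward (unsigned n)
  {
    n = std::min (n, length);
    length -= n; backwards_length += n; arrayZ += n;
  }
  void rewind (unsigned n)
  {
    n = std::min (n, backwards_length);   // never past the original start
    length += n; backwards_length -= n; arrayZ -= n;
  }
};

int main ()
{
  static const int data[] = {10, 20, 30, 40};
  span_t s = {data, 4, 0};
  s.forward (3);                             // now at 40, can rewind up to 3
  s.rewind (5);                              // clamped to 3: back at the start
  printf ("%d %u\n", *s.arrayZ, s.length);   // 10 4
  return 0;
}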
/* Extra operators.
*/

@ -86,8 +93,8 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
operator hb_array_t<const Type> () { return hb_array_t<const Type> (arrayZ, length); }
template <typename T> operator T * () const { return arrayZ; }
bool operator == (const hb_array_t &o) const;
uint32_t hash () const;
HB_INTERNAL bool operator == (const hb_array_t &o) const;
HB_INTERNAL uint32_t hash () const;
/*
* Compare, Sort, and Search.

@ -100,7 +107,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
return (int) a.length - (int) length;
return hb_memcmp (a.arrayZ, arrayZ, get_size ());
}
static int cmp (const void *pa, const void *pb)
HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
hb_array_t<Type> *a = (hb_array_t<Type> *) pa;
hb_array_t<Type> *b = (hb_array_t<Type> *) pb;

@ -140,7 +147,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
}
void qsort (unsigned int start, unsigned int end)
{
end = MIN (end, length);
end = hb_min (end, length);
assert (start <= end);
if (likely (start < end))
::qsort (arrayZ + start, end - start, this->item_size, Type::cmp);

@ -163,7 +170,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
else
count -= start_offset;
if (seg_count)
count = *seg_count = MIN (count, *seg_count);
count = *seg_count = hb_min (count, *seg_count);
return hb_array_t<Type> (arrayZ + start_offset, count);
}
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int seg_count) const

@ -173,6 +180,17 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
void free ()
{ ::free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
template <typename hb_serialize_context_t>
hb_array_t copy (hb_serialize_context_t *c) const
{
TRACE_SERIALIZE (this);
auto* out = c->start_embed (arrayZ);
if (unlikely (!c->extend_size (out, get_size ()))) return_trace (hb_array_t ());
for (unsigned i = 0; i < length; i++)
out[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
return_trace (hb_array_t (out, length));
}
template <typename hb_sanitize_context_t>
bool sanitize (hb_sanitize_context_t *c) const
{ return c->check_array (arrayZ, length); }

@ -184,6 +202,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
public:
Type *arrayZ;
unsigned int length;
unsigned int backwards_length;
};
template <typename T> inline hb_array_t<T>
hb_array (T *array, unsigned int length)

@ -210,11 +229,23 @@ struct hb_sorted_array_t :
static constexpr bool is_sorted_iterator = true;
hb_sorted_array_t () : hb_array_t<Type> () {}
hb_sorted_array_t (const hb_array_t<Type> &o) : hb_array_t<Type> (o) {}
template <typename U = Type, hb_enable_if (hb_is_const (U))>
hb_sorted_array_t (const hb_sorted_array_t<hb_remove_const (Type)> &o) : hb_array_t<Type> (o) {}
hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
template <unsigned int length_> hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
template <unsigned int length_>
hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_sorted_array_t (const hb_array_t<U> &o) :
hb_iter_t<hb_sorted_array_t<Type>, Type&> (),
hb_array_t<Type> (o) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_sorted_array_t& operator = (const hb_array_t<U> &o)
{ hb_array_t<Type> (*this) = o; return *this; }
/* Iterator implementation. */
bool operator != (const hb_sorted_array_t& o) const
{ return this->arrayZ != o.arrayZ || this->length != o.length; }
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
{ return hb_sorted_array_t<Type> (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }

@ -107,7 +107,7 @@ _hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
static inline void _hb_memory_barrier ()
{
#if !defined(MemoryBarrier)
#ifndef MemoryBarrier
/* MinGW has a convoluted history of supporting MemoryBarrier. */
LONG dummy = 0;
InterlockedExchange (&dummy, 1);

@ -283,7 +283,7 @@ struct hb_atomic_int_t
template <typename P>
struct hb_atomic_ptr_t
{
typedef hb_remove_pointer (P) T;
typedef hb_remove_pointer<P> T;
void init (T* v_ = nullptr) { set_relaxed (v_); }
void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }

@ -155,7 +155,7 @@ hb_blob_create_sub_blob (hb_blob_t *parent,
hb_blob_make_immutable (parent);
blob = hb_blob_create (parent->data + offset,
MIN (length, parent->length - offset),
hb_min (length, parent->length - offset),
HB_MEMORY_MODE_READONLY,
hb_blob_reference (parent),
_hb_blob_destroy);

@ -81,7 +81,7 @@ struct hb_blob_t
template <typename P>
struct hb_blob_ptr_t
{
typedef hb_remove_pointer (P) T;
typedef hb_remove_pointer<P> T;
hb_blob_ptr_t (hb_blob_t *b_ = nullptr) : b (b_) {}
hb_blob_t * operator = (hb_blob_t *b_) { return b = b_; }
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -138,34 +138,34 @@ _hb_buffer_serialize_glyphs_json (hb_buffer_t *buffer,
*p++ = '"';
}
else
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint));
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_CLUSTERS)) {
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"cl\":%u", info[i].cluster));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"cl\":%u", info[i].cluster));
}
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_POSITIONS))
{
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"dx\":%d,\"dy\":%d",
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"dx\":%d,\"dy\":%d",
x+pos[i].x_offset, y+pos[i].y_offset));
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES))
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"ax\":%d,\"ay\":%d",
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"ax\":%d,\"ay\":%d",
pos[i].x_advance, pos[i].y_advance));
}
if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS)
{
if (info[i].mask & HB_GLYPH_FLAG_DEFINED)
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"fl\":%u", info[i].mask & HB_GLYPH_FLAG_DEFINED));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"fl\":%u", info[i].mask & HB_GLYPH_FLAG_DEFINED));
}
if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_EXTENTS)
{
hb_glyph_extents_t extents;
hb_font_get_glyph_extents(font, info[i].codepoint, &extents);
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"xb\":%d,\"yb\":%d",
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"xb\":%d,\"yb\":%d",
extents.x_bearing, extents.y_bearing));
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"w\":%d,\"h\":%d",
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",\"w\":%d,\"h\":%d",
extents.width, extents.height));
}

@ -224,37 +224,37 @@ _hb_buffer_serialize_glyphs_text (hb_buffer_t *buffer,
p += strlen (p);
}
else
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%u", info[i].codepoint));
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_CLUSTERS)) {
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "=%u", info[i].cluster));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "=%u", info[i].cluster));
}
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_POSITIONS))
{
if (x+pos[i].x_offset || y+pos[i].y_offset)
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "@%d,%d", x+pos[i].x_offset, y+pos[i].y_offset));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "@%d,%d", x+pos[i].x_offset, y+pos[i].y_offset));
if (!(flags & HB_BUFFER_SERIALIZE_FLAG_NO_ADVANCES))
{
*p++ = '+';
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%d", pos[i].x_advance));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "%d", pos[i].x_advance));
if (pos[i].y_advance)
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",%d", pos[i].y_advance));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), ",%d", pos[i].y_advance));
}
}
if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_FLAGS)
{
if (info[i].mask & HB_GLYPH_FLAG_DEFINED)
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "#%X", info[i].mask &HB_GLYPH_FLAG_DEFINED));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "#%X", info[i].mask &HB_GLYPH_FLAG_DEFINED));
}
if (flags & HB_BUFFER_SERIALIZE_FLAG_GLYPH_EXTENTS)
{
hb_glyph_extents_t extents;
hb_font_get_glyph_extents(font, info[i].codepoint, &extents);
p += MAX (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "<%d,%d,%d,%d>", extents.x_bearing, extents.y_bearing, extents.width, extents.height));
p += hb_max (0, snprintf (p, ARRAY_LENGTH (b) - (p - b), "<%d,%d,%d,%d>", extents.x_bearing, extents.y_bearing, extents.width, extents.height));
}
unsigned int l = p - b;

@ -380,7 +380,7 @@ static hb_bool_t
parse_uint (const char *pp, const char *end, uint32_t *pv)
{
char buf[32];
unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp));
unsigned int len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp));
strncpy (buf, pp, len);
buf[len] = '\0';

@ -401,7 +401,7 @@ static hb_bool_t
parse_int (const char *pp, const char *end, int32_t *pv)
{
char buf[32];
unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp));
unsigned int len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - pp));
strncpy (buf, pp, len);
buf[len] = '\0';

@ -524,7 +524,7 @@ hb_buffer_t::merge_clusters_impl (unsigned int start,
unsigned int cluster = info[start].cluster;
for (unsigned int i = start + 1; i < end; i++)
cluster = MIN<unsigned int> (cluster, info[i].cluster);
cluster = hb_min (cluster, info[i].cluster);
/* Extend end */
while (end < len && info[end - 1].cluster == info[end].cluster)

@ -555,7 +555,7 @@ hb_buffer_t::merge_out_clusters (unsigned int start,
unsigned int cluster = out_info[start].cluster;
for (unsigned int i = start + 1; i < end; i++)
cluster = MIN<unsigned int> (cluster, out_info[i].cluster);
cluster = hb_min (cluster, out_info[i].cluster);
/* Extend start */
while (start && out_info[start - 1].cluster == out_info[start].cluster)
@ -379,7 +379,7 @@ struct hb_buffer_t
unsigned int cluster) const
{
for (unsigned int i = start; i < end; i++)
cluster = MIN<unsigned int> (cluster, infos[i].cluster);
cluster = hb_min (cluster, infos[i].cluster);
return cluster;
}
void

@ -691,7 +691,7 @@ struct opset_t
case OpCode_TwoByteNegInt0: case OpCode_TwoByteNegInt1:
case OpCode_TwoByteNegInt2: case OpCode_TwoByteNegInt3:
env.argStack.push_int ((int16_t)(-(op - OpCode_TwoByteNegInt0) * 256 - env.str_ref[0] - 108));
env.argStack.push_int ((-(int16_t)(op - OpCode_TwoByteNegInt0) * 256 - env.str_ref[0] - 108));
env.str_ref.inc ();
break;

@ -147,8 +147,9 @@ struct cs_interp_env_t : interp_env_t<ARG>
return callStack.in_error () || SUPER::in_error ();
}
bool popSubrNum (const biased_subrs_t<SUBRS>& biasedSubrs, unsigned int &subr_num)
bool pop_subr_num (const biased_subrs_t<SUBRS>& biasedSubrs, unsigned int &subr_num)
{
subr_num = 0;
int n = SUPER::argStack.pop_int ();
n += biasedSubrs.get_bias ();
if (unlikely ((n < 0) || ((unsigned int)n >= biasedSubrs.get_count ())))

@ -158,11 +159,11 @@ struct cs_interp_env_t : interp_env_t<ARG>
return true;
}
void callSubr (const biased_subrs_t<SUBRS>& biasedSubrs, cs_type_t type)
void call_subr (const biased_subrs_t<SUBRS>& biasedSubrs, cs_type_t type)
{
unsigned int subr_num;
unsigned int subr_num = 0;
if (unlikely (!popSubrNum (biasedSubrs, subr_num)
if (unlikely (!pop_subr_num (biasedSubrs, subr_num)
|| callStack.get_count () >= kMaxCallLimit))
{
SUPER::set_error ();

@ -175,7 +176,7 @@ struct cs_interp_env_t : interp_env_t<ARG>
SUPER::str_ref = context.str_ref;
}
void returnFromSubr ()
void return_from_subr ()
{
if (unlikely (SUPER::str_ref.in_error ()))
SUPER::set_error ();

@ -246,7 +247,7 @@ struct path_procs_null_t
static void flex1 (ENV &env, PARAM& param) {}
};
template <typename ARG, typename OPSET, typename ENV, typename PARAM, typename PATH=path_procs_null_t<ENV, PARAM> >
template <typename ARG, typename OPSET, typename ENV, typename PARAM, typename PATH=path_procs_null_t<ENV, PARAM>>
struct cs_opset_t : opset_t<ARG>
{
static void process_op (op_code_t op, ENV &env, PARAM& param)

@ -254,7 +255,7 @@ struct cs_opset_t : opset_t<ARG>
switch (op) {
case OpCode_return:
env.returnFromSubr ();
env.return_from_subr ();
break;
case OpCode_endchar:
OPSET::check_width (op, env, param);

@ -267,11 +268,11 @@ struct cs_opset_t : opset_t<ARG>
break;
case OpCode_callsubr:
env.callSubr (env.localSubrs, CSType_LocalSubr);
env.call_subr (env.localSubrs, CSType_LocalSubr);
break;
case OpCode_callgsubr:
env.callSubr (env.globalSubrs, CSType_GlobalSubr);
env.call_subr (env.globalSubrs, CSType_GlobalSubr);
break;
case OpCode_hstem:

@ -81,7 +81,7 @@ struct cff1_cs_interp_env_t : cs_interp_env_t<number_t, CFF1Subrs>
typedef cs_interp_env_t<number_t, CFF1Subrs> SUPER;
};
template <typename OPSET, typename PARAM, typename PATH=path_procs_null_t<cff1_cs_interp_env_t, PARAM> >
template <typename OPSET, typename PARAM, typename PATH=path_procs_null_t<cff1_cs_interp_env_t, PARAM>>
struct cff1_cs_opset_t : cs_opset_t<number_t, OPSET, cff1_cs_interp_env_t, PARAM, PATH>
{
/* PostScript-originated legacy opcodes (OpCode_add etc) are unsupported */

@ -193,7 +193,7 @@ struct cff2_cs_interp_env_t : cs_interp_env_t<blend_arg_t, CFF2Subrs>
typedef cs_interp_env_t<blend_arg_t, CFF2Subrs> SUPER;
};
template <typename OPSET, typename PARAM, typename PATH=path_procs_null_t<cff2_cs_interp_env_t, PARAM> >
template <typename OPSET, typename PARAM, typename PATH=path_procs_null_t<cff2_cs_interp_env_t, PARAM>>
struct cff2_cs_opset_t : cs_opset_t<blend_arg_t, OPSET, cff2_cs_interp_env_t, PARAM, PATH>
{
static void process_op (op_code_t op, cff2_cs_interp_env_t &env, PARAM& param)
@ -356,7 +356,7 @@ hb_language_from_string (const char *str, int len)
{
/* NUL-terminate it. */
char strbuf[64];
len = MIN (len, (int) sizeof (strbuf) - 1);
len = hb_min (len, (int) sizeof (strbuf) - 1);
memcpy (strbuf, str, len);
strbuf[len] = '\0';
item = lang_find_or_insert (strbuf);

@ -488,7 +488,7 @@ hb_script_from_string (const char *str, int len)
/**
* hb_script_to_iso15924_tag:
* @script: an #hb_script_ to convert.
* @script: an #hb_script_t to convert.
*
* See hb_script_from_iso15924_tag().
*

@ -720,7 +720,7 @@ static bool
parse_uint (const char **pp, const char *end, unsigned int *pv)
{
char buf[32];
unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
unsigned int len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
strncpy (buf, *pp, len);
buf[len] = '\0';

@ -744,7 +744,7 @@ static bool
parse_uint32 (const char **pp, const char *end, uint32_t *pv)
{
char buf[32];
unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
unsigned int len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
strncpy (buf, *pp, len);
buf[len] = '\0';

@ -783,7 +783,7 @@ parse_uint32 (const char **pp, const char *end, uint32_t *pv)
static void free_static_C_locale ();
#endif
static struct hb_C_locale_lazy_loader_t : hb_lazy_loader_t<hb_remove_pointer (HB_LOCALE_T),
static struct hb_C_locale_lazy_loader_t : hb_lazy_loader_t<hb_remove_pointer<HB_LOCALE_T>,
hb_C_locale_lazy_loader_t>
{
static HB_LOCALE_T create ()

@ -825,7 +825,7 @@ static bool
parse_float (const char **pp, const char *end, float *pv)
{
char buf[32];
unsigned int len = MIN (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
unsigned int len = hb_min (ARRAY_LENGTH (buf) - 1, (unsigned int) (end - *pp));
strncpy (buf, *pp, len);
buf[len] = '\0';

@ -1071,21 +1071,21 @@ hb_feature_to_string (hb_feature_t *feature,
{
s[len++] = '[';
if (feature->start)
len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->start));
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->start));
if (feature->end != feature->start + 1) {
s[len++] = ':';
if (feature->end != (unsigned int) -1)
len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->end));
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->end));
}
s[len++] = ']';
}
if (feature->value > 1)
{
s[len++] = '=';
len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value));
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%u", feature->value));
}
assert (len < ARRAY_LENGTH (s));
len = MIN (len, size - 1);
len = hb_min (len, size - 1);
memcpy (buf, s, len);
buf[len] = '\0';
}

@ -1152,14 +1152,71 @@ hb_variation_to_string (hb_variation_t *variation,
while (len && s[len - 1] == ' ')
len--;
s[len++] = '=';
len += MAX (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%g", (double) variation->value));
len += hb_max (0, snprintf (s + len, ARRAY_LENGTH (s) - len, "%g", (double) variation->value));
assert (len < ARRAY_LENGTH (s));
len = MIN (len, size - 1);
len = hb_min (len, size - 1);
memcpy (buf, s, len);
buf[len] = '\0';
}
/**
* hb_color_get_alpha:
* @color: a #hb_color_t whose channels we are interested in.
*
* Return value: Alpha channel value of the given color
*
* Since: REPLACEME
*/
uint8_t
(hb_color_get_alpha) (hb_color_t color)
{
return hb_color_get_alpha (color);
}
/**
* hb_color_get_red:
* @color: a #hb_color_t whose channels we are interested in.
*
* Return value: Red channel value of the given color
*
* Since: REPLACEME
*/
uint8_t
(hb_color_get_red) (hb_color_t color)
{
return hb_color_get_red (color);
}
/**
* hb_color_get_green:
* @color: a #hb_color_t whose channels we are interested in.
*
* Return value: Green channel value of the given color
*
* Since: REPLACEME
*/
uint8_t
(hb_color_get_green) (hb_color_t color)
{
return hb_color_get_green (color);
}
/**
* hb_color_get_blue:
* @color: a #hb_color_t whose channels we are interested in.
*
* Return value: Blue channel value of the given color
*
* Since: REPLACEME
*/
uint8_t
(hb_color_get_blue) (hb_color_t color)
{
return hb_color_get_blue (color);
}
/* If there is no visibility control, then hb-static.cc will NOT
* define anything. Instead, we get it to define one set in here
* only, so only libharfbuzz.so defines them, not other libs. */
@ -467,39 +467,21 @@ typedef uint32_t hb_color_t;
#define HB_COLOR(b,g,r,a) ((hb_color_t) HB_TAG ((b),(g),(r),(a)))
/**
* hb_color_get_alpha:
*
*
*
* Since: 2.1.0
*/
HB_EXTERN uint8_t
hb_color_get_alpha (hb_color_t color);
#define hb_color_get_alpha(color) ((color) & 0xFF)
/**
* hb_color_get_red:
*
*
*
* Since: 2.1.0
*/
#define hb_color_get_red(color) (((color) >> 8) & 0xFF)
/**
* hb_color_get_green:
*
*
*
* Since: 2.1.0
*/
#define hb_color_get_green(color) (((color) >> 16) & 0xFF)
/**
* hb_color_get_blue:
*
*
*
* Since: 2.1.0
*/
#define hb_color_get_blue(color) (((color) >> 24) & 0xFF)
HB_EXTERN uint8_t
hb_color_get_red (hb_color_t color);
#define hb_color_get_red(color) (((color) >> 8) & 0xFF)
HB_EXTERN uint8_t
hb_color_get_green (hb_color_t color);
#define hb_color_get_green(color) (((color) >> 16) & 0xFF)
HB_EXTERN uint8_t
hb_color_get_blue (hb_color_t color);
#define hb_color_get_blue(color) (((color) >> 24) & 0xFF)
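With the BGRA packing of HB_COLOR, the getters above are plain shifts and masks; for example (a small sketch assuming an installed hb.h that provides these macros):

// Pack a color and read the channels back.
#include <hb.h>
#include <cstdio>

int main ()
{
  hb_color_t c = HB_COLOR (0x11, 0x22, 0x33, 0x44);   /* b, g, r, a */
  printf ("b=%02x g=%02x r=%02x a=%02x\n",
          (unsigned) hb_color_get_blue (c),   /* 0x11 */
          (unsigned) hb_color_get_green (c),  /* 0x22 */
          (unsigned) hb_color_get_red (c),    /* 0x33 */
          (unsigned) hb_color_get_alpha (c)); /* 0x44 */
  return 0;
}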
HB_END_DECLS

100 src/hb-config.hh Normal file
@ -0,0 +1,100 @@
/*
* Copyright © 2019 Facebook, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Facebook Author(s): Behdad Esfahbod
*/
#ifndef HB_CONFIG_HH
#define HB_CONFIG_HH
#if 0 /* Make test happy. */
#include "hb.hh"
#endif
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef HB_TINY
#define HB_LEAN
#define HB_MINI
//#define HB_NO_MT /* Let user choose */
#endif
#ifdef HB_LEAN
#define HB_DISABLE_DEPRECATED
#define HB_NO_ATEXIT
#define HB_NO_BITMAP
#define HB_NO_CFF
#define HB_NO_COLOR
#define HB_NO_GETENV
#define HB_NO_MATH
#define HB_NO_NAME
#define HB_NO_LAYOUT_UNUSED
#endif
#ifdef HB_MINI
#define HB_NO_AAT
#define HB_NO_LEGACY
#endif
/* Closure. */
#ifdef HB_DISABLE_DEPRECATED
#define HB_IF_NOT_DEPRECATED(x)
#else
#define HB_IF_NOT_DEPRECATED(x) x
#endif
#ifdef HB_NO_AAT
#define HB_NO_OT_NAME_LANGUAGE_AAT
#define HB_NO_SHAPE_AAT
#endif
#ifdef HB_NO_BITMAP
#define HB_NO_OT_FONT_BITMAP
#endif
#ifdef HB_NO_CFF
#define HB_NO_OT_FONT_CFF
#define HB_NO_SUBSET_CFF
#endif
#ifdef HB_NO_LEGACY
#define HB_NO_OT_LAYOUT_BLACKLIST
#define HB_NO_OT_SHAPE_FALLBACK
#endif
#ifdef HB_NO_NAME
#define HB_NO_OT_NAME_LANGUAGE
#endif
#ifdef HB_NO_OT_SHAPE_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_ARABIC_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_HEBREW_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_THAI_FALLBACK
#define HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS
#endif
#endif /* HB_CONFIG_HH */
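The header works purely through the preprocessor: a coarse knob such as HB_TINY expands into HB_LEAN and HB_MINI, which in turn expand into the fine-grained HB_NO_* toggles, so a build only has to define the top-level knob (for example by adding -DHB_TINY to the compiler flags). An illustrative miniature of the same cascade, with made-up macro names rather than the HarfBuzz ones:

// Coarse knob -> intermediate profile -> fine-grained feature toggles.
#define MYLIB_TINY

#ifdef MYLIB_TINY
#define MYLIB_LEAN
#endif

#ifdef MYLIB_LEAN
#define MYLIB_NO_COLOR
#endif

#include <cstdio>
int main ()
{
#ifdef MYLIB_NO_COLOR
  printf ("color support compiled out\n");   // reached because MYLIB_TINY was set
#endif
  return 0;
}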
@ -55,13 +55,13 @@ coretext_font_size_from_ptem (float ptem)
* https://developer.apple.com/library/content/documentation/GraphicsAnimation/Conceptual/HighResolutionOSX/Explained/Explained.html
*/
ptem *= 96.f / 72.f;
return ptem <= 0.f ? HB_CORETEXT_DEFAULT_FONT_SIZE : ptem;
return (CGFloat) (ptem <= 0.f ? HB_CORETEXT_DEFAULT_FONT_SIZE : ptem);
}
static float
coretext_font_size_to_ptem (CGFloat size)
{
size *= 72.f / 96.f;
return size <= 0.f ? 0 : size;
size *= 72. / 96.;
return size <= 0 ? 0 : size;
}
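The two helpers are inverses: CoreText sizes are effectively 96-dpi units while ptem is 72-dpi typographic points, hence the 96/72 and 72/96 factors. A tiny worked check of the round trip (illustrative only):

#include <cstdio>
int main ()
{
  float ptem = 12.f;
  float ct_size = ptem * 96.f / 72.f;                         // 16: size handed to CoreText
  printf ("%g -> %g -> %g\n", ptem, ct_size, ct_size * 72.f / 96.f);  // 12 -> 16 -> 12
  return 0;
}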
static void

@ -410,7 +410,7 @@ struct active_feature_t {
feature_record_t rec;
unsigned int order;
static int cmp (const void *pa, const void *pb) {
HB_INTERNAL static int cmp (const void *pa, const void *pb) {
const active_feature_t *a = (const active_feature_t *) pa;
const active_feature_t *b = (const active_feature_t *) pb;
return a->rec.feature < b->rec.feature ? -1 : a->rec.feature > b->rec.feature ? 1 :

@ -428,7 +428,7 @@ struct feature_event_t {
bool start;
active_feature_t feature;
static int cmp (const void *pa, const void *pb) {
HB_INTERNAL static int cmp (const void *pa, const void *pb) {
const feature_event_t *a = (const feature_event_t *) pa;
const feature_event_t *b = (const feature_event_t *) pb;
return a->index < b->index ? -1 : a->index > b->index ? 1 :

@ -598,7 +598,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
} else {
active_feature_t *feature = active_features.find (&event->feature);
if (feature)
active_features.remove (feature - active_features.arrayZ ());
active_features.remove (feature - active_features.arrayZ);
}
}
}

@ -771,7 +771,7 @@ resize_and_retry:
feature.start < chars_len && feature.start < feature.end)
{
CFRange feature_range = CFRangeMake (feature.start,
MIN (feature.end, chars_len) - feature.start);
hb_min (feature.end, chars_len) - feature.start);
if (feature.value)
CFAttributedStringRemoveAttribute (attr_string, feature_range, kCTKernAttributeName);
else

@ -1116,7 +1116,7 @@ resize_and_retry:
unsigned int cluster = info[count - 1].cluster;
for (unsigned int i = count - 1; i > 0; i--)
{
cluster = MIN (cluster, info[i - 1].cluster);
cluster = hb_min (cluster, info[i - 1].cluster);
info[i - 1].cluster = cluster;
}
}

@ -1125,7 +1125,7 @@ resize_and_retry:
unsigned int cluster = info[0].cluster;
for (unsigned int i = 1; i < count; i++)
{
cluster = MIN (cluster, info[i].cluster);
cluster = hb_min (cluster, info[i].cluster);
info[i].cluster = cluster;
}
}

@ -63,6 +63,9 @@ extern HB_INTERNAL hb_atomic_int_t _hb_options;
static inline hb_options_t
hb_options ()
{
#ifdef HB_NO_GETENV
return hb_options_t ();
#endif
/* Make a local copy, so we can access bitfield threadsafely. */
hb_options_union_t u;
u.i = _hb_options.get_relaxed ();

@ -158,7 +161,7 @@ _hb_debug_msg_va (const char *what,
VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR VBAR;
fprintf (stderr, "%2u %s" VRBAR "%s",
level,
bars + sizeof (bars) - 1 - MIN ((unsigned int) sizeof (bars) - 1, (unsigned int) (sizeof (VBAR) - 1) * level),
bars + sizeof (bars) - 1 - hb_min ((unsigned int) sizeof (bars) - 1, (unsigned int) (sizeof (VBAR) - 1) * level),
level_dir ? (level_dir > 0 ? DLBAR : ULBAR) : LBAR);
} else
fprintf (stderr, " " VRBAR LBAR);

@ -246,8 +249,8 @@ struct hb_printer_t<bool> {
};
template <>
struct hb_printer_t<hb_void_t> {
const char *print (hb_void_t) { return ""; }
struct hb_printer_t<hb_empty_t> {
const char *print (hb_empty_t) { return ""; }
};

@ -263,7 +266,7 @@ static inline void _hb_warn_no_return (bool returned)
}
}
template <>
/*static*/ inline void _hb_warn_no_return<hb_void_t> (bool returned HB_UNUSED)
/*static*/ inline void _hb_warn_no_return<hb_empty_t> (bool returned HB_UNUSED)
{}
template <int max_level, typename ret_t>

@ -327,18 +330,20 @@ struct hb_auto_trace_t<0, ret_t>
const char *message,
...) HB_PRINTF_FUNC(6, 7) {}
ret_t ret (ret_t v,
const char *func HB_UNUSED = nullptr,
unsigned int line HB_UNUSED = 0) { return v; }
template <typename T>
T ret (T&& v,
const char *func HB_UNUSED = nullptr,
unsigned int line HB_UNUSED = 0) { return hb_forward<T> (v); }
};
/* For disabled tracing; optimize out everything.
* https://github.com/harfbuzz/harfbuzz/pull/605 */
template <typename ret_t>
struct hb_no_trace_t {
ret_t ret (ret_t v,
const char *func HB_UNUSED = "",
unsigned int line HB_UNUSED = 0) { return v; }
template <typename T>
T ret (T&& v,
const char *func HB_UNUSED = nullptr,
unsigned int line HB_UNUSED = 0) { return hb_forward<T> (v); }
};
#define return_trace(RET) return trace.ret (RET, HB_FUNC, __LINE__)

@ -778,7 +778,7 @@ retry_getglyphs:
{
uint32_t *p =
&vis_clusters[log_clusters[buffer->info[i].utf16_index ()]];
*p = MIN (*p, buffer->info[i].cluster);
*p = hb_min (*p, buffer->info[i].cluster);
}
for (unsigned int i = 1; i < glyphCount; i++)
if (vis_clusters[i] == (uint32_t) -1)

@ -930,7 +930,9 @@ _hb_directwrite_font_release (void *data)
/**
* hb_directwrite_face_create:
* @font_face:
* @font_face: a DirectWrite IDWriteFontFace object.
*
* Return value: #hb_face_t object corresponding to the given input
*
* Since: 2.4.0
**/

@ -945,7 +947,9 @@ hb_directwrite_face_create (IDWriteFontFace *font_face)
/**
* hb_directwrite_face_get_font_face:
* @face:
* @face: a #hb_face_t object
*
* Return value: DirectWrite IDWriteFontFace object corresponding to the given input
*
* Since: REPLACEME
**/

@ -38,10 +38,18 @@
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
private:
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
const Context* thiz () const { return static_cast<const Context *> (this); }
Context* thiz () { return static_cast< Context *> (this); }
public:
static constexpr unsigned max_debug_depth = MaxDebugDepth;
typedef Return return_t;
template <typename T, typename F>
bool may_dispatch (const T *obj HB_UNUSED, const F *format HB_UNUSED) { return true; }
template <typename T, typename ...Ts>
return_t dispatch (const T &obj, Ts&&... ds)
{ return obj.dispatch (thiz (), hb_forward<Ts> (ds)...); }
static return_t no_dispatch_return_value () { return Context::default_return_value (); }
static bool stop_sublookup_iteration (const return_t r HB_UNUSED) { return false; }
};
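thiz () is the usual CRTP downcast: the base template knows its concrete Context type and forwards to it with no virtual dispatch. A minimal generic sketch of the pattern (not the HarfBuzz classes):

// CRTP sketch: the base static_casts itself to the derived type it was given.
#include <cstdio>

template <typename Context>
struct dispatcher_t
{
  const Context *thiz () const { return static_cast<const Context *> (this); }

  template <typename T>
  int dispatch (const T &obj) const { return obj.accept (*thiz ()); }
};

struct counter_t : dispatcher_t<counter_t>
{
  int weight = 2;
};

struct node_t
{
  int value;
  int accept (const counter_t &c) const { return value * c.weight; }
};

int main ()
{
  counter_t c;
  node_t n = {21};
  printf ("%d\n", c.dispatch (n));   // 42, with no virtual calls involved
  return 0;
}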
@ -336,6 +336,7 @@ hb_font_get_glyph_v_origin_default (hb_font_t *font,
return ret;
}
#ifndef HB_DISABLE_DEPRECATED
static hb_position_t
hb_font_get_glyph_h_kerning_nil (hb_font_t *font HB_UNUSED,
void *font_data HB_UNUSED,

@ -373,6 +374,7 @@ hb_font_get_glyph_v_kerning_default (hb_font_t *font,
{
return font->parent_scale_y_distance (font->parent->get_glyph_v_kerning (top_glyph, bottom_glyph));
}
#endif
static hb_bool_t
hb_font_get_glyph_extents_nil (hb_font_t *font HB_UNUSED,

@ -925,6 +927,7 @@ hb_font_get_glyph_v_origin (hb_font_t *font,
return font->get_glyph_v_origin (glyph, x, y);
}
#ifndef HB_DISABLE_DEPRECATED
/**
* hb_font_get_glyph_h_kerning:
* @font: a font.

@ -964,6 +967,7 @@ hb_font_get_glyph_v_kerning (hb_font_t *font,
{
return font->get_glyph_v_kerning (top_glyph, bottom_glyph);
}
#endif
/**
* hb_font_get_glyph_extents:

@ -1173,6 +1177,7 @@ hb_font_subtract_glyph_origin_for_direction (hb_font_t *font,
return font->subtract_glyph_origin_for_direction (glyph, direction, x, y);
}
#ifndef HB_DISABLE_DEPRECATED
/**
* hb_font_get_glyph_kerning_for_direction:
* @font: a font.

@ -1195,6 +1200,7 @@ hb_font_get_glyph_kerning_for_direction (hb_font_t *font,
{
return font->get_glyph_kerning_for_direction (first_glyph, second_glyph, direction, x, y);
}
#endif
/**
* hb_font_get_glyph_extents_for_origin:

@ -1347,7 +1353,7 @@ hb_font_create (hb_face_t *face)
{
hb_font_t *font = _hb_font_create (face);
#if !defined(HB_NO_OT_FONT)
#ifndef HB_NO_OT_FONT
/* Install our in-house, very lightweight, funcs. */
hb_ot_font_set_funcs (font);
#endif

@ -1916,6 +1922,7 @@ hb_font_get_var_coords_normalized (hb_font_t *font,
}
#ifndef HB_DISABLE_DEPRECATED
/*
* Deprecated get_glyph_func():
*/

@ -2038,3 +2045,4 @@ hb_font_funcs_set_glyph_func (hb_font_funcs_t *ffuncs,
trampoline,
trampoline_destroy);
}
#endif

@ -51,8 +51,8 @@
HB_FONT_FUNC_IMPLEMENT (glyph_v_advances) \
HB_FONT_FUNC_IMPLEMENT (glyph_h_origin) \
HB_FONT_FUNC_IMPLEMENT (glyph_v_origin) \
HB_FONT_FUNC_IMPLEMENT (glyph_h_kerning) \
HB_FONT_FUNC_IMPLEMENT (glyph_v_kerning) \
HB_IF_NOT_DEPRECATED (HB_FONT_FUNC_IMPLEMENT (glyph_h_kerning)) \
HB_IF_NOT_DEPRECATED (HB_FONT_FUNC_IMPLEMENT (glyph_v_kerning)) \
HB_FONT_FUNC_IMPLEMENT (glyph_extents) \
HB_FONT_FUNC_IMPLEMENT (glyph_contour_point) \
HB_FONT_FUNC_IMPLEMENT (glyph_name) \

@ -304,17 +304,25 @@ struct hb_font_t
hb_position_t get_glyph_h_kerning (hb_codepoint_t left_glyph,
hb_codepoint_t right_glyph)
{
#ifdef HB_DISABLE_DEPRECATED
return 0;
#else
return klass->get.f.glyph_h_kerning (this, user_data,
left_glyph, right_glyph,
klass->user_data.glyph_h_kerning);
#endif
}
hb_position_t get_glyph_v_kerning (hb_codepoint_t top_glyph,
hb_codepoint_t bottom_glyph)
{
#ifdef HB_DISABLE_DEPRECATED
return 0;
#else
return klass->get.f.glyph_v_kerning (this, user_data,
top_glyph, bottom_glyph,
klass->user_data.glyph_v_kerning);
#endif
}
hb_bool_t get_glyph_extents (hb_codepoint_t glyph,

@ -607,7 +615,7 @@ struct hb_font_t
return (hb_position_t) (scaled / upem);
}
hb_position_t em_scalef (float v, int scale)
{ return (hb_position_t) round (v * scale / face->get_upem ()); }
{ return (hb_position_t) roundf (v * scale / face->get_upem ()); }
float em_fscale (int16_t v, int scale)
{ return (float) v * scale / face->get_upem (); }
};

@ -439,7 +439,7 @@ hb_ft_get_glyph_from_name (hb_font_t *font HB_UNUSED,
else {
/* Make a nul-terminated version. */
char buf[128];
len = MIN (len, (int) sizeof (buf) - 1);
len = hb_min (len, (int) sizeof (buf) - 1);
strncpy (buf, name, len);
buf[len] = '\0';
*glyph = FT_Get_Name_Index (ft_face, buf);

@ -748,7 +748,7 @@ hb_ft_font_create_referenced (FT_Face ft_face)
static void free_static_ft_library ();
#endif
static struct hb_ft_library_lazy_loader_t : hb_lazy_loader_t<hb_remove_pointer (FT_Library),
static struct hb_ft_library_lazy_loader_t : hb_lazy_loader_t<hb_remove_pointer<FT_Library>,
hb_ft_library_lazy_loader_t>
{
static FT_Library create ()

@ -202,6 +202,7 @@ _hb_graphite2_shaper_font_data_destroy (hb_graphite2_font_data_t *data HB_UNUSED
{
}
#ifndef HB_DISABLE_DEPRECATED
/**
* hb_graphite2_font_get_gr_font:
*

@ -213,6 +214,7 @@ hb_graphite2_font_get_gr_font (hb_font_t *font HB_UNUSED)
{
return nullptr;
}
#endif
/*

@ -236,7 +236,7 @@ hb_icu_unicode_decompose (hb_unicode_funcs_t *ufuncs HB_UNUSED,
/* We don't ifdef-out the fallback code such that compiler always
* sees it and makes sure it's compilable. */
UChar utf16[2], normalized[2 * HB_UNICODE_MAX_DECOMPOSITION_LEN + 1];
UChar utf16[2], normalized[2 * 19/*HB_UNICODE_MAX_DECOMPOSITION_LEN*/ + 1];
unsigned int len;
hb_bool_t ret, err;
UErrorCode icu_err;
529 src/hb-iter.hh
@ -1,5 +1,6 @@
/*
* Copyright © 2018 Google, Inc.
* Copyright © 2019 Facebook, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*

@ -22,12 +23,14 @@
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
* Facebook Author(s): Behdad Esfahbod
*/
#ifndef HB_ITER_HH
#define HB_ITER_HH
#include "hb.hh"
#include "hb-algs.hh"
#include "hb-meta.hh"

@ -39,6 +42,17 @@
* copied by value. If the collection / object being iterated on
* is writable, then the iterator returns lvalues, otherwise it
* returns rvalues.
*
* TODO Document more.
*
* If the iterator implementation provides operator!=, then it can be
* used in a range-based for loop. That comes free if the iterator
* is random-access. Otherwise, the range-based for loop incurs
* one traversal to find end(), which can be avoided if written
* as a while-style for loop, or if the iterator implements a faster
* __end__() method.
* TODO When opting in for C++17, address this by changing return
* type of .end()?
*/
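In practice that means an iterator exposing begin (), end () and operator != drops straight into a range-based for. A self-contained sketch of that arrangement, where the iterator is its own range and "end" is just an exhausted copy (illustrative, not an hb iterator):

// Iterator-as-range sketch matching the comment above.
#include <cstdio>

struct count_iter_t
{
  unsigned current, stop;

  count_iter_t begin () const { return *this; }
  count_iter_t end () const   { return {stop, stop}; }   // exhausted copy

  unsigned operator * () const { return current; }
  count_iter_t &operator ++ () { ++current; return *this; }
  bool operator != (const count_iter_t &o) const
  { return current != o.current || stop != o.stop; }
};

int main ()
{
  for (unsigned v : count_iter_t {3, 6})
    printf ("%u ", v);          // 3 4 5
  printf ("\n");
  return 0;
}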
|
||||
|
||||
|
||||
@ -69,32 +83,41 @@ struct hb_iter_t
|
||||
/* Operators. */
|
||||
iter_t iter () const { return *thiz(); }
|
||||
iter_t operator + () const { return *thiz(); }
|
||||
iter_t begin () const { return *thiz(); }
|
||||
iter_t end () const { return thiz()->__end__ (); }
|
||||
explicit operator bool () const { return thiz()->__more__ (); }
|
||||
unsigned len () const { return thiz()->__len__ (); }
|
||||
/* The following can only be enabled if item_t is reference type. Otherwise
|
||||
* it will be returning pointer to temporary rvalue. */
|
||||
* it will be returning pointer to temporary rvalue.
|
||||
* TODO Use a wrapper return type to fix for non-reference type. */
|
||||
template <typename T = item_t,
|
||||
hb_enable_if (hb_is_reference (T))>
|
||||
hb_remove_reference (item_t)* operator -> () const { return hb_addressof (**thiz()); }
|
||||
hb_remove_reference<item_t>* operator -> () const { return hb_addressof (**thiz()); }
|
||||
item_t operator * () const { return thiz()->__item__ (); }
|
||||
item_t operator * () { return thiz()->__item__ (); }
|
||||
item_t operator [] (unsigned i) const { return thiz()->__item_at__ (i); }
|
||||
item_t operator [] (unsigned i) { return thiz()->__item_at__ (i); }
|
||||
iter_t& operator += (unsigned count) { thiz()->__forward__ (count); return *thiz(); }
|
||||
iter_t& operator ++ () { thiz()->__next__ (); return *thiz(); }
|
||||
iter_t& operator -= (unsigned count) { thiz()->__rewind__ (count); return *thiz(); }
|
||||
iter_t& operator -- () { thiz()->__prev__ (); return *thiz(); }
|
||||
iter_t& operator += (unsigned count) & { thiz()->__forward__ (count); return *thiz(); }
|
||||
iter_t operator += (unsigned count) && { thiz()->__forward__ (count); return *thiz(); }
|
||||
iter_t& operator ++ () & { thiz()->__next__ (); return *thiz(); }
|
||||
iter_t operator ++ () && { thiz()->__next__ (); return *thiz(); }
|
||||
iter_t& operator -= (unsigned count) & { thiz()->__rewind__ (count); return *thiz(); }
|
||||
iter_t operator -= (unsigned count) && { thiz()->__rewind__ (count); return *thiz(); }
|
||||
iter_t& operator -- () & { thiz()->__prev__ (); return *thiz(); }
|
||||
iter_t operator -- () && { thiz()->__prev__ (); return *thiz(); }
|
||||
iter_t operator + (unsigned count) const { auto c = thiz()->iter (); c += count; return c; }
|
||||
friend iter_t operator + (unsigned count, const iter_t &it) { return it + count; }
|
||||
iter_t operator ++ (int) { iter_t c (*thiz()); ++*thiz(); return c; }
|
||||
iter_t operator - (unsigned count) const { auto c = thiz()->iter (); c -= count; return c; }
|
||||
iter_t operator -- (int) { iter_t c (*thiz()); --*thiz(); return c; }
|
||||
template <typename T>
|
||||
iter_t& operator >> (T &v) { v = **thiz(); ++*thiz(); return *thiz(); }
|
||||
iter_t& operator >> (T &v) & { v = **thiz(); ++*thiz(); return *thiz(); }
|
||||
template <typename T>
|
||||
iter_t& operator >> (T &v) const { v = **thiz(); ++*thiz(); return *thiz(); }
|
||||
iter_t operator >> (T &v) && { v = **thiz(); ++*thiz(); return *thiz(); }
|
||||
template <typename T>
|
||||
iter_t& operator << (const T v) { **thiz() = v; ++*thiz(); return *thiz(); }
|
||||
iter_t& operator << (const T v) & { **thiz() = v; ++*thiz(); return *thiz(); }
|
||||
template <typename T>
|
||||
iter_t operator << (const T v) && { **thiz() = v; ++*thiz(); return *thiz(); }
|
||||
|
||||
protected:
|
||||
hb_iter_t () {}
|
||||
@ -104,6 +127,8 @@ struct hb_iter_t
|
||||
|
||||
#define HB_ITER_USING(Name) \
|
||||
using item_t = typename Name::item_t; \
|
||||
using Name::begin; \
|
||||
using Name::end; \
|
||||
using Name::item_size; \
|
||||
using Name::is_iterator; \
|
||||
using Name::iter; \
|
||||
@ -122,20 +147,20 @@ struct hb_iter_t
|
||||
using Name::operator <<; \
|
||||
static_assert (true, "")
|
||||
|
||||
/* Returns iterator type of a type. */
|
||||
#define hb_iter_t(Iterable) decltype (hb_declval (Iterable).iter ())
|
||||
/* Returns iterator / item type of a type. */
|
||||
template <typename Iterable>
|
||||
using hb_iter_type = decltype (hb_deref (hb_declval (Iterable)).iter ());
|
||||
template <typename Iterable>
|
||||
using hb_item_type = decltype (*hb_deref (hb_declval (Iterable)).iter ());
|
||||
|
||||
|
||||
/* TODO Change to function-object. */
|
||||
|
||||
template <typename> struct hb_array_t;
|
||||
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename T>
|
||||
hb_iter_t (T)
|
||||
template <typename T> hb_iter_type<T>
|
||||
operator () (T&& c) const
|
||||
{ return c.iter (); }
|
||||
{ return hb_deref (hb_forward<T> (c)).iter (); }

/* Specialization for C arrays. */

@ -147,8 +172,8 @@ static const struct
operator () (Type (&array)[length]) const
{ return hb_array_t<Type> (array, length); }

} hb_iter HB_UNUSED;

}
HB_FUNCOBJ (hb_iter);
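hb_iter is now a real function object: it dereferences a pointer argument, falls back to the iterable's own iter (), and wraps plain C arrays in an hb_array_t view. A minimal usage sketch, illustrative only, assuming the internal hb-iter.hh/hb-array.hh headers:

  // Count the items of a C array through the generic entry point.
  int values[] = {1, 2, 3, 4};
  auto it = hb_iter (values);   // the C-array overload above: an hb_array_t<int> view
  unsigned n = 0;
  for (; it; ++it) n++;         // explicit operator bool () drives termination
  // n == 4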
/* Mixin to fill in what the subclass doesn't provide. */
|
||||
template <typename iter_t, typename item_t = typename iter_t::__item_t__>
|
||||
@ -165,17 +190,29 @@ struct hb_iter_fallback_mixin_t
|
||||
item_t __item_at__ (unsigned i) const { return *(*thiz() + i); }
|
||||
|
||||
/* Termination: Implement __more__(), or __len__() if random-access. */
|
||||
bool __more__ () const { return thiz()->len (); }
|
||||
bool __more__ () const { return bool (thiz()->len ()); }
|
||||
unsigned __len__ () const
|
||||
{ iter_t c (*thiz()); unsigned l = 0; while (c) { c++; l++; }; return l; }
|
||||
|
||||
/* Advancing: Implement __next__(), or __forward__() if random-access. */
|
||||
void __next__ () { *thiz() += 1; }
|
||||
void __forward__ (unsigned n) { while (n--) ++*thiz(); }
|
||||
void __forward__ (unsigned n) { while (*thiz() && n--) ++*thiz(); }
|
||||
|
||||
/* Rewinding: Implement __prev__() or __rewind__() if bidirectional. */
|
||||
void __prev__ () { *thiz() -= 1; }
|
||||
void __rewind__ (unsigned n) { while (n--) --*thiz(); }
|
||||
void __rewind__ (unsigned n) { while (*thiz() && n--) --*thiz(); }
|
||||
|
||||
/* Range-based for: Implement __end__() if can be done faster,
|
||||
* and operator!=. */
|
||||
iter_t __end__ () const
|
||||
{
|
||||
if (thiz()->is_random_access_iterator)
|
||||
return *thiz() + thiz()->len ();
|
||||
/* Above expression loops twice. Following loops once. */
|
||||
auto it = *thiz();
|
||||
while (it) ++it;
|
||||
return it;
|
||||
}
|
||||
|
||||
protected:
|
||||
hb_iter_fallback_mixin_t () {}
|
||||
@ -200,53 +237,101 @@ struct hb_iter_with_fallback_t :
|
||||
* Meta-programming predicates.
|
||||
*/
|
||||
|
||||
/* hb_is_iterator() / hb_is_iterator_of() */
|
||||
|
||||
template<typename Iter, typename Item>
|
||||
struct hb_is_iterator_of
|
||||
{
|
||||
template <typename Item2 = Item>
|
||||
static hb_true_type impl (hb_priority<2>, hb_iter_t<Iter, hb_type_identity<Item2>> *);
|
||||
static hb_false_type impl (hb_priority<0>, const void *);
|
||||
|
||||
public:
|
||||
static constexpr bool value = decltype (impl (hb_prioritize, hb_declval (Iter*)))::value;
|
||||
};
|
||||
#define hb_is_iterator_of(Iter, Item) hb_is_iterator_of<Iter, Item>::value
|
||||
#define hb_is_iterator(Iter) hb_is_iterator_of (Iter, typename Iter::item_t)
|
||||
|
||||
/* hb_is_iterable() */
|
||||
|
||||
template <typename T>
struct hb_is_iterable
{
private:

template <typename U>
static auto test (int) -> decltype (hb_declval (U).iter (), hb_true_t ());
static auto impl (hb_priority<1>) -> decltype (hb_declval (U).iter (), hb_true_type ());

template <typename>
static hb_false_t test (...);
static hb_false_type impl (hb_priority<0>);

public:
enum { value = decltype (test<T> (0))::value };
static constexpr bool value = decltype (impl<T> (hb_prioritize))::value;
};
#define hb_is_iterable(Iterable) hb_is_iterable<Iterable>::value
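hb_is_iterable now uses the hb_priority overload-ranking idiom instead of the classic int/ellipsis trick. The same pattern in plain standard C++, as a self-contained sketch of the technique (not HarfBuzz API):

  #include <type_traits>
  #include <utility>
  #include <vector>

  template <unsigned P> struct priority : priority<P - 1> {};
  template <>           struct priority<0> {};

  template <typename T>
  struct is_iterable
  {
   private:
    template <typename U>   // preferred overload; SFINAEd away if U has no begin ()
    static auto impl (priority<1>) -> decltype (std::declval<U> ().begin (), std::true_type ());
    template <typename>     // fallback
    static std::false_type impl (priority<0>);
   public:
    static constexpr bool value = decltype (impl<T> (priority<16> ()))::value;
  };

  static_assert ( is_iterable<std::vector<int>>::value, "");
  static_assert (!is_iterable<int>::value, "");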
/* TODO Add hb_is_iterable_of().
|
||||
* TODO Add random_access / sorted variants. */
|
||||
/* hb_is_source_of() / hb_is_sink_of() */
|
||||
|
||||
|
||||
/* hb_is_iterator() / hb_is_random_access_iterator() / hb_is_sorted_iterator() */
|
||||
|
||||
template <typename Iter>
|
||||
struct _hb_is_iterator_of
|
||||
{
|
||||
char operator () (...) { return 0; }
|
||||
template<typename Item> int operator () (hb_iter_t<Iter, Item> *) { return 0; }
|
||||
template<typename Item> int operator () (hb_iter_t<Iter, const Item> *) { return 0; }
|
||||
template<typename Item> int operator () (hb_iter_t<Iter, Item&> *) { return 0; }
|
||||
template<typename Item> int operator () (hb_iter_t<Iter, const Item&> *) { return 0; }
|
||||
static_assert (sizeof (char) != sizeof (int), "");
|
||||
};
|
||||
template<typename Iter, typename Item>
|
||||
struct hb_is_iterator_of { enum {
|
||||
value = sizeof (int) == sizeof (hb_declval (_hb_is_iterator_of<Iter>) (hb_declval (Iter*))) }; };
|
||||
#define hb_is_iterator_of(Iter, Item) hb_is_iterator_of<Iter, Item>::value
|
||||
#define hb_is_iterator(Iter) hb_is_iterator_of (Iter, typename Iter::item_t)
|
||||
struct hb_is_source_of
|
||||
{
|
||||
private:
|
||||
template <typename Iter2 = Iter,
|
||||
hb_enable_if (hb_is_convertible (typename Iter2::item_t, hb_add_lvalue_reference<hb_add_const<Item>>))>
|
||||
static hb_true_type impl (hb_priority<2>);
|
||||
template <typename Iter2 = Iter>
|
||||
static auto impl (hb_priority<1>) -> decltype (hb_declval (Iter2) >> hb_declval (Item &), hb_true_type ());
|
||||
static hb_false_type impl (hb_priority<0>);
|
||||
|
||||
#define hb_is_random_access_iterator_of(Iter, Item) \
|
||||
hb_is_iterator_of (Iter, Item) && Iter::is_random_access_iterator
|
||||
#define hb_is_random_access_iterator(Iter) \
|
||||
hb_is_random_access_iterator_of (Iter, typename Iter::item_t)
|
||||
public:
|
||||
static constexpr bool value = decltype (impl (hb_prioritize))::value;
|
||||
};
|
||||
#define hb_is_source_of(Iter, Item) hb_is_source_of<Iter, Item>::value
|
||||
|
||||
#define hb_is_sorted_iterator_of(Iter, Item) \
|
||||
hb_is_iterator_of (Iter, Item) && Iter::is_sorted_iterator
|
||||
#define hb_is_sorted_iterator(Iter) \
|
||||
hb_is_sorted_iterator_of (Iter, typename Iter::item_t)
|
||||
template<typename Iter, typename Item>
|
||||
struct hb_is_sink_of
|
||||
{
|
||||
private:
|
||||
template <typename Iter2 = Iter,
|
||||
hb_enable_if (hb_is_convertible (typename Iter2::item_t, hb_add_lvalue_reference<Item>))>
|
||||
static hb_true_type impl (hb_priority<2>);
|
||||
template <typename Iter2 = Iter>
|
||||
static auto impl (hb_priority<1>) -> decltype (hb_declval (Iter2) << hb_declval (Item), hb_true_type ());
|
||||
static hb_false_type impl (hb_priority<0>);
|
||||
|
||||
public:
|
||||
static constexpr bool value = decltype (impl (hb_prioritize))::value;
|
||||
};
|
||||
#define hb_is_sink_of(Iter, Item) hb_is_sink_of<Iter, Item>::value
|
||||
|
||||
/* This is commonly used, so define: */
|
||||
#define hb_is_sorted_source_of(Iter, Item) \
|
||||
(hb_is_source_of(Iter, Item) && Iter::is_sorted_iterator)
|
||||
|
||||
|
||||
/* Range-based 'for' for iterables. */

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static inline auto begin (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).begin ())

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static inline auto end (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).end ())

/* begin()/end() are NOT looked up non-ADL. So each namespace must declare them.
* Do it for namespace OT. */
namespace OT {

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static inline auto begin (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).begin ())

template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
static inline auto end (Iterable&& iterable) HB_AUTO_RETURN (hb_iter (iterable).end ())
}
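These free begin ()/end () shims (and their duplicates inside namespace OT) are what let range-based for work on anything iterable, since range-for only finds non-member begin/end through ADL. A hedged sketch of the intended use, assuming a container type from the internal headers that provides an iter () method:

  // Illustrative only.
  template <typename Iterable>
  static unsigned
  count (const Iterable &c)
  {
    unsigned n = 0;
    for (auto item : c)   // picks up the begin ()/end () overloads above
      { (void) item; n++; }
    return n;
  }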
/*
|
||||
@ -254,78 +339,109 @@ struct hb_is_iterator_of { enum {
|
||||
*/
|
||||
|
||||
template <typename Lhs, typename Rhs,
|
||||
hb_enable_if (hb_is_iterator (Lhs))>
|
||||
static inline decltype (hb_declval (Rhs) (hb_declval (Lhs)))
|
||||
operator | (Lhs lhs, const Rhs &rhs) { return rhs (lhs); }
|
||||
hb_requires (hb_is_iterator (Lhs))>
|
||||
static inline auto
|
||||
operator | (Lhs&& lhs, Rhs&& rhs) HB_AUTO_RETURN (hb_forward<Rhs> (rhs) (hb_forward<Lhs> (lhs)))
|
||||
|
||||
/* hb_map(), hb_filter(), hb_reduce() */
|
||||
|
||||
template <typename Iter, typename Proj,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
struct hb_map_iter_t :
|
||||
hb_iter_t<hb_map_iter_t<Iter, Proj>,
|
||||
decltype (hb_declval (Proj) (hb_declval (typename Iter::item_t)))>
|
||||
{
|
||||
hb_map_iter_t (const Iter& it, Proj f) : it (it), f (f) {}
|
||||
enum sorted_t {
|
||||
NOT_SORTED,
|
||||
RETAINS_SORTING,
|
||||
SORTED,
|
||||
};
|
||||
|
||||
typedef decltype (hb_declval (Proj) (hb_declval (typename Iter::item_t))) __item_t__;
|
||||
template <typename Iter, typename Proj, sorted_t Sorted,
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
struct hb_map_iter_t :
|
||||
hb_iter_t<hb_map_iter_t<Iter, Proj, Sorted>,
|
||||
decltype (hb_get (hb_declval (Proj), *hb_declval (Iter)))>
|
||||
{
|
||||
hb_map_iter_t (const Iter& it, Proj f_) : it (it), f (f_) {}
|
||||
|
||||
typedef decltype (hb_get (hb_declval (Proj), *hb_declval (Iter))) __item_t__;
|
||||
static constexpr bool is_random_access_iterator = Iter::is_random_access_iterator;
|
||||
__item_t__ __item__ () const { return f (*it); }
|
||||
__item_t__ __item_at__ (unsigned i) const { return f (it[i]); }
|
||||
static constexpr bool is_sorted_iterator =
|
||||
Sorted == SORTED ? true : Sorted == RETAINS_SORTING ? Iter::is_sorted_iterator : false;
|
||||
__item_t__ __item__ () const { return hb_get (f.get (), *it); }
|
||||
__item_t__ __item_at__ (unsigned i) const { return hb_get (f.get (), it[i]); }
|
||||
bool __more__ () const { return bool (it); }
|
||||
unsigned __len__ () const { return it.len (); }
|
||||
void __next__ () { ++it; }
|
||||
void __forward__ (unsigned n) { it += n; }
|
||||
void __prev__ () { --it; }
|
||||
void __rewind__ (unsigned n) { it -= n; }
|
||||
hb_map_iter_t __end__ () const { return hb_map_iter_t (it.end (), f); }
|
||||
bool operator != (const hb_map_iter_t& o) const
|
||||
{ return it != o.it || f != o.f; }
|
||||
|
||||
private:
|
||||
Iter it;
|
||||
Proj f;
|
||||
hb_reference_wrapper<Proj> f;
|
||||
};
|
||||
|
||||
template <typename Proj>
|
||||
template <typename Proj, sorted_t Sorted>
|
||||
struct hb_map_iter_factory_t
|
||||
{
|
||||
hb_map_iter_factory_t (Proj f) : f (f) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
hb_map_iter_t<Iter, Proj>
|
||||
operator () (Iter it) const
|
||||
{ return hb_map_iter_t<Iter, Proj> (it, f); }
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
hb_map_iter_t<Iter, Proj, Sorted>
|
||||
operator () (Iter it)
|
||||
{ return hb_map_iter_t<Iter, Proj, Sorted> (it, f); }
|
||||
|
||||
private:
|
||||
Proj f;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Proj>
|
||||
hb_map_iter_factory_t<Proj>
|
||||
hb_map_iter_factory_t<Proj, NOT_SORTED>
|
||||
operator () (Proj&& f) const
|
||||
{ return hb_map_iter_factory_t<Proj> (f); }
|
||||
} hb_map HB_UNUSED;
|
||||
{ return hb_map_iter_factory_t<Proj, NOT_SORTED> (f); }
|
||||
}
|
||||
HB_FUNCOBJ (hb_map);
|
||||
struct
|
||||
{
|
||||
template <typename Proj>
|
||||
hb_map_iter_factory_t<Proj, SORTED>
|
||||
operator () (Proj&& f) const
|
||||
{ return hb_map_iter_factory_t<Proj, RETAINS_SORTING> (f); }
|
||||
}
|
||||
HB_FUNCOBJ (hb_map_retains_sorting);
|
||||
struct
|
||||
{
|
||||
template <typename Proj>
|
||||
hb_map_iter_factory_t<Proj, SORTED>
|
||||
operator () (Proj&& f) const
|
||||
{ return hb_map_iter_factory_t<Proj, SORTED> (f); }
|
||||
}
|
||||
HB_FUNCOBJ (hb_map_sorted);
|
||||
|
||||
template <typename Iter, typename Pred, typename Proj,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
struct hb_filter_iter_t :
|
||||
hb_iter_with_fallback_t<hb_filter_iter_t<Iter, Pred, Proj>,
|
||||
typename Iter::item_t>
|
||||
{
|
||||
hb_filter_iter_t (const Iter& it_, Pred p, Proj f) : it (it_), p (p), f (f)
|
||||
{ while (it && !p (f (*it))) ++it; }
|
||||
hb_filter_iter_t (const Iter& it_, Pred p_, Proj f_) : it (it_), p (p_), f (f_)
|
||||
{ while (it && !hb_has (p.get (), hb_get (f.get (), *it))) ++it; }
|
||||
|
||||
typedef typename Iter::item_t __item_t__;
|
||||
static constexpr bool is_sorted_iterator = Iter::is_sorted_iterator;
|
||||
__item_t__ __item__ () const { return *it; }
|
||||
bool __more__ () const { return bool (it); }
|
||||
void __next__ () { do ++it; while (it && !p (f (*it))); }
|
||||
void __prev__ () { --it; }
|
||||
void __next__ () { do ++it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
|
||||
void __prev__ () { do --it; while (it && !hb_has (p.get (), hb_get (f.get (), *it))); }
|
||||
hb_filter_iter_t __end__ () const { return hb_filter_iter_t (it.end (), p, f); }
|
||||
bool operator != (const hb_filter_iter_t& o) const
|
||||
{ return it != o.it || p != o.p || f != o.f; }
|
||||
|
||||
private:
|
||||
Iter it;
|
||||
Pred p;
|
||||
Proj f;
|
||||
hb_reference_wrapper<Pred> p;
|
||||
hb_reference_wrapper<Proj> f;
|
||||
};
|
||||
template <typename Pred, typename Proj>
|
||||
struct hb_filter_iter_factory_t
|
||||
@ -333,23 +449,24 @@ struct hb_filter_iter_factory_t
|
||||
hb_filter_iter_factory_t (Pred p, Proj f) : p (p), f (f) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
hb_filter_iter_t<Iter, Pred, Proj>
|
||||
operator () (Iter it) const
|
||||
operator () (Iter it)
|
||||
{ return hb_filter_iter_t<Iter, Pred, Proj> (it, p, f); }
|
||||
|
||||
private:
|
||||
Pred p;
|
||||
Proj f;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Pred = decltype ((hb_bool)),
|
||||
template <typename Pred = decltype ((hb_identity)),
|
||||
typename Proj = decltype ((hb_identity))>
|
||||
hb_filter_iter_factory_t<Pred, Proj>
|
||||
operator () (Pred&& p = hb_bool, Proj&& f = hb_identity) const
|
||||
operator () (Pred&& p = hb_identity, Proj&& f = hb_identity) const
|
||||
{ return hb_filter_iter_factory_t<Pred, Proj> (p, f); }
|
||||
} hb_filter HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_filter);
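Together with the operator | adaptor above, hb_filter and hb_map give the pipeline vocabulary used through the rest of this commit. A sketch of the style, modelled on the hashmap iterators added below; hb_array and hb_sink come from other internal headers, and 'out' stands for any sink accepting hb_pair_t items:

  // Illustrative only.
  + hb_array (items, mask ? mask + 1 : 0)
  | hb_filter (&item_t::is_real)      // member pointers are usable as predicates
  | hb_map (&item_t::get_pair)        // ...and as projections
  | hb_sink (out)
  ;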
template <typename Redu, typename InitT>
|
||||
struct hb_reduce_t
|
||||
@ -357,10 +474,10 @@ struct hb_reduce_t
|
||||
hb_reduce_t (Redu r, InitT init_value) : r (r), init_value (init_value) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter)),
|
||||
hb_requires (hb_is_iterator (Iter)),
|
||||
typename AccuT = decltype (hb_declval (Redu) (hb_declval (InitT), hb_declval (typename Iter::item_t)))>
|
||||
AccuT
|
||||
operator () (Iter it) const
|
||||
operator () (Iter it)
|
||||
{
|
||||
AccuT value = init_value;
|
||||
for (; it; ++it)
|
||||
@ -372,13 +489,14 @@ struct hb_reduce_t
|
||||
Redu r;
|
||||
InitT init_value;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Redu, typename InitT>
|
||||
hb_reduce_t<Redu, InitT>
|
||||
operator () (Redu&& r, InitT init_value) const
|
||||
{ return hb_reduce_t<Redu, InitT> (r, init_value); }
|
||||
} hb_reduce HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_reduce);
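hb_reduce folds an iterator into a single accumulated value; the accumulator type is deduced from the reducer's return type. A hedged sketch, assuming an iterable container with numeric items:

  // Illustrative only.
  template <typename Iterable>
  static unsigned
  total_of (const Iterable &c)
  {
    return hb_iter (c)
         | hb_reduce ([] (unsigned sum, unsigned v) { return sum + v; }, 0u);
  }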
/* hb_zip() */
|
||||
@ -386,7 +504,7 @@ static const struct
|
||||
template <typename A, typename B>
|
||||
struct hb_zip_iter_t :
|
||||
hb_iter_t<hb_zip_iter_t<A, B>,
|
||||
hb_pair_t<typename A::item_t, typename B::item_t> >
|
||||
hb_pair_t<typename A::item_t, typename B::item_t>>
|
||||
{
|
||||
hb_zip_iter_t () {}
|
||||
hb_zip_iter_t (const A& a, const B& b) : a (a), b (b) {}
|
||||
@ -400,60 +518,29 @@ struct hb_zip_iter_t :
|
||||
B::is_sorted_iterator;
|
||||
__item_t__ __item__ () const { return __item_t__ (*a, *b); }
|
||||
__item_t__ __item_at__ (unsigned i) const { return __item_t__ (a[i], b[i]); }
|
||||
bool __more__ () const { return a && b; }
|
||||
unsigned __len__ () const { return MIN (a.len (), b.len ()); }
|
||||
bool __more__ () const { return bool (a) && bool (b); }
|
||||
unsigned __len__ () const { return hb_min (a.len (), b.len ()); }
|
||||
void __next__ () { ++a; ++b; }
|
||||
void __forward__ (unsigned n) { a += n; b += n; }
|
||||
void __prev__ () { --a; --b; }
|
||||
void __rewind__ (unsigned n) { a -= n; b -= n; }
|
||||
hb_zip_iter_t __end__ () const { return hb_zip_iter_t (a.end (), b.end ()); }
|
||||
bool operator != (const hb_zip_iter_t& o) const
|
||||
{ return a != o.a && b != o.b; }
|
||||
|
||||
private:
|
||||
A a;
|
||||
B b;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename A, typename B,
|
||||
hb_enable_if (hb_is_iterable (A) && hb_is_iterable (B))>
|
||||
hb_zip_iter_t<hb_iter_t (A), hb_iter_t (B)>
|
||||
operator () (A& a, B &b) const
|
||||
{ return hb_zip_iter_t<hb_iter_t (A), hb_iter_t (B)> (hb_iter (a), hb_iter (b)); }
|
||||
} hb_zip HB_UNUSED;
|
||||
|
||||
/* hb_enumerate */
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
struct hb_enumerate_iter_t :
|
||||
hb_iter_t<hb_enumerate_iter_t<Iter>,
|
||||
hb_pair_t<unsigned, typename Iter::item_t> >
|
||||
{
|
||||
hb_enumerate_iter_t (const Iter& it) : i (0), it (it) {}
|
||||
|
||||
typedef hb_pair_t<unsigned, typename Iter::item_t> __item_t__;
|
||||
static constexpr bool is_random_access_iterator = Iter::is_random_access_iterator;
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
__item_t__ __item__ () const { return __item_t__ (+i, *it); }
|
||||
__item_t__ __item_at__ (unsigned j) const { return __item_t__ (i + j, it[j]); }
|
||||
bool __more__ () const { return bool (it); }
|
||||
unsigned __len__ () const { return it.len (); }
|
||||
void __next__ () { ++i; ++it; }
|
||||
void __forward__ (unsigned n) { i += n; it += n; }
|
||||
void __prev__ () { --i; --it; }
|
||||
void __rewind__ (unsigned n) { i -= n; it -= n; }
|
||||
|
||||
private:
|
||||
unsigned i;
|
||||
Iter it;
|
||||
};
|
||||
static const struct
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_enable_if (hb_is_iterable (Iterable))>
|
||||
hb_enumerate_iter_t<hb_iter_t (Iterable)>
|
||||
operator () (Iterable& it) const
|
||||
{ return hb_enumerate_iter_t<hb_iter_t (Iterable)> (hb_iter (it)); }
|
||||
} hb_enumerate HB_UNUSED;
|
||||
hb_requires (hb_is_iterable (A) && hb_is_iterable (B))>
|
||||
hb_zip_iter_t<hb_iter_type<A>, hb_iter_type<B>>
|
||||
operator () (A&& a, B&& b) const
|
||||
{ return hb_zip_iter_t<hb_iter_type<A>, hb_iter_type<B>> (hb_iter (a), hb_iter (b)); }
|
||||
}
|
||||
HB_FUNCOBJ (hb_zip);
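hb_zip pairs two iterables item by item, stopping at the shorter one, and keeps the random-access/sortedness properties of its inputs. A hedged sketch, assuming integer-valued iterables and the internal headers:

  // Illustrative only.
  template <typename A, typename B>
  static void
  dump_pairs (const A &keys, const B &values)
  {
    for (auto p : hb_zip (keys, values))   // p is an hb_pair_t
      printf ("%u -> %u\n", (unsigned) p.first, (unsigned) p.second);
  }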
/* hb_apply() */
|
||||
|
||||
@ -463,18 +550,17 @@ struct hb_apply_t
|
||||
hb_apply_t (Appl a) : a (a) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
void
|
||||
operator () (Iter it) const
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
void operator () (Iter it)
|
||||
{
|
||||
for (; it; ++it)
|
||||
a (*it);
|
||||
(void) hb_invoke (a, *it);
|
||||
}
|
||||
|
||||
private:
|
||||
Appl a;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Appl> hb_apply_t<Appl>
|
||||
operator () (Appl&& a) const
|
||||
@ -483,19 +569,91 @@ static const struct
|
||||
template <typename Appl> hb_apply_t<Appl&>
|
||||
operator () (Appl *a) const
|
||||
{ return hb_apply_t<Appl&> (*a); }
|
||||
} hb_apply HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_apply);
|
||||
|
||||
/* hb_iota()/hb_range() */
|
||||
|
||||
template <typename T, typename S>
|
||||
struct hb_counter_iter_t :
|
||||
hb_iter_t<hb_counter_iter_t<T, S>, T>
|
||||
{
|
||||
hb_counter_iter_t (T start, T end_, S step) : v (start), end_ (end_for (start, end_, step)), step (step) {}
|
||||
|
||||
typedef T __item_t__;
|
||||
static constexpr bool is_random_access_iterator = true;
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
__item_t__ __item__ () const { return +v; }
|
||||
__item_t__ __item_at__ (unsigned j) const { return v + j * step; }
|
||||
bool __more__ () const { return v != end_; }
|
||||
unsigned __len__ () const { return !step ? UINT_MAX : (end_ - v) / step; }
|
||||
void __next__ () { v += step; }
|
||||
void __forward__ (unsigned n) { v += n * step; }
|
||||
void __prev__ () { v -= step; }
|
||||
void __rewind__ (unsigned n) { v -= n * step; }
|
||||
hb_counter_iter_t __end__ () const { return hb_counter_iter_t (end_, end_, step); }
|
||||
bool operator != (const hb_counter_iter_t& o) const
|
||||
{ return v != o.v || end_ != o.end_ || step != o.step; }
|
||||
|
||||
private:
|
||||
static inline T end_for (T start, T end_, S step)
|
||||
{
|
||||
if (!step)
|
||||
return end_;
|
||||
auto res = (end_ - start) % step;
|
||||
if (!res)
|
||||
return end_;
|
||||
end_ += step - res;
|
||||
return end_;
|
||||
}
|
||||
|
||||
private:
|
||||
T v;
|
||||
T end_;
|
||||
S step;
|
||||
};
|
||||
struct
{
template <typename T = unsigned, typename S = unsigned> hb_counter_iter_t<T, S>
operator () (T start = 0u, S&& step = 1u) const
{ return hb_counter_iter_t<T, S> (start, step >= 0 ? hb_int_max (T) : hb_int_min (T), step); }
}
HB_FUNCOBJ (hb_iota);
struct
{
template <typename T = unsigned> hb_counter_iter_t<T, unsigned>
operator () (T end = (unsigned) -1) const
{ return hb_counter_iter_t<T, unsigned> (0, end, 1u); }

template <typename T, typename S = unsigned> hb_counter_iter_t<T, S>
operator () (T start, T end, S&& step = 1u) const
{ return hb_counter_iter_t<T, S> (start, end, step); }
}
HB_FUNCOBJ (hb_range);
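hb_iota and hb_range are thin wrappers over the counter iterator above: hb_range counts over a half-open interval, hb_iota counts on from a start value by a step. A few illustrative values (end_for () rounds the end up internally, so the != termination test stays safe for steps that do not divide the interval):

  // Illustrative only.
  //   hb_range (5u)          yields 0, 1, 2, 3, 4
  //   hb_range (2u, 10u, 3u) yields 2, 5, 8          (end_ is rounded up to 11)
  //   hb_iota (10u, 10u)     yields 10, 20, 30, ...  (open-ended)
  unsigned sum = hb_range (5u)
               | hb_reduce ([] (unsigned a, unsigned v) { return a + v; }, 0u);   // 10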
/* hb_enumerate */

struct
{
template <typename Iterable,
typename Index = unsigned,
hb_requires (hb_is_iterable (Iterable))>
auto operator () (Iterable&& it, Index start = 0u) const HB_AUTO_RETURN
( hb_zip (hb_iota (start), it) )
}
HB_FUNCOBJ (hb_enumerate);
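hb_enumerate is literally hb_zip over a counter, so it decorates each item with a running index, optionally starting at a given value. Illustrative only; 'process' is a stand-in for whatever consumes the items:

  //   hb_enumerate (hb_iter (c))      yields (0, c[0]), (1, c[1]), ...
  //   hb_enumerate (hb_iter (c), 10u) yields (10, c[0]), (11, c[1]), ...
  for (auto p : hb_enumerate (hb_iter (c)))
    process (p.first /* index */, p.second /* item */);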
/* hb_sink() */
|
||||
|
||||
template <typename Sink>
|
||||
struct hb_sink_t
|
||||
{
|
||||
hb_sink_t (Sink&& s) : s (s) {}
|
||||
hb_sink_t (Sink s) : s (s) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
void
|
||||
operator () (Iter it) const
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
void operator () (Iter it)
|
||||
{
|
||||
for (; it; ++it)
|
||||
s << *it;
|
||||
@ -504,7 +662,7 @@ struct hb_sink_t
|
||||
private:
|
||||
Sink s;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Sink> hb_sink_t<Sink>
|
||||
operator () (Sink&& s) const
|
||||
@ -513,33 +671,33 @@ static const struct
|
||||
template <typename Sink> hb_sink_t<Sink&>
|
||||
operator () (Sink *s) const
|
||||
{ return hb_sink_t<Sink&> (*s); }
|
||||
} hb_sink HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_sink);
|
||||
|
||||
/* hb-drain: hb_sink to void / blackhole / /dev/null. */
|
||||
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
void
|
||||
operator () (Iter it) const
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
void operator () (Iter it) const
|
||||
{
|
||||
for (; it; ++it)
|
||||
(void) *it;
|
||||
}
|
||||
} hb_drain HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_drain);
|
||||
|
||||
/* hb_unzip(): unzip and sink to two sinks. */
|
||||
|
||||
template <typename Sink1, typename Sink2>
|
||||
struct hb_unzip_t
|
||||
{
|
||||
hb_unzip_t (Sink1&& s1, Sink2&& s2) : s1 (s1), s2 (s2) {}
|
||||
hb_unzip_t (Sink1 s1, Sink2 s2) : s1 (s1), s2 (s2) {}
|
||||
|
||||
template <typename Iter,
|
||||
hb_enable_if (hb_is_iterator (Iter))>
|
||||
void
|
||||
operator () (Iter it) const
|
||||
hb_requires (hb_is_iterator (Iter))>
|
||||
void operator () (Iter it)
|
||||
{
|
||||
for (; it; ++it)
|
||||
{
|
||||
@ -553,7 +711,7 @@ struct hb_unzip_t
|
||||
Sink1 s1;
|
||||
Sink2 s2;
|
||||
};
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Sink1, typename Sink2> hb_unzip_t<Sink1, Sink2>
|
||||
operator () (Sink1&& s1, Sink2&& s2) const
|
||||
@ -562,59 +720,70 @@ static const struct
|
||||
template <typename Sink1, typename Sink2> hb_unzip_t<Sink1&, Sink2&>
|
||||
operator () (Sink1 *s1, Sink2 *s2) const
|
||||
{ return hb_unzip_t<Sink1&, Sink2&> (*s1, *s2); }
|
||||
} hb_unzip HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_unzip);
|
||||
|
||||
|
||||
/* hb-all, hb-any, hb-none. */
|
||||
|
||||
static const struct
|
||||
struct
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_enable_if (hb_is_iterable (Iterable))>
|
||||
bool
|
||||
operator () (Iterable&& c) const
|
||||
typename Pred = decltype ((hb_identity)),
|
||||
typename Proj = decltype ((hb_identity)),
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
bool operator () (Iterable&& c,
|
||||
Pred&& p = hb_identity,
|
||||
Proj&& f = hb_identity) const
|
||||
{
|
||||
for (auto it = hb_iter (c); it; ++it)
|
||||
if (!*it)
|
||||
if (!hb_match (hb_forward<Pred> (p), hb_get (hb_forward<Proj> (f), *it)))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
} hb_all HB_UNUSED;
|
||||
|
||||
static const struct
|
||||
}
|
||||
HB_FUNCOBJ (hb_all);
|
||||
struct
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_enable_if (hb_is_iterable (Iterable))>
|
||||
bool
|
||||
operator () (Iterable&& c) const
|
||||
typename Pred = decltype ((hb_identity)),
|
||||
typename Proj = decltype ((hb_identity)),
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
bool operator () (Iterable&& c,
|
||||
Pred&& p = hb_identity,
|
||||
Proj&& f = hb_identity) const
|
||||
{
|
||||
for (auto it = hb_iter (c); it; ++it)
|
||||
if (*it)
|
||||
if (hb_match (hb_forward<Pred> (p), hb_get (hb_forward<Proj> (f), *it)))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
} hb_any HB_UNUSED;
|
||||
|
||||
static const struct
|
||||
}
|
||||
HB_FUNCOBJ (hb_any);
|
||||
struct
|
||||
{
|
||||
template <typename Iterable,
|
||||
hb_enable_if (hb_is_iterable (Iterable))>
|
||||
bool
|
||||
operator () (Iterable&& c) const
|
||||
typename Pred = decltype ((hb_identity)),
|
||||
typename Proj = decltype ((hb_identity)),
|
||||
hb_requires (hb_is_iterable (Iterable))>
|
||||
bool operator () (Iterable&& c,
|
||||
Pred&& p = hb_identity,
|
||||
Proj&& f = hb_identity) const
|
||||
{
|
||||
for (auto it = hb_iter (c); it; ++it)
|
||||
if (*it)
|
||||
if (hb_match (hb_forward<Pred> (p), hb_get (hb_forward<Proj> (f), *it)))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
} hb_none HB_UNUSED;
|
||||
}
|
||||
HB_FUNCOBJ (hb_none);
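hb_all/hb_any/hb_none now take an optional predicate and projection; with the defaults (hb_identity for both) they keep the old behaviour of testing each item's plain truthiness. A hedged sketch, assuming hb_match () invokes an invokable predicate and 'values' is an iterable of unsigned:

  // Illustrative only.
  bool all_nonzero = hb_all (values);
  bool has_zero    = hb_any (values, [] (unsigned v) { return !v; });
  bool none_big    = hb_none (values, [] (unsigned v) { return v > 0xFFFFu; });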
/*
|
||||
* Algorithms operating on iterators.
|
||||
*/
|
||||
|
||||
template <typename C, typename V,
|
||||
hb_enable_if (hb_is_iterable (C))>
|
||||
hb_requires (hb_is_iterable (C))>
|
||||
inline void
|
||||
hb_fill (C& c, const V &v)
|
||||
{
|
||||
|
@ -34,19 +34,17 @@
|
||||
* hb_hashmap_t
|
||||
*/
|
||||
|
||||
/* TODO if K/V is signed integer, -1 is not a good default.
|
||||
* Don't know how to get to -MAX using bit work. */
|
||||
template <typename K, typename V,
|
||||
K kINVALID = hb_is_pointer (K) ? 0 : (K) -1,
|
||||
V vINVALID = hb_is_pointer (V) ? 0 : (V) -1>
|
||||
K kINVALID = hb_is_pointer (K) ? 0 : hb_is_signed (K) ? hb_int_min (K) : (K) -1,
|
||||
V vINVALID = hb_is_pointer (V) ? 0 : hb_is_signed (V) ? hb_int_min (V) : (V) -1>
|
||||
struct hb_hashmap_t
|
||||
{
|
||||
HB_DELETE_COPY_ASSIGN (hb_hashmap_t);
|
||||
hb_hashmap_t () { init (); }
|
||||
~hb_hashmap_t () { fini (); }
|
||||
|
||||
static_assert (hb_is_integer (K) || hb_is_pointer (K), "");
|
||||
static_assert (hb_is_integer (V) || hb_is_pointer (V), "");
|
||||
static_assert (hb_is_integral (K) || hb_is_pointer (K), "");
|
||||
static_assert (hb_is_integral (V) || hb_is_pointer (V), "");
|
||||
|
||||
/* TODO If key type is a pointer, keep hash in item_t and use to:
|
||||
* 1. avoid rehashing when resizing table, and
|
||||
@ -59,11 +57,12 @@ struct hb_hashmap_t
|
||||
|
||||
void clear () { key = kINVALID; value = vINVALID; }
|
||||
|
||||
bool operator == (K o) { return hb_deref_pointer (key) == hb_deref_pointer (o); }
|
||||
bool operator == (K o) { return hb_deref (key) == hb_deref (o); }
|
||||
bool operator == (const item_t &o) { return *this == o.key; }
|
||||
bool is_unused () const { return key == kINVALID; }
|
||||
bool is_tombstone () const { return key != kINVALID && value == vINVALID; }
|
||||
bool is_real () const { return key != kINVALID && value != vINVALID; }
|
||||
hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
|
||||
};
|
||||
|
||||
hb_object_header_t header;
|
||||
@ -122,7 +121,7 @@ struct hb_hashmap_t
|
||||
return false;
|
||||
}
|
||||
+ hb_iter (new_items, new_size)
|
||||
| hb_apply ([] (item_t &_) { _.clear (); }) /* TODO make pointer-to-methods invokable. */
|
||||
| hb_apply (&item_t::clear)
|
||||
;
|
||||
|
||||
unsigned int old_size = mask + 1;
|
||||
@ -183,7 +182,12 @@ struct hb_hashmap_t
|
||||
static constexpr V SENTINEL = vINVALID;
|
||||
typedef V value_t;
|
||||
value_t operator [] (K k) const { return get (k); }
|
||||
bool has (K k) const { return (*this)[k] != SENTINEL; }
|
||||
bool has (K k, V *vp = nullptr) const
|
||||
{
|
||||
V v = (*this)[k];
|
||||
if (vp) *vp = v;
|
||||
return v != SENTINEL;
|
||||
}
|
||||
/* Projection. */
|
||||
V operator () (K k) const { return get (k); }
|
||||
|
||||
@ -193,7 +197,7 @@ struct hb_hashmap_t
|
||||
return;
|
||||
if (items)
|
||||
+ hb_iter (items, mask + 1)
|
||||
| hb_apply ([] (item_t &_) { _.clear (); }) /* TODO make pointer-to-methods invokable. */
|
||||
| hb_apply (&item_t::clear)
|
||||
;
|
||||
|
||||
population = occupancy = 0;
|
||||
@ -203,6 +207,34 @@ struct hb_hashmap_t
|
||||
|
||||
unsigned int get_population () const { return population; }
|
||||
|
||||
/*
|
||||
* Iterator
|
||||
*/
|
||||
auto iter () const HB_AUTO_RETURN
|
||||
(
|
||||
+ hb_array (items, mask ? mask + 1 : 0)
|
||||
| hb_filter (&item_t::is_real)
|
||||
| hb_map (&item_t::get_pair)
|
||||
)
|
||||
auto keys () const HB_AUTO_RETURN
|
||||
(
|
||||
+ hb_array (items, mask ? mask + 1 : 0)
|
||||
| hb_filter (&item_t::is_real)
|
||||
| hb_map (&item_t::key)
|
||||
| hb_map (hb_ridentity)
|
||||
)
|
||||
auto values () const HB_AUTO_RETURN
|
||||
(
|
||||
+ hb_array (items, mask ? mask + 1 : 0)
|
||||
| hb_filter (&item_t::is_real)
|
||||
| hb_map (&item_t::value)
|
||||
| hb_map (hb_ridentity)
|
||||
)
|
||||
|
||||
/* Sink interface. */
|
||||
hb_hashmap_t<K, V, kINVALID, vINVALID>& operator << (const hb_pair_t<K, V>& v)
|
||||
{ set (v.first, v.second); return *this; }
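With iter ()/keys ()/values () on one side and the operator << sink on the other, the hashmap can now sit at either end of a pipeline. A hedged sketch; 'src' and 'dst' stand for two hb_hashmap_t instances of the same type:

  // Illustrative only.
  src.iter () | hb_sink (dst);   // copy every real entry; items are hb_pair_t<K, V>

  unsigned n = 0;
  for (auto k : src.keys ())     // unused and tombstone slots are filtered out
    { (void) k; n++; }           // n == src.get_population ()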
protected:
|
||||
|
||||
unsigned int bucket_for (K key) const
|
||||
@ -213,9 +245,9 @@ struct hb_hashmap_t
|
||||
while (!items[i].is_unused ())
|
||||
{
|
||||
if (items[i] == key)
|
||||
return i;
|
||||
return i;
|
||||
if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
|
||||
tombstone = i;
|
||||
tombstone = i;
|
||||
i = (i + ++step) & mask;
|
||||
}
|
||||
return tombstone == (unsigned) -1 ? i : tombstone;
|
398
src/hb-meta.hh
@ -34,111 +34,367 @@
|
||||
* C++ template meta-programming & fundamentals used with them.
|
||||
*/
|
||||
|
||||
/* Void! For when we need a expression-type of void. */
|
||||
struct hb_empty_t {};
|
||||
|
||||
template <typename T> static inline T*
|
||||
hb_addressof (const T& arg)
|
||||
/* https://en.cppreference.com/w/cpp/types/void_t */
|
||||
template<typename... Ts> struct _hb_void_t { typedef void type; };
|
||||
template<typename... Ts> using hb_void_t = typename _hb_void_t<Ts...>::type;
|
||||
|
||||
template<typename Head, typename... Ts> struct _hb_head_t { typedef Head type; };
|
||||
template<typename... Ts> using hb_head_t = typename _hb_head_t<Ts...>::type;
|
||||
|
||||
template <typename T, T v> struct hb_integral_constant { static constexpr T value = v; };
|
||||
template <bool b> using hb_bool_constant = hb_integral_constant<bool, b>;
|
||||
using hb_true_type = hb_bool_constant<true>;
|
||||
using hb_false_type = hb_bool_constant<false>;
|
||||
|
||||
|
||||
/* Basic type SFINAE. */
|
||||
|
||||
template <bool B, typename T = void> struct hb_enable_if {};
|
||||
template <typename T> struct hb_enable_if<true, T> { typedef T type; };
|
||||
#define hb_enable_if(Cond) typename hb_enable_if<(Cond)>::type* = nullptr
|
||||
/* Concepts/Requires alias: */
|
||||
#define hb_requires(Cond) hb_enable_if((Cond))
|
||||
|
||||
template <typename T, typename T2> struct hb_is_same : hb_false_type {};
|
||||
template <typename T> struct hb_is_same<T, T> : hb_true_type {};
|
||||
#define hb_is_same(T, T2) hb_is_same<T, T2>::value
|
||||
|
||||
/* Function overloading SFINAE and priority. */
|
||||
|
||||
#define HB_RETURN(Ret, E) -> hb_head_t<Ret, decltype ((E))> { return (E); }
|
||||
#define HB_AUTO_RETURN(E) -> decltype ((E)) { return (E); }
|
||||
#define HB_VOID_RETURN(E) -> hb_void_t<decltype ((E))> { (E); }
|
||||
|
||||
template <unsigned Pri> struct hb_priority : hb_priority<Pri - 1> {};
|
||||
template <> struct hb_priority<0> {};
|
||||
#define hb_prioritize hb_priority<16> ()
|
||||
|
||||
#define HB_FUNCOBJ(x) static_const x HB_UNUSED
|
||||
|
||||
|
||||
template <typename T> struct hb_type_identity_t { typedef T type; };
|
||||
template <typename T> using hb_type_identity = typename hb_type_identity_t<T>::type;
|
||||
|
||||
struct
|
||||
{
|
||||
template <typename T>
|
||||
T* operator () (T& arg) const
|
||||
{
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wcast-align"
|
||||
/* https://en.cppreference.com/w/cpp/memory/addressof */
|
||||
return reinterpret_cast<T*>(
|
||||
&const_cast<char&>(
|
||||
reinterpret_cast<const volatile char&>(arg)));
|
||||
/* https://en.cppreference.com/w/cpp/memory/addressof */
|
||||
return reinterpret_cast<T*> (
|
||||
&const_cast<char&> (
|
||||
reinterpret_cast<const volatile char&> (arg)));
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
}
|
||||
HB_FUNCOBJ (hb_addressof);
|
||||
|
||||
template <typename T> static inline T hb_declval ();
|
||||
#define hb_declval(T) (hb_declval<T> ())
|
||||
|
||||
template <typename T> struct hb_match_const { typedef T type; enum { value = false }; };
|
||||
template <typename T> struct hb_match_const<const T> { typedef T type; enum { value = true }; };
|
||||
#define hb_remove_const(T) typename hb_match_const<T>::type
|
||||
template <typename T> struct hb_match_const : hb_type_identity_t<T>, hb_bool_constant<false>{};
|
||||
template <typename T> struct hb_match_const<const T> : hb_type_identity_t<T>, hb_bool_constant<true> {};
|
||||
template <typename T> using hb_remove_const = typename hb_match_const<T>::type;
|
||||
template <typename T> using hb_add_const = const T;
|
||||
#define hb_is_const(T) hb_match_const<T>::value
|
||||
template <typename T> struct hb_match_reference { typedef T type; enum { value = false }; };
|
||||
template <typename T> struct hb_match_reference<T &> { typedef T type; enum { value = true }; };
|
||||
#define hb_remove_reference(T) typename hb_match_reference<T>::type
|
||||
template <typename T> struct hb_match_reference : hb_type_identity_t<T>, hb_bool_constant<false>{};
|
||||
template <typename T> struct hb_match_reference<T &> : hb_type_identity_t<T>, hb_bool_constant<true> {};
|
||||
template <typename T> struct hb_match_reference<T &&> : hb_type_identity_t<T>, hb_bool_constant<true> {};
|
||||
template <typename T> using hb_remove_reference = typename hb_match_reference<T>::type;
|
||||
template <typename T> auto _hb_try_add_lvalue_reference (hb_priority<1>) -> hb_type_identity<T&>;
|
||||
template <typename T> auto _hb_try_add_lvalue_reference (hb_priority<0>) -> hb_type_identity<T>;
|
||||
template <typename T> using hb_add_lvalue_reference = decltype (_hb_try_add_lvalue_reference<T> (hb_prioritize));
|
||||
template <typename T> auto _hb_try_add_rvalue_reference (hb_priority<1>) -> hb_type_identity<T&&>;
|
||||
template <typename T> auto _hb_try_add_rvalue_reference (hb_priority<0>) -> hb_type_identity<T>;
|
||||
template <typename T> using hb_add_rvalue_reference = decltype (_hb_try_add_rvalue_reference<T> (hb_prioritize));
|
||||
#define hb_is_reference(T) hb_match_reference<T>::value
|
||||
template <typename T> struct hb_match_pointer { typedef T type; enum { value = false }; };
|
||||
template <typename T> struct hb_match_pointer<T *> { typedef T type; enum { value = true }; };
|
||||
#define hb_remove_pointer(T) typename hb_match_pointer<T>::type
|
||||
template <typename T> struct hb_match_pointer : hb_type_identity_t<T>, hb_bool_constant<false>{};
|
||||
template <typename T> struct hb_match_pointer<T *> : hb_type_identity_t<T>, hb_bool_constant<true> {};
|
||||
template <typename T> using hb_remove_pointer = typename hb_match_pointer<T>::type;
|
||||
template <typename T> auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_identity<hb_remove_reference<T>*>;
|
||||
template <typename T> auto _hb_try_add_pointer (hb_priority<1>) -> hb_type_identity<T>;
|
||||
template <typename T> using hb_add_pointer = decltype (_hb_try_add_pointer<T> (hb_prioritize));
|
||||
#define hb_is_pointer(T) hb_match_pointer<T>::value
|
||||
|
||||
static const struct
|
||||
{
|
||||
template <typename T>
|
||||
T operator () (T v) const { return v; }
|
||||
template <typename T>
|
||||
T& operator () (T *v) const { return *v; }
|
||||
} hb_deref_pointer HB_UNUSED;
|
||||
|
||||
/* TODO Add feature-parity to std::decay. */
|
||||
template <typename T> using hb_decay = hb_remove_const<hb_remove_reference<T>>;
|
||||
|
||||
|
||||
template<bool B, class T, class F>
|
||||
struct _hb_conditional { typedef T type; };
|
||||
template<class T, class F>
|
||||
struct _hb_conditional<false, T, F> { typedef F type; };
|
||||
template<bool B, class T, class F>
|
||||
using hb_conditional = typename _hb_conditional<B, T, F>::type;
|
||||
|
||||
|
||||
template <typename From, typename To>
|
||||
struct hb_is_convertible
|
||||
{
|
||||
private:
|
||||
static constexpr bool from_void = hb_is_same (void, hb_decay<From>);
|
||||
static constexpr bool to_void = hb_is_same (void, hb_decay<To> );
|
||||
static constexpr bool either_void = from_void || to_void;
|
||||
static constexpr bool both_void = from_void && to_void;
|
||||
|
||||
static hb_true_type impl2 (hb_conditional<to_void, int, To>);
|
||||
|
||||
template <typename T>
|
||||
static auto impl (hb_priority<1>) -> decltype (impl2 (hb_declval (T)));
|
||||
template <typename T>
|
||||
static hb_false_type impl (hb_priority<0>);
|
||||
public:
|
||||
static constexpr bool value = both_void ||
|
||||
(!either_void &&
|
||||
decltype (impl<hb_conditional<from_void, int, From>> (hb_prioritize))::value);
|
||||
};
|
||||
#define hb_is_convertible(From,To) hb_is_convertible<From, To>::value
|
||||
|
||||
template <typename Base, typename Derived>
|
||||
using hb_is_base_of = hb_is_convertible<hb_decay<Derived> *, hb_decay<Base> *>;
|
||||
#define hb_is_base_of(Base,Derived) hb_is_base_of<Base, Derived>::value
|
||||
|
||||
template <typename From, typename To>
|
||||
using hb_is_cr_convertible = hb_bool_constant<
|
||||
hb_is_same (hb_decay<From>, hb_decay<To>) &&
|
||||
(!hb_is_const (From) || hb_is_const (To)) &&
|
||||
(!hb_is_reference (To) || hb_is_const (To) || hb_is_reference (To))
|
||||
>;
|
||||
#define hb_is_cr_convertible(From,To) hb_is_cr_convertible<From, To>::value
|
||||
|
||||
/* std::move and std::forward */
|
||||
|
||||
template <typename T>
|
||||
hb_remove_reference (T)&& hb_move (T&& t) { return (hb_remove_reference (T)&&) (t); }
|
||||
static hb_remove_reference<T>&& hb_move (T&& t) { return (hb_remove_reference<T>&&) (t); }
|
||||
|
||||
template <typename T>
|
||||
T&& hb_forward (hb_remove_reference (T)& t) { return (T&&) t; }
|
||||
static T&& hb_forward (hb_remove_reference<T>& t) { return (T&&) t; }
|
||||
template <typename T>
|
||||
T&& hb_forward (hb_remove_reference (T)&& t) { return (T&&) t; }
|
||||
static T&& hb_forward (hb_remove_reference<T>&& t) { return (T&&) t; }
|
||||
|
||||
struct
|
||||
{
|
||||
template <typename T> auto
|
||||
operator () (T&& v) const HB_AUTO_RETURN (hb_forward<T> (v))
|
||||
|
||||
/* Void! For when we need a expression-type of void. */
|
||||
struct hb_void_t { typedef void value; };
|
||||
template <typename T> auto
|
||||
operator () (T *v) const HB_AUTO_RETURN (*v)
|
||||
}
|
||||
HB_FUNCOBJ (hb_deref);
|
||||
|
||||
/* Bool! For when we need to evaluate type-dependent expressions
|
||||
* in a template argument. */
|
||||
template <bool b> struct hb_bool_tt { enum { value = b }; };
|
||||
typedef hb_bool_tt<true> hb_true_t;
|
||||
typedef hb_bool_tt<false> hb_false_t;
|
||||
struct
|
||||
{
|
||||
template <typename T> auto
|
||||
operator () (T&& v) const HB_AUTO_RETURN (hb_forward<T> (v))
|
||||
|
||||
template<bool B, typename T = void>
|
||||
struct hb_enable_if {};
|
||||
template<typename T>
|
||||
struct hb_enable_if<true, T> { typedef T type; };
|
||||
#define hb_enable_if(Cond) typename hb_enable_if<(Cond)>::type* = nullptr
|
||||
template <typename T> auto
|
||||
operator () (T& v) const HB_AUTO_RETURN (hb_addressof (v))
|
||||
}
|
||||
HB_FUNCOBJ (hb_ref);
|
||||
|
||||
template <typename T, typename T2>
|
||||
struct hb_is_same : hb_false_t {};
|
||||
template <typename T>
|
||||
struct hb_is_same<T, T> : hb_true_t {};
|
||||
#define hb_is_same(T, T2) hb_is_same<T, T2>::value
|
||||
struct hb_reference_wrapper
|
||||
{
|
||||
hb_reference_wrapper (T v) : v (v) {}
|
||||
bool operator == (const hb_reference_wrapper& o) const { return v == o.v; }
|
||||
bool operator != (const hb_reference_wrapper& o) const { return v != o.v; }
|
||||
operator T () const { return v; }
|
||||
T get () const { return v; }
|
||||
T v;
|
||||
};
|
||||
template <typename T>
|
||||
struct hb_reference_wrapper<T&>
|
||||
{
|
||||
hb_reference_wrapper (T& v) : v (hb_addressof (v)) {}
|
||||
bool operator == (const hb_reference_wrapper& o) const { return v == o.v; }
|
||||
bool operator != (const hb_reference_wrapper& o) const { return v != o.v; }
|
||||
operator T& () const { return *v; }
|
||||
T& get () const { return *v; }
|
||||
T* v;
|
||||
};
|
||||
|
||||
|
||||
/*
|
||||
* Meta-functions.
|
||||
*/
|
||||
template <typename T>
|
||||
using hb_is_integral = hb_bool_constant<
|
||||
hb_is_same (hb_decay<T>, char) ||
|
||||
hb_is_same (hb_decay<T>, signed char) ||
|
||||
hb_is_same (hb_decay<T>, unsigned char) ||
|
||||
hb_is_same (hb_decay<T>, signed int) ||
|
||||
hb_is_same (hb_decay<T>, unsigned int) ||
|
||||
hb_is_same (hb_decay<T>, signed short) ||
|
||||
hb_is_same (hb_decay<T>, unsigned short) ||
|
||||
hb_is_same (hb_decay<T>, signed long) ||
|
||||
hb_is_same (hb_decay<T>, unsigned long) ||
|
||||
hb_is_same (hb_decay<T>, signed long long) ||
|
||||
hb_is_same (hb_decay<T>, unsigned long long) ||
|
||||
false
|
||||
>;
|
||||
#define hb_is_integral(T) hb_is_integral<T>::value
|
||||
template <typename T>
|
||||
using hb_is_floating_point = hb_bool_constant<
|
||||
hb_is_same (hb_decay<T>, float) ||
|
||||
hb_is_same (hb_decay<T>, double) ||
|
||||
hb_is_same (hb_decay<T>, long double) ||
|
||||
false
|
||||
>;
|
||||
#define hb_is_floating_point(T) hb_is_floating_point<T>::value
|
||||
template <typename T>
|
||||
using hb_is_arithmetic = hb_bool_constant<
|
||||
hb_is_integral (T) ||
|
||||
hb_is_floating_point (T) ||
|
||||
false
|
||||
>;
|
||||
#define hb_is_arithmetic(T) hb_is_arithmetic<T>::value
|
||||
|
||||
template <typename T> struct hb_is_signed;
|
||||
/* https://github.com/harfbuzz/harfbuzz/issues/1535 */
|
||||
template <> struct hb_is_signed<int8_t> { enum { value = true }; };
|
||||
template <> struct hb_is_signed<int16_t> { enum { value = true }; };
|
||||
template <> struct hb_is_signed<int32_t> { enum { value = true }; };
|
||||
template <> struct hb_is_signed<int64_t> { enum { value = true }; };
|
||||
template <> struct hb_is_signed<uint8_t> { enum { value = false }; };
|
||||
template <> struct hb_is_signed<uint16_t> { enum { value = false }; };
|
||||
template <> struct hb_is_signed<uint32_t> { enum { value = false }; };
|
||||
template <> struct hb_is_signed<uint64_t> { enum { value = false }; };
|
||||
|
||||
template <typename T>
|
||||
using hb_is_signed = hb_conditional<hb_is_arithmetic (T),
|
||||
hb_bool_constant<(T) -1 < (T) 0>,
|
||||
hb_false_type>;
|
||||
#define hb_is_signed(T) hb_is_signed<T>::value
|
||||
template <typename T>
|
||||
using hb_is_unsigned = hb_conditional<hb_is_arithmetic (T),
|
||||
hb_bool_constant<(T) 0 < (T) -1>,
|
||||
hb_false_type>;
|
||||
#define hb_is_unsigned(T) hb_is_unsigned<T>::value
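hb_is_signed/hb_is_unsigned now derive signedness arithmetically instead of enumerating the fixed-width types: for an arithmetic T, (T) -1 compares below (T) 0 exactly when T is signed. The same observation in plain standard C++:

  // Standalone illustration (not HarfBuzz API).
  static_assert ( (int) -1 < (int) 0,             "signed: -1 is below 0");
  static_assert (!((unsigned) -1 < (unsigned) 0), "unsigned: -1 wraps to UINT_MAX");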
template <bool is_signed> struct hb_signedness_int;
|
||||
template <> struct hb_signedness_int<false> { typedef unsigned int value; };
|
||||
template <> struct hb_signedness_int<true> { typedef signed int value; };
|
||||
#define hb_signedness_int(T) hb_signedness_int<T>::value
|
||||
template <typename T> struct hb_int_min;
|
||||
template <> struct hb_int_min<char> : hb_integral_constant<char, CHAR_MIN> {};
|
||||
template <> struct hb_int_min<signed char> : hb_integral_constant<signed char, SCHAR_MIN> {};
|
||||
template <> struct hb_int_min<unsigned char> : hb_integral_constant<unsigned char, 0> {};
|
||||
template <> struct hb_int_min<signed short> : hb_integral_constant<signed short, SHRT_MIN> {};
|
||||
template <> struct hb_int_min<unsigned short> : hb_integral_constant<unsigned short, 0> {};
|
||||
template <> struct hb_int_min<signed int> : hb_integral_constant<signed int, INT_MIN> {};
|
||||
template <> struct hb_int_min<unsigned int> : hb_integral_constant<unsigned int, 0> {};
|
||||
template <> struct hb_int_min<signed long> : hb_integral_constant<signed long, LONG_MIN> {};
|
||||
template <> struct hb_int_min<unsigned long> : hb_integral_constant<unsigned long, 0> {};
|
||||
template <> struct hb_int_min<signed long long> : hb_integral_constant<signed long long, LLONG_MIN> {};
|
||||
template <> struct hb_int_min<unsigned long long> : hb_integral_constant<unsigned long long, 0> {};
|
||||
#define hb_int_min(T) hb_int_min<T>::value
|
||||
template <typename T> struct hb_int_max;
|
||||
template <> struct hb_int_max<char> : hb_integral_constant<char, CHAR_MAX> {};
|
||||
template <> struct hb_int_max<signed char> : hb_integral_constant<signed char, SCHAR_MAX> {};
|
||||
template <> struct hb_int_max<unsigned char> : hb_integral_constant<unsigned char, UCHAR_MAX> {};
|
||||
template <> struct hb_int_max<signed short> : hb_integral_constant<signed short, SHRT_MAX> {};
|
||||
template <> struct hb_int_max<unsigned short> : hb_integral_constant<unsigned short, USHRT_MAX> {};
|
||||
template <> struct hb_int_max<signed int> : hb_integral_constant<signed int, INT_MAX> {};
|
||||
template <> struct hb_int_max<unsigned int> : hb_integral_constant<unsigned int, UINT_MAX> {};
|
||||
template <> struct hb_int_max<signed long> : hb_integral_constant<signed long, LONG_MAX> {};
|
||||
template <> struct hb_int_max<unsigned long> : hb_integral_constant<unsigned long, ULONG_MAX> {};
|
||||
template <> struct hb_int_max<signed long long> : hb_integral_constant<signed long long, LLONG_MAX> {};
|
||||
template <> struct hb_int_max<unsigned long long> : hb_integral_constant<unsigned long long, ULLONG_MAX> {};
|
||||
#define hb_int_max(T) hb_int_max<T>::value
|
||||
|
||||
template <typename T> struct hb_is_integer { enum { value = false }; };
|
||||
template <> struct hb_is_integer<char> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<signed char> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<unsigned char> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<signed short> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<unsigned short> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<signed int> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<unsigned int> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<signed long> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<unsigned long> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<signed long long> { enum { value = true }; };
|
||||
template <> struct hb_is_integer<unsigned long long> { enum { value = true }; };
|
||||
#define hb_is_integer(T) hb_is_integer<T>::value
|
||||
|
||||
|
||||
template <typename T, typename>
|
||||
struct _hb_is_destructible : hb_false_type {};
|
||||
template <typename T>
|
||||
struct _hb_is_destructible<T, hb_void_t<decltype (hb_declval (T).~T ())>> : hb_true_type {};
|
||||
template <typename T>
|
||||
using hb_is_destructible = _hb_is_destructible<T, void>;
|
||||
#define hb_is_destructible(T) hb_is_destructible<T>::value
|
||||
|
||||
template <typename T, typename, typename ...Ts>
|
||||
struct _hb_is_constructible : hb_false_type {};
|
||||
template <typename T, typename ...Ts>
|
||||
struct _hb_is_constructible<T, hb_void_t<decltype (T (hb_declval (Ts)...))>, Ts...> : hb_true_type {};
|
||||
template <typename T, typename ...Ts>
|
||||
using hb_is_constructible = _hb_is_constructible<T, void, Ts...>;
|
||||
#define hb_is_constructible(...) hb_is_constructible<__VA_ARGS__>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_default_constructible = hb_is_constructible<T>;
|
||||
#define hb_is_default_constructible(T) hb_is_default_constructible<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_copy_constructible = hb_is_constructible<T, hb_add_lvalue_reference<hb_add_const<T>>>;
|
||||
#define hb_is_copy_constructible(T) hb_is_copy_constructible<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_move_constructible = hb_is_constructible<T, hb_add_rvalue_reference<hb_add_const<T>>>;
|
||||
#define hb_is_move_constructible(T) hb_is_move_constructible<T>::value
|
||||
|
||||
template <typename T, typename U, typename>
|
||||
struct _hb_is_assignable : hb_false_type {};
|
||||
template <typename T, typename U>
|
||||
struct _hb_is_assignable<T, U, hb_void_t<decltype (hb_declval (T) = hb_declval (U))>> : hb_true_type {};
|
||||
template <typename T, typename U>
|
||||
using hb_is_assignable = _hb_is_assignable<T, U, void>;
|
||||
#define hb_is_assignable(T,U) hb_is_assignable<T, U>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_copy_assignable = hb_is_assignable<hb_add_lvalue_reference<T>,
|
||||
hb_add_lvalue_reference<hb_add_const<T>>>;
|
||||
#define hb_is_copy_assignable(T) hb_is_copy_assignable<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_move_assignable = hb_is_assignable<hb_add_lvalue_reference<T>,
|
||||
hb_add_rvalue_reference<T>>;
|
||||
#define hb_is_move_assignable(T) hb_is_move_assignable<T>::value
|
||||
|
||||
/* Trivial versions. */
|
||||
|
||||
template <typename T> union hb_trivial { T value; };
|
||||
|
||||
/* Don't know how to do the following. */
|
||||
template <typename T>
|
||||
using hb_is_trivially_destructible= hb_is_destructible<hb_trivial<T>>;
|
||||
#define hb_is_trivially_destructible(T) hb_is_trivially_destructible<T>::value
|
||||
|
||||
/* Don't know how to do the following. */
|
||||
//template <typename T, typename ...Ts>
|
||||
//using hb_is_trivially_constructible= hb_is_constructible<hb_trivial<T>, hb_trivial<Ts>...>;
|
||||
//#define hb_is_trivially_constructible(...) hb_is_trivially_constructible<__VA_ARGS__>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_default_constructible= hb_is_default_constructible<hb_trivial<T>>;
|
||||
#define hb_is_trivially_default_constructible(T) hb_is_trivially_default_constructible<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_copy_constructible= hb_is_copy_constructible<hb_trivial<T>>;
|
||||
#define hb_is_trivially_copy_constructible(T) hb_is_trivially_copy_constructible<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_move_constructible= hb_is_move_constructible<hb_trivial<T>>;
|
||||
#define hb_is_trivially_move_constructible(T) hb_is_trivially_move_constructible<T>::value
|
||||
|
||||
/* Don't know how to do the following. */
|
||||
//template <typename T, typename U>
|
||||
//using hb_is_trivially_assignable= hb_is_assignable<hb_trivial<T>, hb_trivial<U>>;
|
||||
//#define hb_is_trivially_assignable(T,U) hb_is_trivially_assignable<T, U>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_copy_assignable= hb_is_copy_assignable<hb_trivial<T>>;
|
||||
#define hb_is_trivially_copy_assignable(T) hb_is_trivially_copy_assignable<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_move_assignable= hb_is_move_assignable<hb_trivial<T>>;
|
||||
#define hb_is_trivially_move_assignable(T) hb_is_trivially_move_assignable<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivially_copyable= hb_bool_constant<
|
||||
hb_is_trivially_destructible (T) &&
|
||||
(!hb_is_move_assignable (T) || hb_is_trivially_move_assignable (T)) &&
|
||||
(!hb_is_move_constructible (T) || hb_is_trivially_move_constructible (T)) &&
|
||||
(!hb_is_copy_assignable (T) || hb_is_trivially_copy_assignable (T)) &&
|
||||
(!hb_is_copy_constructible (T) || hb_is_trivially_copy_constructible (T)) &&
|
||||
true
|
||||
>;
|
||||
#define hb_is_trivially_copyable(T) hb_is_trivially_copyable<T>::value
|
||||
|
||||
template <typename T>
|
||||
using hb_is_trivial= hb_bool_constant<
|
||||
hb_is_trivially_copyable (T) &&
|
||||
hb_is_trivially_default_constructible (T)
|
||||
>;
|
||||
#define hb_is_trivial(T) hb_is_trivial<T>::value
#endif /* HB_META_HH */
@ -127,8 +127,6 @@ typedef int hb_mutex_impl_t;
|
||||
|
||||
struct hb_mutex_t
|
||||
{
|
||||
/* TODO Add tracing. */
|
||||
|
||||
hb_mutex_impl_t m;
|
||||
|
||||
void init () { hb_mutex_impl_init (&m); }
|
||||
|
@ -37,7 +37,7 @@
|
||||
|
||||
/* Global nul-content Null pool. Enlarge as necessary. */
|
||||
|
||||
#define HB_NULL_POOL_SIZE 9880
|
||||
#define HB_NULL_POOL_SIZE 384
|
||||
|
||||
/* Use SFINAE to sniff whether T has min_size; in which case return T::null_size,
|
||||
* otherwise return sizeof(T). */
|
||||
@ -46,16 +46,13 @@
|
||||
* https://stackoverflow.com/questions/7776448/sfinae-tried-with-bool-gives-compiler-error-template-argument-tvalue-invol
|
||||
*/
|
||||
|
||||
template <typename T, typename B>
|
||||
struct _hb_null_size
|
||||
{ enum { value = sizeof (T) }; };
|
||||
template <typename T, typename>
|
||||
struct _hb_null_size : hb_integral_constant<unsigned, sizeof (T)> {};
|
||||
template <typename T>
|
||||
struct _hb_null_size<T, hb_bool_tt<true || sizeof (T::min_size)> >
|
||||
{ enum { value = T::null_size }; };
|
||||
struct _hb_null_size<T, hb_void_t<decltype (T::min_size)>> : hb_integral_constant<unsigned, T::null_size> {};
|
||||
|
||||
template <typename T>
|
||||
struct hb_null_size
|
||||
{ enum { value = _hb_null_size<T, hb_true_t>::value }; };
|
||||
using hb_null_size = _hb_null_size<T, void>;
|
||||
#define hb_null_size(T) hb_null_size<T>::value
|
||||
|
||||
/* These doesn't belong here, but since is copy/paste from above, put it here. */
|
||||
@ -63,16 +60,12 @@ struct hb_null_size
|
||||
/* hb_static_size (T)
|
||||
* Returns T::static_size if T::min_size is defined, or sizeof (T) otherwise. */
|
||||
|
||||
template <typename T, typename B>
|
||||
struct _hb_static_size
|
||||
{ enum { value = sizeof (T) }; };
|
||||
template <typename T, typename>
|
||||
struct _hb_static_size : hb_integral_constant<unsigned, sizeof (T)> {};
|
||||
template <typename T>
|
||||
struct _hb_static_size<T, hb_bool_tt<true || sizeof (T::min_size)> >
|
||||
{ enum { value = T::static_size }; };
|
||||
|
||||
struct _hb_static_size<T, hb_void_t<decltype (T::min_size)>> : hb_integral_constant<unsigned, T::static_size> {};
|
||||
template <typename T>
|
||||
struct hb_static_size
|
||||
{ enum { value = _hb_static_size<T, hb_true_t>::value }; };
|
||||
using hb_static_size = _hb_static_size<T, void>;
|
||||
#define hb_static_size(T) hb_static_size<T>::value
|
||||
|
||||
|
||||
@ -95,7 +88,7 @@ struct Null {
|
||||
template <typename QType>
|
||||
struct NullHelper
|
||||
{
|
||||
typedef hb_remove_const (hb_remove_reference (QType)) Type;
|
||||
typedef hb_remove_const<hb_remove_reference<QType>> Type;
|
||||
static const Type & get_null () { return Null<Type>::get_null (); }
|
||||
};
|
||||
#define Null(Type) NullHelper<Type>::get_null ()
|
||||
@ -148,7 +141,7 @@ static inline Type& Crap () {
|
||||
template <typename QType>
|
||||
struct CrapHelper
|
||||
{
|
||||
typedef hb_remove_const (hb_remove_reference (QType)) Type;
|
||||
typedef hb_remove_const<hb_remove_reference<QType>> Type;
|
||||
static Type & get_crap () { return Crap<Type> (); }
|
||||
};
|
||||
#define Crap(Type) CrapHelper<Type>::get_crap ()
|
||||
@ -171,7 +164,7 @@ struct CrapOrNullHelper<const Type> {
|
||||
template <typename P>
|
||||
struct hb_nonnull_ptr_t
|
||||
{
|
||||
typedef hb_remove_pointer (P) T;
|
||||
typedef hb_remove_pointer<P> T;
|
||||
|
||||
hb_nonnull_ptr_t (T *v_ = nullptr) : v (v_) {}
|
||||
T * operator = (T *v_) { return v = v_; }
|
||||
|
@ -56,7 +56,7 @@ typedef struct TableRecord
|
||||
{
|
||||
int cmp (Tag t) const { return -t.cmp (tag); }
|
||||
|
||||
static int cmp (const void *pa, const void *pb)
|
||||
HB_INTERNAL static int cmp (const void *pa, const void *pb)
|
||||
{
|
||||
const TableRecord *a = (const TableRecord *) pa;
|
||||
const TableRecord *b = (const TableRecord *) pb;
|
||||
@ -86,15 +86,15 @@ typedef struct OffsetTable
|
||||
const TableRecord& get_table (unsigned int i) const
|
||||
{ return tables[i]; }
|
||||
unsigned int get_table_tags (unsigned int start_offset,
|
||||
unsigned int *table_count, /* IN/OUT */
|
||||
hb_tag_t *table_tags /* OUT */) const
|
||||
unsigned int *table_count, /* IN/OUT */
|
||||
hb_tag_t *table_tags /* OUT */) const
|
||||
{
|
||||
if (table_count)
|
||||
{
|
||||
if (start_offset >= tables.len)
|
||||
*table_count = 0;
|
||||
else
|
||||
*table_count = MIN<unsigned int> (*table_count, tables.len - start_offset);
|
||||
*table_count = hb_min (*table_count, tables.len - start_offset);
|
||||
|
||||
const TableRecord *sub_tables = tables.arrayZ + start_offset;
|
||||
unsigned int count = *table_count;
|
||||
@ -222,7 +222,7 @@ struct TTCHeaderVersion1
|
||||
Tag ttcTag; /* TrueType Collection ID string: 'ttcf' */
|
||||
FixedVersion<>version; /* Version of the TTC Header (1.0),
|
||||
* 0x00010000u */
|
||||
LArrayOf<LOffsetTo<OffsetTable> >
|
||||
LArrayOf<LOffsetTo<OffsetTable>>
|
||||
table; /* Array of offsets to the OffsetTable for each font
|
||||
* from the beginning of the file */
|
||||
public:
|
||||
@ -334,7 +334,7 @@ struct ResourceTypeRecord
|
||||
protected:
|
||||
Tag tag; /* Resource type. */
|
||||
HBUINT16 resCountM1; /* Number of resources minus 1. */
|
||||
NNOffsetTo<UnsizedArrayOf<ResourceRecord> >
|
||||
NNOffsetTo<UnsizedArrayOf<ResourceRecord>>
|
||||
resourcesZ; /* Offset from beginning of resource type list
|
||||
* to reference item list for this type. */
|
||||
public:
|
||||
@ -390,7 +390,7 @@ struct ResourceMap
|
||||
HBUINT32 reserved1; /* Reserved for handle to next resource map */
|
||||
HBUINT16 resreved2; /* Reserved for file reference number */
|
||||
HBUINT16 attrs; /* Resource fork attribute */
|
||||
NNOffsetTo<ArrayOfM1<ResourceTypeRecord> >
|
||||
NNOffsetTo<ArrayOfM1<ResourceTypeRecord>>
|
||||
typeList; /* Offset from beginning of map to
|
||||
* resource type list */
|
||||
Offset16 nameList; /* Offset from beginning of map to
|
||||
@ -422,7 +422,7 @@ struct ResourceForkHeader
|
||||
}
|
||||
|
||||
protected:
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8> >
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
data; /* Offset from beginning of resource fork
|
||||
* to resource data */
|
||||
LNNOffsetTo<ResourceMap >
|
||||
|
@ -57,13 +57,14 @@ template <typename Type, unsigned int Size>
|
||||
struct IntType
|
||||
{
|
||||
typedef Type type;
|
||||
typedef typename hb_signedness_int (hb_is_signed (Type)) wide_type;
|
||||
typedef hb_conditional<hb_is_signed (Type), signed, unsigned> wide_type;
|
||||
|
||||
IntType<Type, Size>& operator = (wide_type i) { v = i; return *this; }
|
||||
operator wide_type () const { return v; }
|
||||
bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
|
||||
bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
|
||||
static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
|
||||
HB_INTERNAL static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b)
|
||||
{ return b->cmp (*a); }
|
||||
template <typename Type2>
|
||||
int cmp (Type2 a) const
|
||||
{
|
||||
@ -109,7 +110,7 @@ struct F2DOT14 : HBINT16
|
||||
F2DOT14& operator = (uint16_t i ) { HBINT16::operator= (i); return *this; }
|
||||
// 16384 means 1<<14
|
||||
float to_float () const { return ((int32_t) v) / 16384.f; }
|
||||
void set_float (float f) { v = round (f * 16384.f); }
|
||||
void set_float (float f) { v = roundf (f * 16384.f); }
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (2);
|
||||
};
|
||||
@ -120,7 +121,7 @@ struct Fixed : HBINT32
|
||||
Fixed& operator = (uint32_t i) { HBINT32::operator= (i); return *this; }
|
||||
// 65536 means 1<<16
|
||||
float to_float () const { return ((int32_t) v) / 65536.f; }
|
||||
void set_float (float f) { v = round (f * 65536.f); }
|
||||
void set_float (float f) { v = roundf (f * 65536.f); }
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (4);
|
||||
};
|
||||
@ -181,7 +182,7 @@ struct Offset : Type
|
||||
void *serialize (hb_serialize_context_t *c, const void *base)
|
||||
{
|
||||
void *t = c->start_embed<void> ();
|
||||
*this = (char *) t - (char *) base; /* TODO(serialize) Overflow? */
|
||||
c->check_assign (*this, (unsigned) ((char *) t - (char *) base));
|
||||
return t;
|
||||
}
|
||||
|
||||
@ -262,6 +263,9 @@ struct _hb_has_null<Type, true>
|
||||
template <typename Type, typename OffsetType=HBUINT16, bool has_null=true>
|
||||
struct OffsetTo : Offset<OffsetType, has_null>
|
||||
{
|
||||
HB_DELETE_COPY_ASSIGN (OffsetTo);
|
||||
OffsetTo () = default;
|
||||
|
||||
OffsetTo& operator = (typename OffsetType::type i) { OffsetType::operator= (i); return *this; }
|
||||
|
||||
const Type& operator () (const void *base) const
|
||||
@ -280,18 +284,18 @@ struct OffsetTo : Offset<OffsetType, has_null>
|
||||
return * (Type *) Offset<OffsetType>::serialize (c, base);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
bool serialize_subset (hb_subset_context_t *c, const T &src, const void *base)
|
||||
template <typename ...Ts>
|
||||
bool serialize_subset (hb_subset_context_t *c, const Type &src, const void *base, Ts&&... ds)
|
||||
{
|
||||
*this = 0;
|
||||
if (has_null && &src == &Null (T))
|
||||
if (has_null && &src == _hb_has_null<Type, has_null>::get_null ())
|
||||
return false;
|
||||
|
||||
auto *s = c->serializer;
|
||||
|
||||
s->push ();
|
||||
|
||||
bool ret = src.subset (c);
|
||||
bool ret = c->dispatch (src, hb_forward<Ts> (ds)...);
|
||||
|
||||
if (ret || !has_null)
|
||||
s->add_link (*this, s->pop_pack (), base);
|
||||
@ -301,6 +305,23 @@ struct OffsetTo : Offset<OffsetType, has_null>
|
||||
return ret;
|
||||
}
|
||||
|
||||
/* TODO: Somehow merge this with previous function into a serialize_dispatch(). */
|
||||
template <typename ...Ts>
|
||||
bool serialize_copy (hb_serialize_context_t *c, const Type &src, const void *base, Ts&&... ds)
|
||||
{
|
||||
*this = 0;
|
||||
if (has_null && &src == _hb_has_null<Type, has_null>::get_null ())
|
||||
return false;
|
||||
|
||||
c->push ();
|
||||
|
||||
bool ret = c->copy (src, hb_forward<Ts> (ds)...);
|
||||
|
||||
c->add_link (*this, c->pop_pack (), base);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
@ -310,39 +331,13 @@ struct OffsetTo : Offset<OffsetType, has_null>
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (sanitize_shallow (c, base) &&
|
||||
(this->is_null () ||
|
||||
StructAtOffset<Type> (base, *this).sanitize (c) ||
|
||||
neuter (c)));
|
||||
}
|
||||
template <typename T1>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (sanitize_shallow (c, base) &&
|
||||
(this->is_null () ||
|
||||
StructAtOffset<Type> (base, *this).sanitize (c, d1) ||
|
||||
neuter (c)));
|
||||
}
|
||||
template <typename T1, typename T2>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (sanitize_shallow (c, base) &&
|
||||
(this->is_null () ||
|
||||
StructAtOffset<Type> (base, *this).sanitize (c, d1, d2) ||
|
||||
neuter (c)));
|
||||
}
|
||||
template <typename T1, typename T2, typename T3>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2, T3 d3) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (sanitize_shallow (c, base) &&
|
||||
(this->is_null () ||
|
||||
StructAtOffset<Type> (base, *this).sanitize (c, d1, d2, d3) ||
|
||||
c->dispatch (StructAtOffset<Type> (base, *this), hb_forward<Ts> (ds)...) ||
|
||||
neuter (c)));
|
||||
}
|
||||
|
||||
@ -417,38 +412,42 @@ struct UnsizedArrayOf
|
||||
void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
|
||||
{ as_array (len).qsort (start, end); }
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
|
||||
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
||||
|
||||
/* Note: for structs that do not reference other structs,
|
||||
* we do not need to call their sanitize() as we already did
|
||||
* a bound check on the aggregate array size. We just include
|
||||
* a small unreachable expression to make sure the structs
|
||||
* pointed to do have a simple sanitize(), ie. they do not
|
||||
* reference other structs via offsets.
|
||||
*/
|
||||
(void) (false && arrayZ[0].sanitize (c));
|
||||
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend (*this, items_len))) return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const
|
||||
template <typename Iterator,
|
||||
hb_requires (hb_is_source_of (Iterator, Type))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator items)
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!arrayZ[i].sanitize (c, base)))
|
||||
return_trace (false);
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned count = items.len ();
|
||||
if (unlikely (!serialize (c, count))) return_trace (false);
|
||||
/* TODO Umm. Just exhaust the iterator instead? Being extra
|
||||
* cautious right now.. */
|
||||
for (unsigned i = 0; i < count; i++, ++items)
|
||||
arrayZ[i] = *items;
|
||||
return_trace (true);
|
||||
}
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const
|
||||
|
||||
UnsizedArrayOf* copy (hb_serialize_context_t *c, unsigned count) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->start_embed (this);
|
||||
if (unlikely (!as_array (count).copy (c))) return_trace (nullptr);
|
||||
return_trace (out);
|
||||
}
|
||||
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true);
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
|
||||
if (unlikely (!c->dispatch (arrayZ[i], hb_forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
@ -467,7 +466,7 @@ struct UnsizedArrayOf
|
||||
|
||||
/* Unsized array of offset's */
|
||||
template <typename Type, typename OffsetType, bool has_null=true>
|
||||
using UnsizedOffsetArrayOf = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null> >;
|
||||
using UnsizedOffsetArrayOf = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null>>;
|
||||
|
||||
/* Unsized array of offsets relative to the beginning of the array itself. */
|
||||
template <typename Type, typename OffsetType, bool has_null=true>
|
||||
@ -488,17 +487,12 @@ struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType, has_null>
|
||||
return this+*p;
|
||||
}
|
||||
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this)));
|
||||
}
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this, user_data)));
|
||||
return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>
|
||||
::sanitize (c, count, this, hb_forward<Ts> (ds)...)));
|
||||
}
|
||||
};
|
||||
|
||||
@ -578,12 +572,12 @@ struct ArrayOf
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
len = items_len; /* TODO(serialize) Overflow? */
|
||||
c->check_assign (len, items_len);
|
||||
if (unlikely (!c->extend (*this))) return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
template <typename Iterator,
|
||||
hb_enable_if (hb_is_iterator_of (Iterator, const Type))>
|
||||
hb_requires (hb_is_source_of (Iterator, Type))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator items)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
@ -591,45 +585,30 @@ struct ArrayOf
|
||||
if (unlikely (!serialize (c, count))) return_trace (false);
|
||||
/* TODO Umm. Just exhaust the iterator instead? Being extra
|
||||
* cautious right now.. */
|
||||
for (unsigned i = 0; i < count; i++, items++)
|
||||
for (unsigned i = 0; i < count; i++, ++items)
|
||||
arrayZ[i] = *items;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
ArrayOf* copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
|
||||
/* Note: for structs that do not reference other structs,
|
||||
* we do not need to call their sanitize() as we already did
|
||||
* a bound check on the aggregate array size. We just include
|
||||
* a small unreachable expression to make sure the structs
|
||||
* pointed to do have a simple sanitize(), ie. they do not
|
||||
* reference other structs via offsets.
|
||||
*/
|
||||
(void) (false && arrayZ[0].sanitize (c));
|
||||
|
||||
return_trace (true);
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->start_embed (this);
|
||||
if (unlikely (!c->extend_min (out))) return_trace (nullptr);
|
||||
c->check_assign (out->len, len);
|
||||
if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
|
||||
return_trace (out);
|
||||
}
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true);
|
||||
unsigned int count = len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!arrayZ[i].sanitize (c, base)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
unsigned int count = len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
|
||||
if (unlikely (!c->dispatch (arrayZ[i], hb_forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
@ -662,9 +641,9 @@ using PString = ArrayOf<HBUINT8, HBUINT8>;
|
||||
|
||||
/* Array of Offset's */
|
||||
template <typename Type>
|
||||
using OffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT16> >;
|
||||
using OffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT16>>;
|
||||
template <typename Type>
|
||||
using LOffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT32> >;
|
||||
using LOffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT32>>;
|
||||
template <typename Type>
|
||||
using LOffsetLArrayOf = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
|
||||
|
||||
@ -696,16 +675,11 @@ struct OffsetListOf : OffsetArrayOf<Type>
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (OffsetArrayOf<Type>::sanitize (c, this));
|
||||
}
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, T user_data) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
|
||||
return_trace (OffsetArrayOf<Type>::sanitize (c, this, hb_forward<Ts> (ds)...));
|
||||
}
|
||||
};
|
||||
|
||||
@ -737,27 +711,23 @@ struct HeadlessArrayOf
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
lenP1 = items.length + 1; /* TODO(serialize) Overflow? */
|
||||
c->check_assign (lenP1, items.length + 1);
|
||||
if (unlikely (!c->extend (*this))) return_trace (false);
|
||||
for (unsigned int i = 0; i < items.length; i++)
|
||||
arrayZ[i] = items[i];
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
|
||||
/* Note: for structs that do not reference other structs,
|
||||
* we do not need to call their sanitize() as we already did
|
||||
* a bound check on the aggregate array size. We just include
|
||||
* a small unreachable expression to make sure the structs
|
||||
* pointed to do have a simple sanitize(), ie. they do not
|
||||
* reference other structs via offsets.
|
||||
*/
|
||||
(void) (false && arrayZ[0].sanitize (c));
|
||||
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true);
|
||||
unsigned int count = lenP1 ? lenP1 - 1 : 0;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], hb_forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
@ -797,14 +767,14 @@ struct ArrayOfM1
|
||||
unsigned int get_size () const
|
||||
{ return lenM1.static_size + (lenM1 + 1) * Type::static_size; }
|
||||
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
unsigned int count = lenM1 + 1;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
|
||||
if (unlikely (!c->dispatch (arrayZ[i], hb_forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
@ -855,7 +825,7 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
|
||||
return_trace (ret);
|
||||
}
|
||||
template <typename Iterator,
|
||||
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const Type))>
|
||||
hb_requires (hb_is_sorted_source_of (Iterator, Type))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator items)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
@ -863,7 +833,6 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
|
||||
return_trace (ret);
|
||||
}
|
||||
|
||||
|
||||
template <typename T>
|
||||
Type &bsearch (const T &x, Type ¬_found = Crap (Type))
|
||||
{ return *as_array ().bsearch (x, ¬_found); }
|
||||
@ -896,7 +865,7 @@ struct BinSearchHeader
|
||||
{
|
||||
len = v;
|
||||
assert (len == v);
|
||||
entrySelector = MAX (1u, hb_bit_storage (v)) - 1;
|
||||
entrySelector = hb_max (1u, hb_bit_storage (v)) - 1;
|
||||
searchRange = 16 * (1u << entrySelector);
|
||||
rangeShift = v * 16 > searchRange
|
||||
? 16 * v - searchRange
|
||||
@ -915,7 +884,7 @@ struct BinSearchHeader
|
||||
};
|
||||
|
||||
template <typename Type, typename LenType=HBUINT16>
|
||||
using BinSearchArrayOf = SortedArrayOf<Type, BinSearchHeader<LenType> >;
|
||||
using BinSearchArrayOf = SortedArrayOf<Type, BinSearchHeader<LenType>>;
|
||||
|
||||
|
||||
struct VarSizedBinSearchHeader
|
||||
@ -980,40 +949,15 @@ struct VarSizedBinSearchArrayOf
|
||||
unsigned int get_size () const
|
||||
{ return header.static_size + header.nUnits * header.unitSize; }
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
|
||||
/* Note: for structs that do not reference other structs,
|
||||
* we do not need to call their sanitize() as we already did
|
||||
* a bound check on the aggregate array size. We just include
|
||||
* a small unreachable expression to make sure the structs
|
||||
* pointed to do have a simple sanitize(), ie. they do not
|
||||
* reference other structs via offsets.
|
||||
*/
|
||||
(void) (false && StructAtOffset<Type> (&bytesZ, 0).sanitize (c));
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base) const
|
||||
template <typename ...Ts>
|
||||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true);
|
||||
unsigned int count = get_length ();
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(*this)[i].sanitize (c, base)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
template <typename T>
|
||||
bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
unsigned int count = get_length ();
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(*this)[i].sanitize (c, base, user_data)))
|
||||
if (unlikely (!(*this)[i].sanitize (c, hb_forward<Ts> (ds)...)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
|
@ -167,7 +167,7 @@ struct CFFIndex
|
||||
byteArray.resize (buffArray.length);
|
||||
for (unsigned int i = 0; i < byteArray.length; i++)
|
||||
{
|
||||
byteArray[i] = byte_str_t (buffArray[i].arrayZ (), buffArray[i].length);
|
||||
byteArray[i] = byte_str_t (buffArray[i].arrayZ, buffArray[i].length);
|
||||
}
|
||||
bool result = this->serialize (c, offSize_, byteArray);
|
||||
byteArray.fini ();
|
||||
|
@ -165,8 +165,8 @@ struct bounds_t
|
||||
{
|
||||
void init ()
|
||||
{
|
||||
min.set_int (0x7FFFFFFF, 0x7FFFFFFF);
|
||||
max.set_int (-0x80000000, -0x80000000);
|
||||
min.set_int (INT_MAX, INT_MAX);
|
||||
max.set_int (INT_MIN, INT_MIN);
|
||||
}
|
||||
|
||||
void update (const point_t &pt)
|
||||
@ -305,6 +305,11 @@ bool _get_bounds (const OT::cff1::accelerator_t *cff, hb_codepoint_t glyph, boun
|
||||
|
||||
bool OT::cff1::accelerator_t::get_extents (hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
|
||||
{
|
||||
#ifdef HB_NO_OT_FONT_CFF
|
||||
/* XXX Remove check when this code moves to .hh file. */
|
||||
return true;
|
||||
#endif
|
||||
|
||||
bounds_t bounds;
|
||||
|
||||
if (!_get_bounds (this, glyph, bounds))
|
||||
|
@ -110,7 +110,8 @@ struct Encoding1 {
|
||||
{
|
||||
if (glyph <= ranges[i].nLeft)
|
||||
{
|
||||
return (hb_codepoint_t)ranges[i].first + glyph;
|
||||
hb_codepoint_t code = (hb_codepoint_t) ranges[i].first + glyph;
|
||||
return (likely (code < 0x100) ? code: CFF_UNDEF_CODE);
|
||||
}
|
||||
glyph -= (ranges[i].nLeft + 1);
|
||||
}
|
||||
|
@ -34,10 +34,10 @@ struct extents_param_t
|
||||
void init ()
|
||||
{
|
||||
path_open = false;
|
||||
min_x.set_int (0x7FFFFFFF);
|
||||
min_y.set_int (0x7FFFFFFF);
|
||||
max_x.set_int (-0x80000000);
|
||||
max_y.set_int (-0x80000000);
|
||||
min_x.set_int (INT_MAX);
|
||||
min_y.set_int (INT_MAX);
|
||||
max_x.set_int (INT_MIN);
|
||||
max_y.set_int (INT_MIN);
|
||||
}
|
||||
|
||||
void start_path () { path_open = true; }
|
||||
@ -99,6 +99,11 @@ bool OT::cff2::accelerator_t::get_extents (hb_font_t *font,
|
||||
hb_codepoint_t glyph,
|
||||
hb_glyph_extents_t *extents) const
|
||||
{
|
||||
#ifdef HB_NO_OT_FONT_CFF
|
||||
/* XXX Remove check when this code moves to .hh file. */
|
||||
return true;
|
||||
#endif
|
||||
|
||||
if (unlikely (!is_valid () || (glyph >= num_glyphs))) return false;
|
||||
|
||||
unsigned int num_coords;
|
||||
|
@ -93,7 +93,7 @@ struct CmapSubtableFormat4
|
||||
this->length = get_sub_table_size (segments);
|
||||
|
||||
this->segCountX2 = segments.length * 2;
|
||||
this->entrySelector = MAX (1u, hb_bit_storage (segments.length)) - 1;
|
||||
this->entrySelector = hb_max (1u, hb_bit_storage (segments.length)) - 1;
|
||||
this->searchRange = 2 * (1u << this->entrySelector);
|
||||
this->rangeShift = segments.length * 2 > this->searchRange
|
||||
? 2 * segments.length - this->searchRange
|
||||
@ -142,7 +142,7 @@ struct CmapSubtableFormat4
|
||||
for (unsigned int j = 0; j < num_codepoints; j++)
|
||||
{
|
||||
hb_codepoint_t cp = segments[i].start_code + j;
|
||||
hb_codepoint_t new_gid;
|
||||
hb_codepoint_t new_gid = 0;
|
||||
if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid)))
|
||||
return_trace (false);
|
||||
glyph_id_array[j] = new_gid;
|
||||
@ -183,7 +183,7 @@ struct CmapSubtableFormat4
|
||||
|
||||
hb_codepoint_t cp = HB_SET_VALUE_INVALID;
|
||||
while (plan->unicodes->next (&cp)) {
|
||||
hb_codepoint_t new_gid;
|
||||
hb_codepoint_t new_gid = 0;
|
||||
if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid)))
|
||||
{
|
||||
DEBUG_MSG(SUBSET, nullptr, "Unable to find new gid for %04x", cp);
|
||||
@ -285,7 +285,7 @@ struct CmapSubtableFormat4
|
||||
*glyph = gid;
|
||||
return true;
|
||||
}
|
||||
static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph)
|
||||
HB_INTERNAL static bool get_glyph_func (const void *obj, hb_codepoint_t codepoint, hb_codepoint_t *glyph)
|
||||
{
|
||||
return ((const accelerator_t *) obj)->get_glyph (codepoint, glyph);
|
||||
}
|
||||
@ -348,7 +348,7 @@ struct CmapSubtableFormat4
|
||||
/* Some broken fonts have too long of a "length" value.
|
||||
* If that is the case, just change the value to truncate
|
||||
* the subtable at the end of the blob. */
|
||||
uint16_t new_length = (uint16_t) MIN ((uintptr_t) 65535,
|
||||
uint16_t new_length = (uint16_t) hb_min ((uintptr_t) 65535,
|
||||
(uintptr_t) (c->end -
|
||||
(char *) this));
|
||||
if (!c->try_set (&length, new_length))
|
||||
@ -478,7 +478,7 @@ struct CmapSubtableLongSegmented
|
||||
{
|
||||
for (unsigned int i = 0; i < this->groups.len; i++) {
|
||||
out->add_range (this->groups[i].startCharCode,
|
||||
MIN ((hb_codepoint_t) this->groups[i].endCharCode,
|
||||
hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
|
||||
(hb_codepoint_t) HB_UNICODE_MAX));
|
||||
}
|
||||
}
|
||||
@ -542,7 +542,7 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
|
||||
|
||||
hb_codepoint_t cp = HB_SET_VALUE_INVALID;
|
||||
while (plan->unicodes->next (&cp)) {
|
||||
hb_codepoint_t new_gid;
|
||||
hb_codepoint_t new_gid = 0;
|
||||
if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid)))
|
||||
{
|
||||
DEBUG_MSG(SUBSET, nullptr, "Unable to find new gid for %04x", cp);
|
||||
@ -623,7 +623,7 @@ struct DefaultUVS : SortedArrayOf<UnicodeValueRange, HBUINT32>
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
hb_codepoint_t first = arrayZ[i].startUnicodeValue;
|
||||
hb_codepoint_t last = MIN ((hb_codepoint_t) (first + arrayZ[i].additionalCount),
|
||||
hb_codepoint_t last = hb_min ((hb_codepoint_t) (first + arrayZ[i].additionalCount),
|
||||
(hb_codepoint_t) HB_UNICODE_MAX);
|
||||
out->add_range (first, last);
|
||||
}
|
||||
@ -904,7 +904,7 @@ struct cmap
|
||||
// Write out format 4 sub table
|
||||
{
|
||||
CmapSubtable &subtable = format4_plat0_rec.subtable.serialize (&c, table);
|
||||
format4_plat3_rec.subtable = format4_plat0_rec.subtable;
|
||||
format4_plat3_rec.subtable = (unsigned int) format4_plat0_rec.subtable;
|
||||
subtable.u.format = 4;
|
||||
|
||||
CmapSubtableFormat4 &format4 = subtable.u.format4;
|
||||
@ -1096,18 +1096,18 @@ struct cmap
|
||||
hb_codepoint_t *glyph);
|
||||
|
||||
template <typename Type>
|
||||
static bool get_glyph_from (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
HB_INTERNAL static bool get_glyph_from (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
return typed_obj->get_glyph (codepoint, glyph);
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
static bool get_glyph_from_symbol (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
HB_INTERNAL static bool get_glyph_from_symbol (const void *obj,
|
||||
hb_codepoint_t codepoint,
|
||||
hb_codepoint_t *glyph)
|
||||
{
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
if (likely (typed_obj->get_glyph (codepoint, glyph)))
|
||||
|
@ -144,7 +144,7 @@ struct IndexSubtableFormat1Or3
|
||||
}
|
||||
|
||||
IndexSubtableHeader header;
|
||||
UnsizedArrayOf<Offset<OffsetType> >
|
||||
UnsizedArrayOf<Offset<OffsetType>>
|
||||
offsetArrayZ;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY(8, offsetArrayZ);
|
||||
@ -349,15 +349,15 @@ struct CBLC
|
||||
if (unlikely (!count))
|
||||
return Null(BitmapSizeTable);
|
||||
|
||||
unsigned int requested_ppem = MAX (font->x_ppem, font->y_ppem);
|
||||
unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem);
|
||||
if (!requested_ppem)
|
||||
requested_ppem = 1<<30; /* Choose largest strike. */
|
||||
unsigned int best_i = 0;
|
||||
unsigned int best_ppem = MAX (sizeTables[0].ppemX, sizeTables[0].ppemY);
|
||||
unsigned int best_ppem = hb_max (sizeTables[0].ppemX, sizeTables[0].ppemY);
|
||||
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
{
|
||||
unsigned int ppem = MAX (sizeTables[i].ppemX, sizeTables[i].ppemY);
|
||||
unsigned int ppem = hb_max (sizeTables[i].ppemX, sizeTables[i].ppemY);
|
||||
if ((requested_ppem <= ppem && ppem < best_ppem) ||
|
||||
(requested_ppem > best_ppem && ppem > best_ppem))
|
||||
{
|
||||
@ -442,12 +442,12 @@ struct CBDT
|
||||
}
|
||||
|
||||
/* Convert to font units. */
|
||||
double x_scale = upem / (double) strike.ppemX;
|
||||
double y_scale = upem / (double) strike.ppemY;
|
||||
extents->x_bearing = round (extents->x_bearing * x_scale);
|
||||
extents->y_bearing = round (extents->y_bearing * y_scale);
|
||||
extents->width = round (extents->width * x_scale);
|
||||
extents->height = round (extents->height * y_scale);
|
||||
float x_scale = upem / (float) strike.ppemX;
|
||||
float y_scale = upem / (float) strike.ppemY;
|
||||
extents->x_bearing = roundf (extents->x_bearing * x_scale);
|
||||
extents->y_bearing = roundf (extents->y_bearing * y_scale);
|
||||
extents->width = roundf (extents->width * x_scale);
|
||||
extents->height = roundf (extents->height * y_scale);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -125,9 +125,9 @@ struct COLR
|
||||
protected:
|
||||
HBUINT16 version; /* Table version number (starts at 0). */
|
||||
HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */
|
||||
LNNOffsetTo<SortedUnsizedArrayOf<BaseGlyphRecord> >
|
||||
LNNOffsetTo<SortedUnsizedArrayOf<BaseGlyphRecord>>
|
||||
baseGlyphsZ; /* Offset to Base Glyph records. */
|
||||
LNNOffsetTo<UnsizedArrayOf<LayerRecord> >
|
||||
LNNOffsetTo<UnsizedArrayOf<LayerRecord>>
|
||||
layersZ; /* Offset to Layer Records. */
|
||||
HBUINT16 numLayers; /* Number of Layer Records. */
|
||||
public:
|
||||
|
@ -87,15 +87,15 @@ struct CPALV1Tail
|
||||
}
|
||||
|
||||
protected:
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT32> >
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT32>>
|
||||
paletteFlagsZ; /* Offset from the beginning of CPAL table to
|
||||
* the Palette Type Array. Set to 0 if no array
|
||||
* is provided. */
|
||||
LNNOffsetTo<UnsizedArrayOf<NameID> >
|
||||
LNNOffsetTo<UnsizedArrayOf<NameID>>
|
||||
paletteLabelsZ; /* Offset from the beginning of CPAL table to
|
||||
* the palette labels array. Set to 0 if no
|
||||
* array is provided. */
|
||||
LNNOffsetTo<UnsizedArrayOf<NameID> >
|
||||
LNNOffsetTo<UnsizedArrayOf<NameID>>
|
||||
colorLabelsZ; /* Offset from the beginning of CPAL table to
|
||||
* the color labels array. Set to 0
|
||||
* if no array is provided. */
|
||||
@ -144,7 +144,7 @@ struct CPAL
|
||||
{
|
||||
hb_array_t<const BGRAColor> segment_colors = palette_colors.sub_array (start_offset, *color_count);
|
||||
/* Always return numColors colors per palette even if it has out-of-bounds start index. */
|
||||
unsigned int count = MIN<unsigned int> (MAX<int> (numColors - start_offset, 0), *color_count);
|
||||
unsigned int count = hb_min ((unsigned) hb_max ((int) (numColors - start_offset), 0), *color_count);
|
||||
*color_count = count;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
colors[i] = segment_colors[i]; /* Bound-checked read. */
|
||||
@ -176,7 +176,7 @@ struct CPAL
|
||||
HBUINT16 numPalettes; /* Number of palettes in the table. */
|
||||
HBUINT16 numColorRecords; /* Total number of color records, combined for
|
||||
* all palettes. */
|
||||
LNNOffsetTo<UnsizedArrayOf<BGRAColor> >
|
||||
LNNOffsetTo<UnsizedArrayOf<BGRAColor>>
|
||||
colorRecordsZ; /* Offset from the beginning of CPAL table to
|
||||
* the first ColorRecord. */
|
||||
UnsizedArrayOf<HBUINT16>
|
||||
|
@ -121,7 +121,7 @@ struct SBIXStrike
|
||||
HBUINT16 resolution; /* The device pixel density (in PPI) for which this
|
||||
* strike was designed. (E.g., 96 PPI, 192 PPI.) */
|
||||
protected:
|
||||
UnsizedArrayOf<LOffsetTo<SBIXGlyph> >
|
||||
UnsizedArrayOf<LOffsetTo<SBIXGlyph>>
|
||||
imageOffsetsZ; /* Offset from the beginning of the strike data header
|
||||
* to bitmap data for an individual glyph ID. */
|
||||
public:
|
||||
@ -175,7 +175,7 @@ struct sbix
|
||||
if (unlikely (!count))
|
||||
return Null(SBIXStrike);
|
||||
|
||||
unsigned int requested_ppem = MAX (font->x_ppem, font->y_ppem);
|
||||
unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem);
|
||||
if (!requested_ppem)
|
||||
requested_ppem = 1<<30; /* Choose largest strike. */
|
||||
/* TODO Add DPI sensitivity as well? */
|
||||
@ -242,11 +242,11 @@ struct sbix
|
||||
/* Convert to font units. */
|
||||
if (strike_ppem)
|
||||
{
|
||||
double scale = font->face->get_upem () / (double) strike_ppem;
|
||||
extents->x_bearing = round (extents->x_bearing * scale);
|
||||
extents->y_bearing = round (extents->y_bearing * scale);
|
||||
extents->width = round (extents->width * scale);
|
||||
extents->height = round (extents->height * scale);
|
||||
float scale = font->face->get_upem () / (float) strike_ppem;
|
||||
extents->x_bearing = roundf (extents->x_bearing * scale);
|
||||
extents->y_bearing = roundf (extents->y_bearing * scale);
|
||||
extents->width = roundf (extents->width * scale);
|
||||
extents->height = roundf (extents->height * scale);
|
||||
}
|
||||
|
||||
hb_blob_destroy (blob);
|
||||
|
@ -62,7 +62,7 @@ struct SVGDocumentIndexEntry
|
||||
* this index entry. */
|
||||
HBUINT16 endGlyphID; /* The last glyph ID in the range described by
|
||||
* this index entry. Must be >= startGlyphID. */
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8> >
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
svgDoc; /* Offset from the beginning of the SVG Document Index
|
||||
* to an SVG document. Must be non-zero. */
|
||||
HBUINT32 svgDocLength; /* Length of the SVG document.
|
||||
@ -107,7 +107,7 @@ struct SVG
|
||||
|
||||
protected:
|
||||
HBUINT16 version; /* Table version (starting at 0). */
|
||||
LOffsetTo<SortedArrayOf<SVGDocumentIndexEntry> >
|
||||
LOffsetTo<SortedArrayOf<SVGDocumentIndexEntry>>
|
||||
svgDocEntries; /* Offset (relative to the start of the SVG table) to the
|
||||
* SVG Documents Index. Must be non-zero. */
|
||||
/* Array of SVG Document Index Entries. */
|
||||
|
@ -70,6 +70,9 @@
|
||||
hb_bool_t
|
||||
hb_ot_color_has_palettes (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return false;
|
||||
#endif
|
||||
return face->table.CPAL->has_data ();
|
||||
}
|
||||
|
||||
@ -86,6 +89,9 @@ hb_ot_color_has_palettes (hb_face_t *face)
|
||||
unsigned int
|
||||
hb_ot_color_palette_get_count (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return 0;
|
||||
#endif
|
||||
return face->table.CPAL->get_palette_count ();
|
||||
}
|
||||
|
||||
@ -109,6 +115,9 @@ hb_ot_name_id_t
|
||||
hb_ot_color_palette_get_name_id (hb_face_t *face,
|
||||
unsigned int palette_index)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return HB_OT_NAME_ID_INVALID;
|
||||
#endif
|
||||
return face->table.CPAL->get_palette_name_id (palette_index);
|
||||
}
|
||||
|
||||
@ -131,6 +140,9 @@ hb_ot_name_id_t
|
||||
hb_ot_color_palette_color_get_name_id (hb_face_t *face,
|
||||
unsigned int color_index)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return HB_OT_NAME_ID_INVALID;
|
||||
#endif
|
||||
return face->table.CPAL->get_color_name_id (color_index);
|
||||
}
|
||||
|
||||
@ -149,6 +161,9 @@ hb_ot_color_palette_flags_t
|
||||
hb_ot_color_palette_get_flags (hb_face_t *face,
|
||||
unsigned int palette_index)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return HB_OT_COLOR_PALETTE_FLAG_DEFAULT;
|
||||
#endif
|
||||
return face->table.CPAL->get_palette_flags (palette_index);
|
||||
}
|
||||
|
||||
@ -180,6 +195,11 @@ hb_ot_color_palette_get_colors (hb_face_t *face,
|
||||
unsigned int *colors_count /* IN/OUT. May be NULL. */,
|
||||
hb_color_t *colors /* OUT. May be NULL. */)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
if (colors_count)
|
||||
*colors_count = 0;
|
||||
return 0;
|
||||
#endif
|
||||
return face->table.CPAL->get_palette_colors (palette_index, start_offset, colors_count, colors);
|
||||
}
|
||||
|
||||
@ -201,6 +221,9 @@ hb_ot_color_palette_get_colors (hb_face_t *face,
|
||||
hb_bool_t
|
||||
hb_ot_color_has_layers (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return false;
|
||||
#endif
|
||||
return face->table.COLR->has_data ();
|
||||
}
|
||||
|
||||
@ -209,9 +232,9 @@ hb_ot_color_has_layers (hb_face_t *face)
|
||||
* @face: #hb_face_t to work upon
|
||||
* @glyph: The glyph index to query
|
||||
* @start_offset: offset of the first layer to retrieve
|
||||
* @count: (inout) (optional): Input = the maximum number of layers to return;
|
||||
* @layer_count: (inout) (optional): Input = the maximum number of layers to return;
|
||||
* Output = the actual number of layers returned (may be zero)
|
||||
* @layers: (out) (array length=count) (nullable): The array of layers found
|
||||
* @layers: (out) (array length=layer_count) (nullable): The array of layers found
|
||||
*
|
||||
* Fetches a list of all color layers for the specified glyph index in the specified
|
||||
* face. The list returned will begin at the offset provided.
|
||||
@ -224,10 +247,15 @@ unsigned int
|
||||
hb_ot_color_glyph_get_layers (hb_face_t *face,
|
||||
hb_codepoint_t glyph,
|
||||
unsigned int start_offset,
|
||||
unsigned int *count, /* IN/OUT. May be NULL. */
|
||||
unsigned int *layer_count, /* IN/OUT. May be NULL. */
|
||||
hb_ot_color_layer_t *layers /* OUT. May be NULL. */)
|
||||
{
|
||||
return face->table.COLR->get_glyph_layers (glyph, start_offset, count, layers);
|
||||
#ifdef HB_NO_COLOR
|
||||
if (layer_count)
|
||||
*layer_count = 0;
|
||||
return 0;
|
||||
#endif
|
||||
return face->table.COLR->get_glyph_layers (glyph, start_offset, layer_count, layers);
|
||||
}
|
||||
|
||||
|
||||
@ -248,6 +276,9 @@ hb_ot_color_glyph_get_layers (hb_face_t *face,
|
||||
hb_bool_t
|
||||
hb_ot_color_has_svg (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return false;
|
||||
#endif
|
||||
return face->table.SVG->has_data ();
|
||||
}
|
||||
|
||||
@ -265,6 +296,9 @@ hb_ot_color_has_svg (hb_face_t *face)
|
||||
hb_blob_t *
|
||||
hb_ot_color_glyph_reference_svg (hb_face_t *face, hb_codepoint_t glyph)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return hb_blob_get_empty ();
|
||||
#endif
|
||||
return face->table.SVG->reference_blob_for_glyph (glyph);
|
||||
}
|
||||
|
||||
@ -286,6 +320,9 @@ hb_ot_color_glyph_reference_svg (hb_face_t *face, hb_codepoint_t glyph)
|
||||
hb_bool_t
|
||||
hb_ot_color_has_png (hb_face_t *face)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return false;
|
||||
#endif
|
||||
return face->table.CBDT->has_data () || face->table.sbix->has_data ();
|
||||
}
|
||||
|
||||
@ -305,6 +342,10 @@ hb_ot_color_has_png (hb_face_t *face)
|
||||
hb_blob_t *
|
||||
hb_ot_color_glyph_reference_png (hb_font_t *font, hb_codepoint_t glyph)
|
||||
{
|
||||
#ifdef HB_NO_COLOR
|
||||
return hb_blob_get_empty ();
|
||||
#endif
|
||||
|
||||
hb_blob_t *blob = hb_blob_get_empty ();
|
||||
|
||||
if (font->face->table.sbix->has_data ())
|
||||
|
@ -110,7 +110,7 @@ HB_EXTERN unsigned int
|
||||
hb_ot_color_glyph_get_layers (hb_face_t *face,
|
||||
hb_codepoint_t glyph,
|
||||
unsigned int start_offset,
|
||||
unsigned int *count, /* IN/OUT. May be NULL. */
|
||||
unsigned int *layer_count, /* IN/OUT. May be NULL. */
|
||||
hb_ot_color_layer_t *layers /* OUT. May be NULL. */);
|
||||
|
||||
/*
|
||||
|
@ -180,15 +180,20 @@ hb_ot_get_glyph_extents (hb_font_t *font,
|
||||
void *user_data HB_UNUSED)
|
||||
{
|
||||
const hb_ot_face_t *ot_face = (const hb_ot_face_t *) font_data;
|
||||
bool ret = ot_face->sbix->get_extents (font, glyph, extents);
|
||||
if (!ret)
|
||||
ret = ot_face->glyf->get_extents (glyph, extents);
|
||||
if (!ret)
|
||||
ret = ot_face->cff1->get_extents (glyph, extents);
|
||||
if (!ret)
|
||||
ret = ot_face->cff2->get_extents (font, glyph, extents);
|
||||
if (!ret)
|
||||
ret = ot_face->CBDT->get_extents (font, glyph, extents);
|
||||
bool ret = false;
|
||||
|
||||
#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
|
||||
if (!ret) ret = ot_face->sbix->get_extents (font, glyph, extents);
|
||||
#endif
|
||||
if (!ret) ret = ot_face->glyf->get_extents (glyph, extents);
|
||||
#ifndef HB_NO_OT_FONT_CFF
|
||||
if (!ret) ret = ot_face->cff1->get_extents (glyph, extents);
|
||||
if (!ret) ret = ot_face->cff2->get_extents (font, glyph, extents);
|
||||
#endif
|
||||
#if !defined(HB_NO_OT_FONT_BITMAP) && !defined(HB_NO_COLOR)
|
||||
if (!ret) ret = ot_face->CBDT->get_extents (font, glyph, extents);
|
||||
#endif
|
||||
|
||||
// TODO Hook up side-bearings variations.
|
||||
extents->x_bearing = font->em_scale_x (extents->x_bearing);
|
||||
extents->y_bearing = font->em_scale_y (extents->y_bearing);
|
||||
|
@ -58,7 +58,7 @@ struct loca
|
||||
public:
|
||||
DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
|
||||
* check the size externally, allow Null() object of it by
|
||||
* defining it MIN() instead. */
|
||||
* defining it _MIN instead. */
|
||||
};
|
||||
|
||||
|
||||
@ -241,7 +241,7 @@ struct glyf
|
||||
loca_table = hb_sanitize_context_t ().reference_table<loca> (face);
|
||||
glyf_table = hb_sanitize_context_t ().reference_table<glyf> (face);
|
||||
|
||||
num_glyphs = MAX (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
|
||||
num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
|
||||
}
|
||||
|
||||
void fini ()
|
||||
@ -283,7 +283,7 @@ struct glyf
|
||||
|
||||
/* based on FontTools _g_l_y_f.py::trim */
|
||||
bool remove_padding (unsigned int start_offset,
|
||||
unsigned int *end_offset) const
|
||||
unsigned int *end_offset) const
|
||||
{
|
||||
if (*end_offset - start_offset < GlyphHeader::static_size) return true;
|
||||
|
||||
@ -451,10 +451,10 @@ struct glyf
|
||||
|
||||
const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (glyf_table, start_offset);
|
||||
|
||||
extents->x_bearing = MIN (glyph_header.xMin, glyph_header.xMax);
|
||||
extents->y_bearing = MAX (glyph_header.yMin, glyph_header.yMax);
|
||||
extents->width = MAX (glyph_header.xMin, glyph_header.xMax) - extents->x_bearing;
|
||||
extents->height = MIN (glyph_header.yMin, glyph_header.yMax) - extents->y_bearing;
|
||||
extents->x_bearing = hb_min (glyph_header.xMin, glyph_header.xMax);
|
||||
extents->y_bearing = hb_max (glyph_header.yMin, glyph_header.yMax);
|
||||
extents->width = hb_max (glyph_header.xMin, glyph_header.xMax) - extents->x_bearing;
|
||||
extents->height = hb_min (glyph_header.yMin, glyph_header.yMax) - extents->y_bearing;
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -471,7 +471,7 @@ struct glyf
|
||||
public:
|
||||
DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
|
||||
* check the size externally, allow Null() object of it by
|
||||
* defining it MIN() instead. */
|
||||
* defining it _MIN instead. */
|
||||
};
|
||||
|
||||
struct glyf_accelerator_t : glyf::accelerator_t {};
|
||||
|
@ -41,71 +41,31 @@ namespace OT {
|
||||
|
||||
struct DeviceRecord
|
||||
{
|
||||
struct SubsetView
|
||||
{
|
||||
const DeviceRecord *source_device_record;
|
||||
unsigned int sizeDeviceRecord;
|
||||
hb_subset_plan_t *subset_plan;
|
||||
|
||||
void init (const DeviceRecord *source_device_record,
|
||||
unsigned int sizeDeviceRecord,
|
||||
hb_subset_plan_t *subset_plan)
|
||||
{
|
||||
this->source_device_record = source_device_record;
|
||||
this->sizeDeviceRecord = sizeDeviceRecord;
|
||||
this->subset_plan = subset_plan;
|
||||
}
|
||||
|
||||
unsigned int len () const
|
||||
{ return this->subset_plan->num_output_glyphs (); }
|
||||
|
||||
const HBUINT8* operator [] (unsigned int new_gid) const
|
||||
{
|
||||
if (unlikely (new_gid >= len ())) return nullptr;
|
||||
|
||||
hb_codepoint_t old_gid;
|
||||
if (!this->subset_plan->old_gid_for_new_gid (new_gid, &old_gid))
|
||||
return &Null(HBUINT8);
|
||||
|
||||
if (old_gid >= sizeDeviceRecord - DeviceRecord::min_size)
|
||||
return nullptr;
|
||||
return &(this->source_device_record->widthsZ[old_gid]);
|
||||
}
|
||||
};
|
||||
|
||||
static unsigned int get_size (unsigned int count)
|
||||
static unsigned int get_size (unsigned count)
|
||||
{ return hb_ceil_to_4 (min_size + count * HBUINT8::static_size); }
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const SubsetView &subset_view)
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, unsigned pixelSize, Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
unsigned int size = get_size (subset_view.len ());
|
||||
if (unlikely (!c->allocate_size<DeviceRecord> (size)))
|
||||
{
|
||||
DEBUG_MSG(SUBSET, nullptr, "Couldn't allocate enough space for DeviceRecord: %d.",
|
||||
size);
|
||||
return_trace (false);
|
||||
}
|
||||
unsigned length = it.len ();
|
||||
|
||||
this->pixelSize = subset_view.source_device_record->pixelSize;
|
||||
this->maxWidth = subset_view.source_device_record->maxWidth;
|
||||
if (unlikely (!c->extend (*this, length))) return_trace (false);
|
||||
|
||||
for (unsigned int i = 0; i < subset_view.len (); i++)
|
||||
{
|
||||
const HBUINT8 *width = subset_view[i];
|
||||
if (!width)
|
||||
{
|
||||
DEBUG_MSG(SUBSET, nullptr, "HDMX width for new gid %d is missing.", i);
|
||||
return_trace (false);
|
||||
}
|
||||
widthsZ[i] = *width;
|
||||
}
|
||||
this->pixelSize = pixelSize;
|
||||
this->maxWidth =
|
||||
+ it
|
||||
| hb_reduce (hb_max, 0u);
|
||||
|
||||
+ it
|
||||
| hb_sink (widthsZ.as_array (length));
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int sizeDeviceRecord) const
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned sizeDeviceRecord) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (likely (c->check_struct (this) &&
|
||||
@ -135,62 +95,63 @@ struct hdmx
|
||||
return StructAtOffset<DeviceRecord> (&this->firstDeviceRecord, i * sizeDeviceRecord);
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c, const hdmx *source_hdmx, hb_subset_plan_t *plan)
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, unsigned version, Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
if (unlikely (!c->extend_min ((*this)))) return_trace (false);
|
||||
|
||||
this->version = source_hdmx->version;
|
||||
this->numRecords = source_hdmx->numRecords;
|
||||
this->sizeDeviceRecord = DeviceRecord::get_size (plan->num_output_glyphs ());
|
||||
this->version = version;
|
||||
this->numRecords = it.len ();
|
||||
this->sizeDeviceRecord = DeviceRecord::get_size (it ? (*it).second.len () : 0);
|
||||
|
||||
for (unsigned int i = 0; i < source_hdmx->numRecords; i++)
|
||||
{
|
||||
DeviceRecord::SubsetView subset_view;
|
||||
subset_view.init (&(*source_hdmx)[i], source_hdmx->sizeDeviceRecord, plan);
|
||||
+ it
|
||||
| hb_apply ([&] (const hb_item_type<Iterator>& _) {
|
||||
c->start_embed<DeviceRecord> ()->serialize (c, _.first, _.second);
|
||||
})
|
||||
;
|
||||
|
||||
if (!c->start_embed<DeviceRecord> ()->serialize (c, subset_view))
|
||||
return_trace (false);
|
||||
}
|
||||
return_trace (c->successful);
|
||||
}
|
||||
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
||||
hdmx *hdmx_prime = c->serializer->start_embed <hdmx> ();
|
||||
if (unlikely (!hdmx_prime)) return_trace (false);
|
||||
|
||||
auto it =
|
||||
+ hb_range ((unsigned) numRecords)
|
||||
| hb_map ([&] (unsigned _)
|
||||
{
|
||||
const DeviceRecord *device_record =
|
||||
&StructAtOffset<DeviceRecord> (&firstDeviceRecord,
|
||||
_ * sizeDeviceRecord);
|
||||
auto row =
|
||||
+ hb_range (c->plan->num_output_glyphs ())
|
||||
| hb_map (c->plan->reverse_glyph_map)
|
||||
| hb_map ([=] (hb_codepoint_t _)
|
||||
{
|
||||
if (c->plan->is_empty_glyph (_))
|
||||
return Null(HBUINT8);
|
||||
return device_record->widthsZ.as_array (get_num_glyphs ()) [_];
|
||||
})
|
||||
;
|
||||
return hb_pair ((unsigned) device_record->pixelSize, +row);
|
||||
})
|
||||
;
|
||||
|
||||
hdmx_prime->serialize (c->serializer, version, it);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
static size_t get_subsetted_size (const hdmx *source_hdmx, hb_subset_plan_t *plan)
|
||||
unsigned get_num_glyphs () const
|
||||
{
|
||||
return min_size + source_hdmx->numRecords * DeviceRecord::get_size (plan->num_output_glyphs ());
|
||||
}
|
||||
|
||||
bool subset (hb_subset_plan_t *plan) const
|
||||
{
|
||||
size_t dest_size = get_subsetted_size (this, plan);
|
||||
hdmx *dest = (hdmx *) malloc (dest_size);
|
||||
if (unlikely (!dest))
|
||||
{
|
||||
DEBUG_MSG(SUBSET, nullptr, "Unable to alloc %lu for hdmx subset output.", (unsigned long) dest_size);
|
||||
return false;
|
||||
}
|
||||
|
||||
hb_serialize_context_t c (dest, dest_size);
|
||||
hdmx *hdmx_prime = c.start_serialize<hdmx> ();
|
||||
if (!hdmx_prime || !hdmx_prime->serialize (&c, this, plan))
|
||||
{
|
||||
free (dest);
|
||||
DEBUG_MSG(SUBSET, nullptr, "Failed to serialize write new hdmx.");
|
||||
return false;
|
||||
}
|
||||
c.end_serialize ();
|
||||
|
||||
hb_blob_t *hdmx_prime_blob = hb_blob_create ((const char *) dest,
|
||||
dest_size,
|
||||
HB_MEMORY_MODE_READONLY,
|
||||
dest,
|
||||
free);
|
||||
bool result = plan->add_table (HB_OT_TAG_hdmx, hdmx_prime_blob);
|
||||
hb_blob_destroy (hdmx_prime_blob);
|
||||
|
||||
return result;
|
||||
return sizeDeviceRecord - DeviceRecord::min_size;
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -240,7 +240,7 @@ struct hmtxvmtx
|
||||
return default_advance;
|
||||
}
|
||||
|
||||
return table->longMetricZ[MIN (glyph, (uint32_t) num_advances - 1)].advance;
|
||||
return table->longMetricZ[hb_min (glyph, (uint32_t) num_advances - 1)].advance;
|
||||
}
|
||||
|
||||
unsigned int get_advance (hb_codepoint_t glyph,
|
||||
|
@ -47,9 +47,9 @@ struct KernSubTableFormat3
|
||||
int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
|
||||
{
|
||||
hb_array_t<const FWORD> kernValue = kernValueZ.as_array (kernValueCount);
|
||||
hb_array_t<const HBUINT8> leftClass = StructAfter<const UnsizedArrayOf<HBUINT8> > (kernValue).as_array (glyphCount);
|
||||
hb_array_t<const HBUINT8> rightClass = StructAfter<const UnsizedArrayOf<HBUINT8> > (leftClass).as_array (glyphCount);
|
||||
hb_array_t<const HBUINT8> kernIndex = StructAfter<const UnsizedArrayOf<HBUINT8> > (rightClass).as_array (leftClassCount * rightClassCount);
|
||||
hb_array_t<const HBUINT8> leftClass = StructAfter<const UnsizedArrayOf<HBUINT8>> (kernValue).as_array (glyphCount);
|
||||
hb_array_t<const HBUINT8> rightClass = StructAfter<const UnsizedArrayOf<HBUINT8>> (leftClass).as_array (glyphCount);
|
||||
hb_array_t<const HBUINT8> kernIndex = StructAfter<const UnsizedArrayOf<HBUINT8>> (rightClass).as_array (leftClassCount * rightClassCount);
|
||||
|
||||
unsigned int leftC = leftClass[left];
|
||||
unsigned int rightC = rightClass[right];
|
||||
@ -121,16 +121,20 @@ struct KernSubTable
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
TRACE_DISPATCH (this, subtable_type);
|
||||
switch (subtable_type) {
|
||||
case 0: return_trace (c->dispatch (u.format0));
|
||||
case 1: return_trace (u.header.apple ? c->dispatch (u.format1) : c->default_return_value ());
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 1: return_trace (u.header.apple ? c->dispatch (u.format1, hb_forward<Ts> (ds)...) : c->default_return_value ());
|
||||
#endif
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 3: return_trace (u.header.apple ? c->dispatch (u.format3) : c->default_return_value ());
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 3: return_trace (u.header.apple ? c->dispatch (u.format3, hb_forward<Ts> (ds)...) : c->default_return_value ());
|
||||
#endif
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -278,7 +282,9 @@ struct kern
|
||||
{
|
||||
switch (get_type ()) {
|
||||
case 0: return u.ot.has_state_machine ();
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 1: return u.aat.has_state_machine ();
|
||||
#endif
|
||||
default:return false;
|
||||
}
|
||||
}
|
||||
@ -287,7 +293,9 @@ struct kern
|
||||
{
|
||||
switch (get_type ()) {
|
||||
case 0: return u.ot.has_cross_stream ();
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 1: return u.aat.has_cross_stream ();
|
||||
#endif
|
||||
default:return false;
|
||||
}
|
||||
}
|
||||
@ -296,7 +304,9 @@ struct kern
|
||||
{
|
||||
switch (get_type ()) {
|
||||
case 0: return u.ot.get_h_kerning (left, right);
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 1: return u.aat.get_h_kerning (left, right);
|
||||
#endif
|
||||
default:return 0;
|
||||
}
|
||||
}
|
||||
@ -304,14 +314,16 @@ struct kern
|
||||
bool apply (AAT::hb_aat_apply_context_t *c) const
|
||||
{ return dispatch (c); }
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
unsigned int subtable_type = get_type ();
|
||||
TRACE_DISPATCH (this, subtable_type);
|
||||
switch (subtable_type) {
|
||||
case 0: return_trace (c->dispatch (u.ot));
|
||||
case 1: return_trace (c->dispatch (u.aat));
|
||||
case 0: return_trace (c->dispatch (u.ot, hb_forward<Ts> (ds)...));
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
case 1: return_trace (c->dispatch (u.aat, hb_forward<Ts> (ds)...));
|
||||
#endif
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -328,7 +340,9 @@ struct kern
|
||||
HBUINT32 version32;
|
||||
HBUINT16 major;
|
||||
KernOT ot;
|
||||
#ifndef HB_NO_SHAPE_AAT
|
||||
KernAAT aat;
|
||||
#endif
|
||||
} u;
|
||||
public:
|
||||
DEFINE_SIZE_UNION (4, version32);
|
||||
|
@ -153,7 +153,7 @@ struct BaseCoord
|
||||
|
||||
struct FeatMinMaxRecord
|
||||
{
|
||||
static int cmp (const void *key_, const void *entry_)
|
||||
HB_INTERNAL static int cmp (const void *key_, const void *entry_)
|
||||
{
|
||||
hb_tag_t key = * (hb_tag_t *) key_;
|
||||
const FeatMinMaxRecord &entry = * (const FeatMinMaxRecord *) entry_;
|
||||
@ -271,7 +271,7 @@ struct BaseValues
|
||||
|
||||
struct BaseLangSysRecord
|
||||
{
|
||||
static int cmp (const void *key_, const void *entry_)
|
||||
HB_INTERNAL static int cmp (const void *key_, const void *entry_)
|
||||
{
|
||||
hb_tag_t key = * (hb_tag_t *) key_;
|
||||
const BaseLangSysRecord &entry = * (const BaseLangSysRecord *) entry_;
|
||||
@ -345,7 +345,7 @@ struct BaseScript
|
||||
struct BaseScriptList;
|
||||
struct BaseScriptRecord
|
||||
{
|
||||
static int cmp (const void *key_, const void *entry_)
|
||||
HB_INTERNAL static int cmp (const void *key_, const void *entry_)
|
||||
{
|
||||
hb_tag_t key = * (hb_tag_t *) key_;
|
||||
const BaseScriptRecord &entry = * (const BaseScriptRecord *) entry_;
|
||||
@ -447,7 +447,7 @@ struct Axis
|
||||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<SortedArrayOf<Tag> >
|
||||
OffsetTo<SortedArrayOf<Tag>>
|
||||
baseTagList; /* Offset to BaseTagList table, from beginning
|
||||
* of Axis table (may be NULL)
|
||||
* Array of 4-byte baseline identification tags — must
|
||||
|
@ -103,7 +103,7 @@ struct Record
|
||||
};
|
||||
|
||||
template <typename Type>
|
||||
struct RecordArrayOf : SortedArrayOf<Record<Type> >
|
||||
struct RecordArrayOf : SortedArrayOf<Record<Type>>
|
||||
{
|
||||
const OffsetTo<Type>& get_offset (unsigned int i) const
|
||||
{ return (*this)[i].offset; }
|
||||
@ -138,7 +138,7 @@ struct RecordListOf : RecordArrayOf<Type>
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct RecordListOf<Type> *out = c->serializer->embed (*this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
unsigned int count = this->len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
@ -229,10 +229,10 @@ struct LangSys
|
||||
return reqFeatureIndex;;
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
LangSys* copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
return_trace (c->serializer->embed (*this));
|
||||
TRACE_SERIALIZE (this);
|
||||
return_trace (c->embed (*this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c,
|
||||
@ -277,12 +277,12 @@ struct Script
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct Script *out = c->serializer->embed (*this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
out->defaultLangSys.serialize_subset (c, this+defaultLangSys, out);
|
||||
out->defaultLangSys.serialize_copy (c->serializer, this+defaultLangSys, out);
|
||||
unsigned int count = langSys.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
out->langSys.arrayZ[i].offset.serialize_subset (c, this+langSys[i].offset, out);
|
||||
out->langSys.arrayZ[i].offset.serialize_copy (c->serializer, this+langSys[i].offset, out);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
@ -559,7 +559,7 @@ struct Feature
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct Feature *out = c->serializer->embed (*this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
out->featureParams = 0; /* TODO(subset) FeatureParams. */
|
||||
return_trace (true);
|
||||
@ -583,25 +583,25 @@ struct Feature
|
||||
* Adobe tools, only the 'size' feature had FeatureParams defined.
|
||||
*/
|
||||
|
||||
OffsetTo<FeatureParams> orig_offset = featureParams;
|
||||
if (likely (featureParams.is_null ()))
|
||||
return_trace (true);
|
||||
|
||||
unsigned int orig_offset = featureParams;
|
||||
if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
|
||||
return_trace (false);
|
||||
|
||||
if (likely (orig_offset.is_null ()))
|
||||
return_trace (true);
|
||||
|
||||
if (featureParams == 0 && closure &&
|
||||
closure->tag == HB_TAG ('s','i','z','e') &&
|
||||
closure->list_base && closure->list_base < this)
|
||||
{
|
||||
unsigned int new_offset_int = (unsigned int) orig_offset -
|
||||
unsigned int new_offset_int = orig_offset -
|
||||
(((char *) this) - ((char *) closure->list_base));
|
||||
|
||||
OffsetTo<FeatureParams> new_offset;
|
||||
/* Check that it did not overflow. */
|
||||
/* Check that it would not overflow. */
|
||||
new_offset = new_offset_int;
|
||||
if (new_offset == new_offset_int &&
|
||||
c->try_set (&featureParams, new_offset) &&
|
||||
c->try_set (&featureParams, new_offset_int) &&
|
||||
!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
|
||||
return_trace (false);
|
||||
}
|
||||
@ -647,16 +647,19 @@ struct Lookup
|
||||
{
|
||||
unsigned int get_subtable_count () const { return subTable.len; }
|
||||
|
||||
template <typename TSubTable>
|
||||
const TSubTable& get_subtable (unsigned int i) const
|
||||
{ return this+CastR<OffsetArrayOf<TSubTable> > (subTable)[i]; }
|
||||
|
||||
template <typename TSubTable>
|
||||
const OffsetArrayOf<TSubTable>& get_subtables () const
|
||||
{ return CastR<OffsetArrayOf<TSubTable> > (subTable); }
|
||||
{ return CastR<OffsetArrayOf<TSubTable>> (subTable); }
|
||||
template <typename TSubTable>
|
||||
OffsetArrayOf<TSubTable>& get_subtables ()
|
||||
{ return CastR<OffsetArrayOf<TSubTable> > (subTable); }
|
||||
{ return CastR<OffsetArrayOf<TSubTable>> (subTable); }
|
||||
|
||||
template <typename TSubTable>
|
||||
const TSubTable& get_subtable (unsigned int i) const
|
||||
{ return this+get_subtables<TSubTable> ()[i]; }
|
||||
template <typename TSubTable>
|
||||
TSubTable& get_subtable (unsigned int i)
|
||||
{ return this+get_subtables<TSubTable> ()[i]; }
|
||||
|
||||
unsigned int get_size () const
|
||||
{
|
||||
@ -682,14 +685,14 @@ struct Lookup
|
||||
return flag;
|
||||
}
|
||||
|
||||
template <typename TSubTable, typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename TSubTable, typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
unsigned int lookup_type = get_type ();
|
||||
TRACE_DISPATCH (this, lookup_type);
|
||||
unsigned int count = get_subtable_count ();
|
||||
for (unsigned int i = 0; i < count; i++) {
|
||||
typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type);
|
||||
typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, hb_forward<Ts> (ds)...);
|
||||
if (c->stop_sublookup_iteration (r))
|
||||
return_trace (r);
|
||||
}
|
||||
@ -715,28 +718,11 @@ struct Lookup
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
/* Older compilers need this to NOT be locally defined in a function. */
|
||||
template <typename TSubTable>
|
||||
struct SubTableSubsetWrapper
|
||||
{
|
||||
SubTableSubsetWrapper (const TSubTable &subtable_,
|
||||
unsigned int lookup_type_) :
|
||||
subtable (subtable_),
|
||||
lookup_type (lookup_type_) {}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{ return subtable.dispatch (c, lookup_type); }
|
||||
|
||||
private:
|
||||
const TSubTable &subtable;
|
||||
unsigned int lookup_type;
|
||||
};
|
||||
|
||||
template <typename TSubTable>
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct Lookup *out = c->serializer->embed (*this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
/* Subset the actual subtables. */
|
||||
@ -746,23 +732,11 @@ struct Lookup
|
||||
OffsetArrayOf<TSubTable>& out_subtables = out->get_subtables<TSubTable> ();
|
||||
unsigned int count = subTable.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
SubTableSubsetWrapper<TSubTable> wrapper (this+subtables[i], get_type ());
|
||||
|
||||
out_subtables[i].serialize_subset (c, wrapper, out);
|
||||
}
|
||||
out_subtables[i].serialize_subset (c, this+subtables[i], out, get_type ());
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
/* Older compilers need this to NOT be locally defined in a function. */
|
||||
template <typename TSubTable>
|
||||
struct SubTableSanitizeWrapper : TSubTable
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) const
|
||||
{ return this->dispatch (c, lookup_type); }
|
||||
};
|
||||
|
||||
template <typename TSubTable>
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
@ -774,16 +748,21 @@ struct Lookup
|
||||
if (!markFilteringSet.sanitize (c)) return_trace (false);
|
||||
}
|
||||
|
||||
if (unlikely (!CastR<OffsetArrayOf<SubTableSanitizeWrapper<TSubTable> > > (subTable)
|
||||
.sanitize (c, this, get_type ())))
|
||||
if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
|
||||
return_trace (false);
|
||||
|
||||
if (unlikely (get_type () == TSubTable::Extension))
|
||||
if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
|
||||
{
|
||||
/* The spec says all subtables of an Extension lookup should
|
||||
* have the same type, which shall not be the Extension type
|
||||
* itself (but we already checked for that).
|
||||
* This is specially important if one has a reverse type! */
|
||||
* This is specially important if one has a reverse type!
|
||||
*
|
||||
* We only do this if sanitizer edit_count is zero. Otherwise,
|
||||
* some of the subtables might have become insane after they
|
||||
* were sanity-checked by the edits of subsequent subtables.
|
||||
* https://bugs.chromium.org/p/chromium/issues/detail?id=960331
|
||||
*/
|
||||
unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
|
||||
unsigned int count = get_subtable_count ();
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
@ -791,7 +770,6 @@ struct Lookup
|
||||
return_trace (false);
|
||||
}
|
||||
return_trace (true);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
private:
|
||||
@ -826,7 +804,7 @@ struct CoverageFormat1
|
||||
}
|
||||
|
||||
template <typename Iterator,
|
||||
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
|
||||
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
@ -864,6 +842,8 @@ struct CoverageFormat1
|
||||
bool more () const { return i < c->glyphArray.len; }
|
||||
void next () { i++; }
|
||||
hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return i != o.i || c != o.c; }
|
||||
|
||||
private:
|
||||
const struct CoverageFormat1 *c;
|
||||
@ -893,7 +873,7 @@ struct CoverageFormat2
|
||||
}
|
||||
|
||||
template <typename Iterator,
|
||||
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
|
||||
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
@ -904,30 +884,36 @@ struct CoverageFormat2
|
||||
rangeRecord.len = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
/* TODO(iter) Port to non-random-access iterator interface. */
|
||||
unsigned int count = glyphs.len ();
|
||||
|
||||
unsigned int num_ranges = 1;
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
if (glyphs[i - 1] + 1 != glyphs[i])
|
||||
num_ranges++;
|
||||
rangeRecord.len = num_ranges;
|
||||
if (unlikely (!c->extend (rangeRecord))) return_trace (false);
|
||||
/* TODO(iter) Write more efficiently? */
|
||||
|
||||
unsigned int range = 0;
|
||||
rangeRecord[range].start = glyphs[0];
|
||||
rangeRecord[range].value = 0;
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
unsigned num_ranges = 0;
|
||||
hb_codepoint_t last = (hb_codepoint_t) -2;
|
||||
for (auto g: glyphs)
|
||||
{
|
||||
if (glyphs[i - 1] + 1 != glyphs[i])
|
||||
{
|
||||
rangeRecord[range].end = glyphs[i - 1];
|
||||
range++;
|
||||
rangeRecord[range].start = glyphs[i];
|
||||
rangeRecord[range].value = i;
|
||||
}
|
||||
if (last + 1 != g)
|
||||
num_ranges++;
|
||||
last = g;
|
||||
}
|
||||
rangeRecord[range].end = glyphs[count - 1];
|
||||
|
||||
if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
|
||||
|
||||
unsigned count = 0;
|
||||
unsigned range = (unsigned) -1;
|
||||
last = (hb_codepoint_t) -2;
|
||||
for (auto g: glyphs)
|
||||
{
|
||||
if (last + 1 != g)
|
||||
{
|
||||
range++;
|
||||
rangeRecord[range].start = g;
|
||||
rangeRecord[range].value = count;
|
||||
}
|
||||
rangeRecord[range].end = g;
|
||||
last = g;
|
||||
count++;
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
@ -1016,6 +1002,8 @@ struct CoverageFormat2
|
||||
j++;
|
||||
}
|
||||
hb_codepoint_t get_glyph () const { return j; }
|
||||
bool operator != (const iter_t& o) const
|
||||
{ return i != o.i || j != o.j || c != o.c; }
|
||||
|
||||
private:
|
||||
const struct CoverageFormat2 *c;
|
||||
@ -1055,18 +1043,22 @@ struct Coverage
|
||||
}
|
||||
|
||||
template <typename Iterator,
|
||||
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
|
||||
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
|
||||
/* TODO(iter) Port to non-random-access iterator interface. */
|
||||
unsigned int count = glyphs.len ();
|
||||
unsigned int num_ranges = 1;
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
if (glyphs[i - 1] + 1 != glyphs[i])
|
||||
num_ranges++;
|
||||
unsigned count = 0;
|
||||
unsigned num_ranges = 0;
|
||||
hb_codepoint_t last = (hb_codepoint_t) -2;
|
||||
for (auto g: glyphs)
|
||||
{
|
||||
if (last + 1 != g)
|
||||
num_ranges++;
|
||||
last = g;
|
||||
count++;
|
||||
}
|
||||
u.format = count * 2 < num_ranges * 3 ? 1 : 2;
|
||||
|
||||
switch (u.format)
|
||||
@ -1165,6 +1157,16 @@ struct Coverage
|
||||
default:return 0;
|
||||
}
|
||||
}
|
||||
bool operator != (const iter_t& o) const
|
||||
{
|
||||
if (format != o.format) return true;
|
||||
switch (format)
|
||||
{
|
||||
case 1: return u.format1 != o.u.format1;
|
||||
case 2: return u.format2 != o.u.format2;
|
||||
default:return false;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int format;
|
||||
@ -1222,7 +1224,7 @@ struct ClassDefFormat1
|
||||
hb_codepoint_t glyph_max = +glyphs | hb_reduce (hb_max, 0u);
|
||||
|
||||
startGlyph = glyph_min;
|
||||
classValue.len = glyph_max - glyph_min + 1;
|
||||
c->check_assign (classValue.len, glyph_max - glyph_min + 1);
|
||||
if (unlikely (!c->extend (classValue))) return_trace (false);
|
||||
|
||||
for (unsigned int i = 0; i < glyphs.length; i++)
|
||||
@ -1999,10 +2001,10 @@ struct FeatureVariations
|
||||
return (this+record.substitutions).find_substitute (feature_index);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
FeatureVariations* copy (hb_serialize_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
return_trace (c->serializer->embed (*this));
|
||||
TRACE_SERIALIZE (this);
|
||||
return_trace (c->embed (*this));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -220,7 +220,7 @@ struct LigGlyph
|
||||
{
|
||||
if (caret_count)
|
||||
{
|
||||
hb_array_t <const OffsetTo<CaretValue> > array = carets.sub_array (start_offset, caret_count);
|
||||
hb_array_t <const OffsetTo<CaretValue>> array = carets.sub_array (start_offset, caret_count);
|
||||
unsigned int count = array.length;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
caret_array[i] = (this+array[i]).get_caret_value (font, direction, glyph_id, var_store);
|
||||
@ -296,7 +296,7 @@ struct MarkGlyphSetsFormat1
|
||||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
ArrayOf<LOffsetTo<Coverage> >
|
||||
ArrayOf<LOffsetTo<Coverage>>
|
||||
coverage; /* Array of long offsets to mark set
|
||||
* coverage tables */
|
||||
public:
|
||||
@ -439,7 +439,7 @@ struct GDEF
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct GDEF *out = c->serializer->embed (*this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
out->glyphClassDef.serialize_subset (c, this+glyphClassDef, out);
|
||||
|
@ -173,15 +173,15 @@ struct ValueFormat : HBUINT16
|
||||
return true;
|
||||
}
|
||||
|
||||
static OffsetTo<Device>& get_device (Value* value)
|
||||
{ return *CastP<OffsetTo<Device> > (value); }
|
||||
static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
|
||||
HB_INTERNAL static OffsetTo<Device>& get_device (Value* value)
|
||||
{ return *CastP<OffsetTo<Device>> (value); }
|
||||
HB_INTERNAL static const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
|
||||
{
|
||||
if (worked) *worked |= bool (*value);
|
||||
return *CastP<OffsetTo<Device> > (value);
|
||||
return *CastP<OffsetTo<Device>> (value);
|
||||
}
|
||||
|
||||
static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
|
||||
HB_INTERNAL static const HBINT16& get_short (const Value* value, bool *worked=nullptr)
|
||||
{
|
||||
if (worked) *worked |= bool (*value);
|
||||
return *CastP<HBINT16> (value);
|
||||
@ -393,7 +393,7 @@ struct AnchorMatrix
|
||||
|
||||
HBUINT16 rows; /* Number of rows */
|
||||
protected:
|
||||
UnsizedArrayOf<OffsetTo<Anchor> >
|
||||
UnsizedArrayOf<OffsetTo<Anchor>>
|
||||
matrixZ; /* Matrix of offsets to Anchor tables--
|
||||
* from beginning of AnchorMatrix table */
|
||||
public:
|
||||
@ -446,8 +446,8 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
|
||||
glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
|
||||
|
||||
hb_glyph_position_t &o = buffer->cur_pos();
|
||||
o.x_offset = round (base_x - mark_x);
|
||||
o.y_offset = round (base_y - mark_y);
|
||||
o.x_offset = roundf (base_x - mark_x);
|
||||
o.y_offset = roundf (base_y - mark_y);
|
||||
o.attach_type() = ATTACH_TYPE_MARK;
|
||||
o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
|
||||
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
|
||||
@ -576,14 +576,14 @@ struct SinglePosFormat2
|
||||
|
||||
struct SinglePos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -907,14 +907,14 @@ struct PairPosFormat2
|
||||
|
||||
struct PairPos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -993,32 +993,32 @@ struct CursivePosFormat1
|
||||
/* Main-direction adjustment */
|
||||
switch (c->direction) {
|
||||
case HB_DIRECTION_LTR:
|
||||
pos[i].x_advance = round (exit_x) + pos[i].x_offset;
|
||||
pos[i].x_advance = roundf (exit_x) + pos[i].x_offset;
|
||||
|
||||
d = round (entry_x) + pos[j].x_offset;
|
||||
d = roundf (entry_x) + pos[j].x_offset;
|
||||
pos[j].x_advance -= d;
|
||||
pos[j].x_offset -= d;
|
||||
break;
|
||||
case HB_DIRECTION_RTL:
|
||||
d = round (exit_x) + pos[i].x_offset;
|
||||
d = roundf (exit_x) + pos[i].x_offset;
|
||||
pos[i].x_advance -= d;
|
||||
pos[i].x_offset -= d;
|
||||
|
||||
pos[j].x_advance = round (entry_x) + pos[j].x_offset;
|
||||
pos[j].x_advance = roundf (entry_x) + pos[j].x_offset;
|
||||
break;
|
||||
case HB_DIRECTION_TTB:
|
||||
pos[i].y_advance = round (exit_y) + pos[i].y_offset;
|
||||
pos[i].y_advance = roundf (exit_y) + pos[i].y_offset;
|
||||
|
||||
d = round (entry_y) + pos[j].y_offset;
|
||||
d = roundf (entry_y) + pos[j].y_offset;
|
||||
pos[j].y_advance -= d;
|
||||
pos[j].y_offset -= d;
|
||||
break;
|
||||
case HB_DIRECTION_BTT:
|
||||
d = round (exit_y) + pos[i].y_offset;
|
||||
d = roundf (exit_y) + pos[i].y_offset;
|
||||
pos[i].y_advance -= d;
|
||||
pos[i].y_offset -= d;
|
||||
|
||||
pos[j].y_advance = round (entry_y);
|
||||
pos[j].y_advance = roundf (entry_y);
|
||||
break;
|
||||
case HB_DIRECTION_INVALID:
|
||||
default:
|
||||
@ -1092,13 +1092,13 @@ struct CursivePosFormat1
|
||||
|
||||
struct CursivePos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1208,13 +1208,13 @@ struct MarkBasePosFormat1
|
||||
|
||||
struct MarkBasePos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1287,7 +1287,7 @@ struct MarkLigPosFormat1
|
||||
unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
|
||||
unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
|
||||
if (lig_id && lig_id == mark_id && mark_comp > 0)
|
||||
comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
|
||||
comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
|
||||
else
|
||||
comp_index = comp_count - 1;
|
||||
|
||||
@ -1333,13 +1333,13 @@ struct MarkLigPosFormat1
|
||||
|
||||
struct MarkLigPos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1455,13 +1455,13 @@ struct MarkMarkPosFormat1
|
||||
|
||||
struct MarkMarkPos
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1507,20 +1507,20 @@ struct PosLookupSubTable
|
||||
Extension = 9
|
||||
};
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, lookup_type);
|
||||
switch (lookup_type) {
|
||||
case Single: return_trace (u.single.dispatch (c));
|
||||
case Pair: return_trace (u.pair.dispatch (c));
|
||||
case Cursive: return_trace (u.cursive.dispatch (c));
|
||||
case MarkBase: return_trace (u.markBase.dispatch (c));
|
||||
case MarkLig: return_trace (u.markLig.dispatch (c));
|
||||
case MarkMark: return_trace (u.markMark.dispatch (c));
|
||||
case Context: return_trace (u.context.dispatch (c));
|
||||
case ChainContext: return_trace (u.chainContext.dispatch (c));
|
||||
case Extension: return_trace (u.extension.dispatch (c));
|
||||
case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Pair: return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Cursive: return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case MarkBase: return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case MarkLig: return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case MarkMark: return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1576,14 +1576,14 @@ struct PosLookup : Lookup
|
||||
dispatch (&c);
|
||||
}
|
||||
|
||||
static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
|
||||
HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
|
||||
|
||||
template <typename context_t>
|
||||
static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
{ return Lookup::dispatch<SubTable> (c); }
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{ return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{ return Lookup::subset<SubTable> (c); }
|
||||
|
@ -85,12 +85,12 @@ struct SingleSubstFormat1
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
hb_sorted_array_t<const GlyphID> glyphs,
|
||||
int delta)
|
||||
unsigned delta)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
|
||||
deltaGlyphID = delta; /* TODO(serialize) overflow? */
|
||||
c->check_assign (deltaGlyphID, delta);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
@ -127,8 +127,8 @@ struct SingleSubstFormat1
|
||||
OffsetTo<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
HBINT16 deltaGlyphID; /* Add to original GlyphID to get
|
||||
* substitute GlyphID */
|
||||
HBUINT16 deltaGlyphID; /* Add to original GlyphID to get
|
||||
* substitute GlyphID, modulo 0x10000 */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6);
|
||||
};
|
||||
@ -231,15 +231,14 @@ struct SingleSubst
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (u.format))) return_trace (false);
|
||||
unsigned int format = 2;
|
||||
int delta = 0;
|
||||
unsigned format = 2;
|
||||
unsigned delta = 0;
|
||||
if (glyphs.length)
|
||||
{
|
||||
format = 1;
|
||||
/* TODO(serialize) check for wrap-around */
|
||||
delta = substitutes[0] - glyphs[0];
|
||||
delta = (unsigned) (substitutes[0] - glyphs[0]) & 0xFFFF;
|
||||
for (unsigned int i = 1; i < glyphs.length; i++)
|
||||
if (delta != (int) (substitutes[i] - glyphs[i])) {
|
||||
if (delta != ((unsigned) (substitutes[i] - glyphs[i]) & 0xFFFF)) {
|
||||
format = 2;
|
||||
break;
|
||||
}
|
||||
@ -252,14 +251,14 @@ struct SingleSubst
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -441,13 +440,13 @@ struct MultipleSubst
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -485,7 +484,7 @@ struct AlternateSet
|
||||
unsigned int shift = hb_ctz (lookup_mask);
|
||||
unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
|
||||
|
||||
/* If alt_index is MAX, randomize feature if it is the rand feature. */
|
||||
/* If alt_index is MAX_VALUE, randomize feature if it is the rand feature. */
|
||||
if (alt_index == HB_OT_MAP_MAX_VALUE && c->random)
|
||||
alt_index = c->random_number () % count + 1;
|
||||
|
||||
@ -615,13 +614,13 @@ struct AlternateSubst
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -793,7 +792,7 @@ struct LigatureSet
|
||||
if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
|
||||
for (unsigned int i = 0; i < ligatures.length; i++)
|
||||
{
|
||||
unsigned int component_count = MAX<int> (component_count_list[i] - 1, 0);
|
||||
unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0);
|
||||
if (unlikely (!ligature[i].serialize (c, this)
|
||||
.serialize (c,
|
||||
ligatures[i],
|
||||
@ -946,13 +945,13 @@ struct LigatureSubst
|
||||
}
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -984,7 +983,7 @@ struct ReverseChainSingleSubstFormat1
|
||||
if (!(this+coverage).intersects (glyphs))
|
||||
return false;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
|
||||
unsigned int count;
|
||||
|
||||
@ -1005,8 +1004,8 @@ struct ReverseChainSingleSubstFormat1
|
||||
{
|
||||
if (!intersects (c->glyphs)) return;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID>> (lookahead);
|
||||
|
||||
+ hb_zip (this+coverage, substitute)
|
||||
| hb_filter (*c->glyphs, hb_first)
|
||||
@ -1025,12 +1024,12 @@ struct ReverseChainSingleSubstFormat1
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(this+backtrack[i]).add_coverage (c->before))) return;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
count = lookahead.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(this+lookahead[i]).add_coverage (c->after))) return;
|
||||
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID>> (lookahead);
|
||||
count = substitute.len;
|
||||
c->output->add_array (substitute.arrayZ, substitute.len);
|
||||
}
|
||||
@ -1049,8 +1048,8 @@ struct ReverseChainSingleSubstFormat1
|
||||
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
|
||||
if (likely (index == NOT_COVERED)) return_trace (false);
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID>> (lookahead);
|
||||
|
||||
unsigned int start_index = 0, end_index = 0;
|
||||
if (match_backtrack (c,
|
||||
@ -1085,10 +1084,10 @@ struct ReverseChainSingleSubstFormat1
|
||||
TRACE_SANITIZE (this);
|
||||
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
|
||||
return_trace (false);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
if (!lookahead.sanitize (c, this))
|
||||
return_trace (false);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
|
||||
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID>> (lookahead);
|
||||
return_trace (substitute.sanitize (c));
|
||||
}
|
||||
|
||||
@ -1114,13 +1113,13 @@ struct ReverseChainSingleSubstFormat1
|
||||
|
||||
struct ReverseChainSingleSubst
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1154,19 +1153,19 @@ struct SubstLookupSubTable
|
||||
ReverseChainSingle = 8
|
||||
};
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, lookup_type);
|
||||
switch (lookup_type) {
|
||||
case Single: return_trace (u.single.dispatch (c));
|
||||
case Multiple: return_trace (u.multiple.dispatch (c));
|
||||
case Alternate: return_trace (u.alternate.dispatch (c));
|
||||
case Ligature: return_trace (u.ligature.dispatch (c));
|
||||
case Context: return_trace (u.context.dispatch (c));
|
||||
case ChainContext: return_trace (u.chainContext.dispatch (c));
|
||||
case Extension: return_trace (u.extension.dispatch (c));
|
||||
case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c));
|
||||
case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Multiple: return_trace (u.multiple.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Alternate: return_trace (u.alternate.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Ligature: return_trace (u.ligature.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, hb_forward<Ts> (ds)...));
|
||||
default: return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1194,7 +1193,7 @@ struct SubstLookup : Lookup
|
||||
const SubTable& get_subtable (unsigned int i) const
|
||||
{ return Lookup::get_subtable<SubTable> (i); }
|
||||
|
||||
static bool lookup_type_is_reverse (unsigned int lookup_type)
|
||||
HB_INTERNAL static bool lookup_type_is_reverse (unsigned int lookup_type)
|
||||
{ return lookup_type == SubTable::ReverseChainSingle; }
|
||||
|
||||
bool is_reverse () const
|
||||
@ -1252,7 +1251,7 @@ struct SubstLookup : Lookup
|
||||
return dispatch (c);
|
||||
}
|
||||
|
||||
static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
|
||||
HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
|
||||
|
||||
SubTable& serialize_subtable (hb_serialize_context_t *c,
|
||||
unsigned int i)
|
||||
@ -1315,12 +1314,12 @@ struct SubstLookup : Lookup
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
|
||||
HB_INTERNAL static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
|
||||
|
||||
static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
|
||||
HB_INTERNAL static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
|
||||
{
|
||||
if (!c->should_visit_lookup (lookup_index))
|
||||
return hb_void_t ();
|
||||
return hb_empty_t ();
|
||||
|
||||
hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);
|
||||
|
||||
@ -1332,9 +1331,9 @@ struct SubstLookup : Lookup
|
||||
return ret;
|
||||
}
|
||||
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
{ return Lookup::dispatch<SubTable> (c); }
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{ return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{ return Lookup::subset<SubTable> (c); }
|
||||
|
@ -59,13 +59,13 @@ struct hb_intersects_context_t :
|
||||
};
|
||||
|
||||
struct hb_closure_context_t :
|
||||
hb_dispatch_context_t<hb_closure_context_t, hb_void_t, 0>
|
||||
hb_dispatch_context_t<hb_closure_context_t, hb_empty_t, 0>
|
||||
{
|
||||
const char *get_name () { return "CLOSURE"; }
|
||||
typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
|
||||
template <typename T>
|
||||
return_t dispatch (const T &obj) { obj.closure (this); return hb_void_t (); }
|
||||
static return_t default_return_value () { return hb_void_t (); }
|
||||
return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
|
||||
static return_t default_return_value () { return hb_empty_t (); }
|
||||
void recurse (unsigned int lookup_index)
|
||||
{
|
||||
if (unlikely (nesting_level_left == 0 || !recurse_func))
|
||||
@ -151,13 +151,13 @@ struct hb_would_apply_context_t :
|
||||
|
||||
|
||||
struct hb_collect_glyphs_context_t :
|
||||
hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, 0>
|
||||
hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_empty_t, 0>
|
||||
{
|
||||
const char *get_name () { return "COLLECT_GLYPHS"; }
|
||||
typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
|
||||
template <typename T>
|
||||
return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_void_t (); }
|
||||
static return_t default_return_value () { return hb_void_t (); }
|
||||
return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
|
||||
static return_t default_return_value () { return hb_empty_t (); }
|
||||
void recurse (unsigned int lookup_index)
|
||||
{
|
||||
if (unlikely (nesting_level_left == 0 || !recurse_func))
|
||||
@ -286,7 +286,7 @@ struct hb_ot_apply_context_t :
|
||||
};
|
||||
|
||||
may_match_t may_match (const hb_glyph_info_t &info,
|
||||
const HBUINT16 *glyph_data) const
|
||||
const HBUINT16 *glyph_data) const
|
||||
{
|
||||
if (!(info.mask & mask) ||
|
||||
(syllable && syllable != info.syllable ()))
|
||||
@ -610,10 +610,10 @@ struct hb_ot_apply_context_t :
|
||||
|
||||
|
||||
struct hb_get_subtables_context_t :
|
||||
hb_dispatch_context_t<hb_get_subtables_context_t, hb_void_t, HB_DEBUG_APPLY>
|
||||
hb_dispatch_context_t<hb_get_subtables_context_t, hb_empty_t, HB_DEBUG_APPLY>
|
||||
{
|
||||
template <typename Type>
|
||||
static bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
|
||||
HB_INTERNAL static bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
|
||||
{
|
||||
const Type *typed_obj = (const Type *) obj;
|
||||
return typed_obj->apply (c);
|
||||
@ -652,9 +652,9 @@ struct hb_get_subtables_context_t :
|
||||
{
|
||||
hb_applicable_t *entry = array.push();
|
||||
entry->init (obj, apply_to<T>);
|
||||
return hb_void_t ();
|
||||
return hb_empty_t ();
|
||||
}
|
||||
static return_t default_return_value () { return hb_void_t (); }
|
||||
static return_t default_return_value () { return hb_empty_t (); }
|
||||
|
||||
hb_get_subtables_context_t (array_t &array_) :
|
||||
array (array_),
|
||||
@ -849,7 +849,7 @@ static inline bool match_input (hb_ot_apply_context_t *c,
|
||||
if (ligbase == LIGBASE_NOT_CHECKED)
|
||||
{
|
||||
bool found = false;
|
||||
const hb_glyph_info_t *out = buffer->out_info;
|
||||
const auto *out = buffer->out_info;
|
||||
unsigned int j = buffer->out_len;
|
||||
while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
|
||||
{
|
||||
@ -973,7 +973,7 @@ static inline bool ligate_input (hb_ot_apply_context_t *c,
|
||||
if (this_comp == 0)
|
||||
this_comp = last_num_components;
|
||||
unsigned int new_lig_comp = components_so_far - last_num_components +
|
||||
MIN (this_comp, last_num_components);
|
||||
hb_min (this_comp, last_num_components);
|
||||
_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
|
||||
}
|
||||
buffer->next_glyph ();
|
||||
@ -995,7 +995,7 @@ static inline bool ligate_input (hb_ot_apply_context_t *c,
|
||||
if (!this_comp)
|
||||
break;
|
||||
unsigned int new_lig_comp = components_so_far - last_num_components +
|
||||
MIN (this_comp, last_num_components);
|
||||
hb_min (this_comp, last_num_components);
|
||||
_hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
|
||||
} else
|
||||
break;
|
||||
@ -1173,7 +1173,7 @@ static inline bool apply_lookup (hb_ot_apply_context_t *c,
|
||||
else
|
||||
{
|
||||
/* NOTE: delta is negative. */
|
||||
delta = MAX (delta, (int) next - (int) count);
|
||||
delta = hb_max (delta, (int) next - (int) count);
|
||||
next -= delta;
|
||||
}
|
||||
|
||||
@ -1299,7 +1299,7 @@ struct Rule
|
||||
|
||||
void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
|
||||
{
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord> >
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
|
||||
(inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
|
||||
context_closure_lookup (c,
|
||||
inputCount, inputZ.arrayZ,
|
||||
@ -1310,7 +1310,7 @@ struct Rule
|
||||
void collect_glyphs (hb_collect_glyphs_context_t *c,
|
||||
ContextCollectGlyphsLookupContext &lookup_context) const
|
||||
{
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord> >
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
|
||||
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
|
||||
context_collect_glyphs_lookup (c,
|
||||
inputCount, inputZ.arrayZ,
|
||||
@ -1321,7 +1321,7 @@ struct Rule
|
||||
bool would_apply (hb_would_apply_context_t *c,
|
||||
ContextApplyLookupContext &lookup_context) const
|
||||
{
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord> >
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
|
||||
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
|
||||
return context_would_apply_lookup (c,
|
||||
inputCount, inputZ.arrayZ,
|
||||
@ -1333,7 +1333,7 @@ struct Rule
|
||||
ContextApplyLookupContext &lookup_context) const
|
||||
{
|
||||
TRACE_APPLY (this);
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord> >
|
||||
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
|
||||
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
|
||||
return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
|
||||
}
|
||||
@ -1751,7 +1751,7 @@ struct ContextFormat3
|
||||
HBUINT16 glyphCount; /* Number of glyphs in the input glyph
|
||||
* sequence */
|
||||
HBUINT16 lookupCount; /* Number of LookupRecords */
|
||||
UnsizedArrayOf<OffsetTo<Coverage> >
|
||||
UnsizedArrayOf<OffsetTo<Coverage>>
|
||||
coverageZ; /* Array of offsets to Coverage
|
||||
* table in glyph sequence order */
|
||||
/*UnsizedArrayOf<LookupRecord>
|
||||
@ -1763,15 +1763,15 @@ struct ContextFormat3
|
||||
|
||||
struct Context
|
||||
{
|
||||
template <typename context_t>
|
||||
typename context_t::return_t dispatch (context_t *c) const
|
||||
template <typename context_t, typename ...Ts>
|
||||
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_DISPATCH (this, u.format);
|
||||
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
|
||||
switch (u.format) {
|
||||
case 1: return_trace (c->dispatch (u.format1));
|
||||
case 2: return_trace (c->dispatch (u.format2));
|
||||
case 3: return_trace (c->dispatch (u.format3));
|
||||
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
|
||||
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
|
||||
case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
|
||||
default:return_trace (c->default_return_value ());
|
||||
}
|
||||
}
|
||||
@ -1923,8 +1923,8 @@ struct ChainRule
|
||||
{
|
||||
bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
|
||||
{
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
|
||||
return chain_context_intersects (glyphs,
|
||||
backtrack.len, backtrack.arrayZ,
|
||||
input.lenP1, input.arrayZ,
|
||||
@ -1935,9 +1935,9 @@ struct ChainRule
|
||||
void closure (hb_closure_context_t *c,
|
||||
ChainContextClosureLookupContext &lookup_context) const
|
||||
{
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
|
||||
chain_context_closure_lookup (c,
|
||||
backtrack.len, backtrack.arrayZ,
|
||||
input.lenP1, input.arrayZ,
|
||||
@ -1949,9 +1949,9 @@ struct ChainRule
|
||||
void collect_glyphs (hb_collect_glyphs_context_t *c,
|
||||
ChainContextCollectGlyphsLookupContext &lookup_context) const
|
||||
{
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
|
||||
chain_context_collect_glyphs_lookup (c,
|
||||
backtrack.len, backtrack.arrayZ,
|
||||
input.lenP1, input.arrayZ,
|
||||
@ -1963,9 +1963,9 @@ struct ChainRule
|
||||
bool would_apply (hb_would_apply_context_t *c,
|
||||
ChainContextApplyLookupContext &lookup_context) const
|
||||
{
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
|
||||
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
|
||||
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
|
||||
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
|
||||
return chain_context_would_apply_lookup (c,
|
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
@ -1976,9 +1976,9 @@ struct ChainRule
bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
return_trace (chain_context_apply_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
@ -1990,11 +1990,11 @@ struct ChainRule
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c)) return_trace (false);
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
if (!input.sanitize (c)) return_trace (false);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
if (!lookahead.sanitize (c)) return_trace (false);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
return_trace (lookup.sanitize (c));
}

@ -2330,12 +2330,12 @@ struct ChainContextFormat3
{
bool intersects (const hb_set_t *glyphs) const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);

if (!(this+input[0]).intersects (glyphs))
return false;

const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage},
{this, this, this}
@ -2349,13 +2349,13 @@ struct ChainContextFormat3

void closure (hb_closure_context_t *c) const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);

if (!(this+input[0]).intersects (c->glyphs))
return;

const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage},
{this, this, this}
@ -2370,12 +2370,12 @@ struct ChainContextFormat3

void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);

(this+input[0]).add_coverage (c->input);

const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_coverage},
{this, this, this}
@ -2390,9 +2390,9 @@ struct ChainContextFormat3

bool would_apply (hb_would_apply_context_t *c) const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{this, this, this}
@ -2406,20 +2406,20 @@ struct ChainContextFormat3

const Coverage &get_coverage () const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
return this+input[0];
}

bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);

unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);

const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{this, this, this}
@ -2442,12 +2442,12 @@ struct ChainContextFormat3
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c, this)) return_trace (false);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
if (!input.sanitize (c, this)) return_trace (false);
if (!input.len) return_trace (false); /* To be consistent with Context. */
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
if (!lookahead.sanitize (c, this)) return_trace (false);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
return_trace (lookup.sanitize (c));
}
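A side note on the recurring `StructAfter<...> >` to `StructAfter<...>>` edits in the hunks above: they only touch the whitespace that older C++ required between nested closing angle brackets. A minimal, self-contained sketch (the `ArrayOf` here is a stand-in, not HarfBuzz's type):

```cpp
// C++03 lexed ">>" at the end of a nested template-id as the right-shift
// operator, so "ArrayOf<ArrayOf<int> >" needed the space; C++11 fixed the
// grammar, which is what lets the code above drop it.
#include <vector>

template <typename T> struct ArrayOf { std::vector<T> items; };

int main ()
{
  ArrayOf<ArrayOf<int> > spaced;    // required spelling in C++03
  ArrayOf<ArrayOf<int>>  unspaced;  // equivalent, valid since C++11
  (void) spaced; (void) unspaced;
  return 0;
}
```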
@ -2474,15 +2474,15 @@ struct ChainContextFormat3

struct ChainContext
{
template <typename context_t>
typename context_t::return_t dispatch (context_t *c) const
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
case 3: return_trace (c->dispatch (u.format3));
case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
@ -2504,18 +2504,14 @@ struct ExtensionFormat1

template <typename X>
const X& get_subtable () const
{
unsigned int offset = extensionOffset;
if (unlikely (!offset)) return Null(typename T::SubTable);
return StructAtOffset<typename T::SubTable> (this, offset);
}
{ return this + CastR<LOffsetTo<typename T::SubTable>> (extensionOffset); }

template <typename context_t>
typename context_t::return_t dispatch (context_t *c) const
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, format);
if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type ()));
return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
}

/* This is called from may_dispatch() above with hb_sanitize_context_t. */
@ -2523,7 +2519,6 @@ struct ExtensionFormat1
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
extensionOffset != 0 &&
extensionLookupType != T::SubTable::Extension);
}

@ -2532,7 +2527,7 @@ struct ExtensionFormat1
HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
* by ExtensionOffset (i.e. the
* extension subtable). */
HBUINT32 extensionOffset; /* Offset to the extension subtable,
Offset32 extensionOffset; /* Offset to the extension subtable,
* of lookup type subtable. */
public:
DEFINE_SIZE_STATIC (8);
@ -2557,13 +2552,13 @@ struct Extension
}
}

template <typename context_t>
typename context_t::return_t dispatch (context_t *c) const
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (u.format1.dispatch (c));
case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
@ -2666,7 +2661,7 @@ struct GSUBGPOS
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
struct GSUBGPOS *out = c->serializer->embed (*this);
auto *out = c->serializer->embed (*this);
if (unlikely (!out)) return_trace (false);

out->scriptList.serialize_subset (c, this+scriptList, out);
@ -2674,13 +2669,13 @@ struct GSUBGPOS

typedef OffsetListOf<TLookup> TLookupList;
/* TODO Use intersects() to count how many subtables survive? */
CastR<OffsetTo<TLookupList> > (out->lookupList)
CastR<OffsetTo<TLookupList>> (out->lookupList)
.serialize_subset (c,
this+CastR<const OffsetTo<TLookupList> > (lookupList),
this+CastR<OffsetTo<TLookupList>> (lookupList),
out);

if (version.to_int () >= 0x00010001u)
out->featureVars.serialize_subset (c, this+featureVars, out);
out->featureVars.serialize_copy (c->serializer, this+featureVars, out);

return_trace (true);
}
@ -2700,7 +2695,7 @@ struct GSUBGPOS
likely (version.major == 1) &&
scriptList.sanitize (c, this) &&
featureList.sanitize (c, this) &&
CastR<OffsetTo<TLookupList> > (lookupList).sanitize (c, this) &&
CastR<OffsetTo<TLookupList>> (lookupList).sanitize (c, this) &&
(version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
}
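The `dispatch ()` signatures above gain a parameter pack so callers can thread extra arguments down through nested subtables; `hb_forward` plays the role of `std::forward`. A rough standalone sketch of the same perfect-forwarding pattern, with hypothetical types in place of the real context and subtable classes:

```cpp
#include <cstdio>
#include <utility>

struct Format1
{
  // Leaf subtable: receives whatever extra arguments the caller forwarded.
  template <typename context_t, typename ...Ts>
  int dispatch (context_t *c, Ts&&... ds) const
  { return c->handle (1, std::forward<Ts> (ds)...); }
};

struct Wrapper
{
  Format1 format1;

  // Forward the extra arguments untouched, in the spirit of the
  // "dispatch (c, hb_forward<Ts> (ds)...)" calls above.
  template <typename context_t, typename ...Ts>
  int dispatch (context_t *c, Ts&&... ds) const
  { return format1.dispatch (c, std::forward<Ts> (ds)...); }
};

struct print_context_t
{
  int handle (int format, int extra)
  { std::printf ("format %d, extra %d\n", format, extra); return format; }
};

int main ()
{
  Wrapper w;
  print_context_t c;
  return w.dispatch (&c, 42) == 1 ? 0 : 1;
}
```

The point of the pattern is that new state can be passed from the outermost caller to the innermost subtable without touching every intermediate signature again.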
@ -138,7 +138,7 @@ bool
OT::GDEF::is_blacklisted (hb_blob_t *blob,
hb_face_t *face) const
{
#if defined(HB_NO_OT_LAYOUT_BLACKLIST)
#ifdef HB_NO_OT_LAYOUT_BLACKLIST
return false;
#endif
/* The ugly business of blacklisting individual fonts' tables happen here!
@ -335,6 +335,12 @@ hb_ot_layout_get_attach_points (hb_face_t *face,
unsigned int *point_count /* IN/OUT */,
unsigned int *point_array /* OUT */)
{
#ifdef HB_NO_LAYOUT_UNUSED
if (point_count)
*point_count = 0;
return 0;
#endif

return face->table.GDEF->table->get_attach_points (glyph,
start_offset,
point_count,
@ -364,6 +370,12 @@ hb_ot_layout_get_ligature_carets (hb_font_t *font,
unsigned int *caret_count /* IN/OUT */,
hb_position_t *caret_array /* OUT */)
{
#ifdef HB_NO_LAYOUT_UNUSED
if (caret_count)
*caret_count = 0;
return 0;
#endif

unsigned int result_caret_count = 0;
unsigned int result = font->face->table.GDEF->table->get_lig_carets (font, direction, glyph, start_offset, &result_caret_count, caret_array);
if (result)
@ -384,7 +396,7 @@ bool
OT::GSUB::is_blacklisted (hb_blob_t *blob HB_UNUSED,
hb_face_t *face) const
{
#if defined(HB_NO_OT_LAYOUT_BLACKLIST)
#ifdef HB_NO_OT_LAYOUT_BLACKLIST
return false;
#endif
/* Mac OS X prefers morx over GSUB. It also ships with various Indic fonts,
@ -412,7 +424,7 @@ bool
OT::GPOS::is_blacklisted (hb_blob_t *blob HB_UNUSED,
hb_face_t *face HB_UNUSED) const
{
#if defined(HB_NO_OT_LAYOUT_BLACKLIST)
#ifdef HB_NO_OT_LAYOUT_BLACKLIST
return false;
#endif
return false;
@ -500,6 +512,7 @@ hb_ot_layout_table_find_script (hb_face_t *face,
return false;
}

#ifndef HB_DISABLE_DEPRECATED
/**
* hb_ot_layout_table_choose_script:
* @face: #hb_face_t to work upon
@ -521,6 +534,7 @@ hb_ot_layout_table_choose_script (hb_face_t *face,
for (t = script_tags; *t; t++);
return hb_ot_layout_table_select_script (face, table_tag, t - script_tags, script_tags, script_index, chosen_script);
}
#endif

/**
* hb_ot_layout_table_select_script:
@ -672,6 +686,7 @@ hb_ot_layout_script_get_language_tags (hb_face_t *face,
}


#ifndef HB_DISABLE_DEPRECATED
/**
* hb_ot_layout_script_find_language:
* @face: #hb_face_t to work upon
@ -685,6 +700,8 @@ hb_ot_layout_script_get_language_tags (hb_face_t *face,
*
* Return value: true if the language tag is found, false otherwise
*
* Since: ??
* Deprecated: ??
**/
hb_bool_t
hb_ot_layout_script_find_language (hb_face_t *face,
@ -700,6 +717,7 @@ hb_ot_layout_script_find_language (hb_face_t *face,
&language_tag,
language_index);
}
#endif


/**
@ -716,7 +734,6 @@ hb_ot_layout_script_find_language (hb_face_t *face,
*
* Return value: true if the language tag is found, false otherwise
*
*
* Since: 2.0.0
**/
hb_bool_t
@ -1735,7 +1752,7 @@ hb_ot_layout_feature_get_characters (hb_face_t *face,
unsigned int len = 0;
if (char_count && characters && start_offset < cv_params.characters.len)
{
len = MIN (cv_params.characters.len - start_offset, *char_count);
len = hb_min (cv_params.characters.len - start_offset, *char_count);
for (unsigned int i = 0; i < len; ++i)
characters[i] = cv_params.characters[start_offset + i];
}

@ -94,7 +94,7 @@ HB_EXTERN hb_bool_t
hb_ot_layout_has_glyph_classes (hb_face_t *face);

/**
* hb_ot_layout_get_glyph_class:
* hb_ot_layout_glyph_class_t:
* @HB_OT_LAYOUT_GLYPH_CLASS_UNCLASSIFIED: Glyphs not matching the other classifications
* @HB_OT_LAYOUT_GLYPH_CLASS_BASE_GLYPH: Spacing, single characters, capable of accepting marks
* @HB_OT_LAYOUT_GLYPH_CLASS_LIGATURE: Glyphs that represent ligation of multiple characters

@ -188,12 +188,12 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
feature_infos[j].default_value = feature_infos[i].default_value;
} else {
feature_infos[j].flags &= ~F_GLOBAL;
feature_infos[j].max_value = MAX (feature_infos[j].max_value, feature_infos[i].max_value);
feature_infos[j].max_value = hb_max (feature_infos[j].max_value, feature_infos[i].max_value);
/* Inherit default_value from j */
}
feature_infos[j].flags |= (feature_infos[i].flags & F_HAS_FALLBACK);
feature_infos[j].stage[0] = MIN (feature_infos[j].stage[0], feature_infos[i].stage[0]);
feature_infos[j].stage[1] = MIN (feature_infos[j].stage[1], feature_infos[i].stage[1]);
feature_infos[j].stage[0] = hb_min (feature_infos[j].stage[0], feature_infos[i].stage[0]);
feature_infos[j].stage[1] = hb_min (feature_infos[j].stage[1], feature_infos[i].stage[1]);
}
feature_infos.shrink (j + 1);
}
@ -213,34 +213,34 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
bits_needed = 0;
else
/* Limit bits per feature. */
bits_needed = MIN(HB_OT_MAP_MAX_BITS, hb_bit_storage (info->max_value));
bits_needed = hb_min (HB_OT_MAP_MAX_BITS, hb_bit_storage (info->max_value));

if (!info->max_value || next_bit + bits_needed > 8 * sizeof (hb_mask_t))
continue; /* Feature disabled, or not enough bits. */


hb_bool_t found = false;
bool found = false;
unsigned int feature_index[2];
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
if (required_feature_tag[table_index] == info->tag)
required_feature_stage[table_index] = info->stage[table_index];

found |= hb_ot_layout_language_find_feature (face,
table_tags[table_index],
script_index[table_index],
language_index[table_index],
info->tag,
&feature_index[table_index]);
found |= (bool) hb_ot_layout_language_find_feature (face,
table_tags[table_index],
script_index[table_index],
language_index[table_index],
info->tag,
&feature_index[table_index]);
}
if (!found && (info->flags & F_GLOBAL_SEARCH))
{
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
found |= hb_ot_layout_table_find_feature (face,
table_tags[table_index],
info->tag,
&feature_index[table_index]);
found |= (bool) hb_ot_layout_table_find_feature (face,
table_tags[table_index],
info->tag,
&feature_index[table_index]);
}
}
if (!found && !(info->flags & F_HAS_FALLBACK))
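The `MIN`/`MAX` to `hb_min`/`hb_max` replacements above swap macros for function templates. The sketch below shows the practical difference under that assumption (it does not reproduce HarfBuzz's actual `hb_min` definition): a macro may re-evaluate its arguments, a function template evaluates each exactly once.

```cpp
#include <cstdio>

// Classic macro: expands both arguments textually, so each may be
// evaluated more than once.
#define MIN_MACRO(a, b) ((a) < (b) ? (a) : (b))

// Function-template version in the spirit of hb_min: each argument is
// evaluated exactly once, and the comparison is type-checked.
template <typename T>
static inline T min_tpl (T a, T b) { return a < b ? a : b; }

int main ()
{
  int calls = 0;
  auto next = [&] () { return ++calls; };

  int from_macro = MIN_MACRO (next (), 10);   // next() runs twice here
  int macro_calls = calls;

  calls = 0;
  int from_tpl = min_tpl (next (), 10);       // next() runs exactly once
  int tpl_calls = calls;

  std::printf ("macro evaluated next() %d times, template %d time(s)\n",
               macro_calls, tpl_calls);
  return (from_macro == 2 && from_tpl == 1) ? 0 : 1;
}
```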
@ -68,7 +68,7 @@ struct hb_ot_map_t
unsigned short random : 1;
hb_mask_t mask;

static int cmp (const void *pa, const void *pb)
HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
const lookup_map_t *a = (const lookup_map_t *) pa;
const lookup_map_t *b = (const lookup_map_t *) pb;
@ -247,7 +247,7 @@ struct hb_ot_map_builder_t
unsigned int default_value; /* for non-global features, what should the unset glyphs take */
unsigned int stage[2]; /* GSUB/GPOS */

static int cmp (const void *pa, const void *pb)
HB_INTERNAL static int cmp (const void *pa, const void *pb)
{
const feature_info_t *a = (const feature_info_t *) pa;
const feature_info_t *b = (const feature_info_t *) pb;

@ -664,7 +664,7 @@ struct MathVariants
/* Array of offsets to MathGlyphConstruction tables - from the beginning of
the MathVariants table, for shapes growing in vertical/horizontal
direction. */
UnsizedArrayOf<OffsetTo<MathGlyphConstruction> >
UnsizedArrayOf<OffsetTo<MathGlyphConstruction>>
glyphConstruction;

public:

@ -62,6 +62,10 @@
hb_bool_t
hb_ot_math_has_data (hb_face_t *face)
{
#ifdef HB_NO_MATH
return false;
#endif

return face->table.MATH->has_data ();
}

@ -86,6 +90,10 @@ hb_position_t
hb_ot_math_get_constant (hb_font_t *font,
hb_ot_math_constant_t constant)
{
#ifdef HB_NO_MATH
return 0;
#endif

return font->face->table.MATH->get_constant(constant, font);
}

@ -105,6 +113,10 @@ hb_position_t
hb_ot_math_get_glyph_italics_correction (hb_font_t *font,
hb_codepoint_t glyph)
{
#ifdef HB_NO_MATH
return 0;
#endif

return font->face->table.MATH->get_glyph_info().get_italics_correction (glyph, font);
}

@ -131,6 +143,10 @@ hb_position_t
hb_ot_math_get_glyph_top_accent_attachment (hb_font_t *font,
hb_codepoint_t glyph)
{
#ifdef HB_NO_MATH
return 0;
#endif

return font->face->table.MATH->get_glyph_info().get_top_accent_attachment (glyph, font);
}

@ -149,6 +165,10 @@ hb_bool_t
hb_ot_math_is_glyph_extended_shape (hb_face_t *face,
hb_codepoint_t glyph)
{
#ifdef HB_NO_MATH
return false;
#endif

return face->table.MATH->get_glyph_info().is_extended_shape (glyph);
}

@ -177,6 +197,10 @@ hb_ot_math_get_glyph_kerning (hb_font_t *font,
hb_ot_math_kern_t kern,
hb_position_t correction_height)
{
#ifdef HB_NO_MATH
return 0;
#endif

return font->face->table.MATH->get_glyph_info().get_kerning (glyph,
kern,
correction_height,
@ -214,6 +238,12 @@ hb_ot_math_get_glyph_variants (hb_font_t *font,
unsigned int *variants_count, /* IN/OUT */
hb_ot_math_glyph_variant_t *variants /* OUT */)
{
#ifdef HB_NO_MATH
if (variants_count)
*variants_count = 0;
return 0;
#endif

return font->face->table.MATH->get_variants().get_glyph_variants (glyph, direction, font,
start_offset,
variants_count,
@ -242,6 +272,10 @@ hb_position_t
hb_ot_math_get_min_connector_overlap (hb_font_t *font,
hb_direction_t direction)
{
#ifdef HB_NO_MATH
return 0;
#endif

return font->face->table.MATH->get_variants().get_min_connector_overlap (direction, font);
}

@ -279,6 +313,12 @@ hb_ot_math_get_glyph_assembly (hb_font_t *font,
hb_ot_math_glyph_part_t *parts, /* OUT */
hb_position_t *italics_correction /* OUT */)
{
#ifdef HB_NO_MATH
if (parts_count)
*parts_count = 0;
return 0;
#endif

return font->face->table.MATH->get_variants().get_glyph_parts (glyph,
direction,
font,
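The `#ifdef HB_NO_MATH` blocks added above all follow one shape: if the feature is compiled out, bail out with a neutral value before touching the table. A stripped-down sketch of that early-return gating (the macro and function names below are placeholders, not HarfBuzz API):

```cpp
#include <cstdio>

// Build with -DDEMO_NO_MATH to compile the feature out; the guarded early
// return then short-circuits the function, as in the guards above.
static int
demo_get_constant (int constant)
{
#ifdef DEMO_NO_MATH
  return 0;             // feature disabled at compile time: neutral default
#endif

  return constant * 2;  // stand-in for the real table lookup
}

int main ()
{
  std::printf ("%d\n", demo_get_constant (21));
  return 0;
}
```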
@ -24,6 +24,9 @@
* Google Author(s): Behdad Esfahbod
*/

#ifndef HB_OT_NAME_LANGUAGE_STATIC_HH
#define HB_OT_NAME_LANGUAGE_STATIC_HH

#include "hb-ot-name-language.hh"

/* Following two tables were generated by joining FreeType, FontConfig,
@ -427,6 +430,9 @@ _hb_ot_name_language_for (unsigned int code,
const hb_ot_language_map_t *array,
unsigned int len)
{
#ifdef HB_NO_OT_NAME_LANGUAGE
return HB_LANGUAGE_INVALID;
#endif
const hb_ot_language_map_t *entry = (const hb_ot_language_map_t *)
hb_bsearch (&code,
array,
@ -455,3 +461,5 @@ _hb_ot_name_language_for_mac_code (unsigned int code)
hb_mac_language_map,
ARRAY_LENGTH (hb_mac_language_map));
}

#endif /* HB_OT_NAME_LANGUAGE_STATIC_HH */

@ -51,6 +51,7 @@ struct NameRecord
{
hb_language_t language (hb_face_t *face) const
{
#ifndef HB_NO_OT_NAME_LANGUAGE
unsigned int p = platformID;
unsigned int l = languageID;

@ -60,9 +61,12 @@ struct NameRecord
if (p == 1)
return _hb_ot_name_language_for_mac_code (l);

#ifndef HB_NO_OT_NAME_LANGUAGE_AAT
if (p == 0)
return _hb_aat_language_get (face, l);
#endif

#endif
return HB_LANGUAGE_INVALID;
}

@ -93,11 +97,21 @@ struct NameRecord
return UNSUPPORTED;
}

NameRecord* copy (hb_serialize_context_t *c,
const void *src_base,
const void *dst_base) const
{
TRACE_SERIALIZE (this);
auto *out = c->embed (this);
if (unlikely (!out)) return_trace (nullptr);
out->offset.serialize_copy (c, src_base + offset, dst_base, length);
return_trace (out);
}

bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
/* We can check from base all the way up to the end of string... */
return_trace (c->check_struct (this) && c->check_range ((char *) base, (unsigned int) length + offset));
return_trace (c->check_struct (this) && offset.sanitize (c, base, length));
}

HBUINT16 platformID; /* Platform ID. */
@ -105,7 +119,8 @@ struct NameRecord
HBUINT16 languageID; /* Language ID. */
HBUINT16 nameID; /* Name ID. */
HBUINT16 length; /* String length (in bytes). */
HBUINT16 offset; /* String offset from start of storage area (in bytes). */
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
offset; /* String offset from start of storage area (in bytes). */
public:
DEFINE_SIZE_STATIC (12);
};
@ -156,15 +171,58 @@ struct name
unsigned int get_size () const
{ return min_size + count * nameRecordZ.item_size; }

template <typename Iterator,
hb_requires (hb_is_source_of (Iterator, const NameRecord &))>
bool serialize (hb_serialize_context_t *c,
Iterator it,
const void *src_string_pool)
{
TRACE_SERIALIZE (this);

if (unlikely (!c->extend_min ((*this)))) return_trace (false);

this->format = 0;
this->count = it.len ();

auto snap = c->snapshot ();
this->nameRecordZ.serialize (c, this->count);
this->stringOffset = c->length ();
c->revert (snap);

const void *dst_string_pool = &(this + this->stringOffset);

+ it
| hb_apply ([&] (const NameRecord& _) { c->copy (_, src_string_pool, dst_string_pool); })
;

if (unlikely (c->ran_out_of_room)) return_trace (false);

assert (this->stringOffset == c->length ());

return_trace (true);
}

bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);

name *name_prime = c->serializer->start_embed<name> ();
if (unlikely (!name_prime)) return_trace (false);

auto it =
+ nameRecordZ.as_array (count)
| hb_filter (c->plan->name_ids, &NameRecord::nameID)
;

name_prime->serialize (c->serializer, it, hb_addressof (this + stringOffset));
return_trace (name_prime->count);
}

bool sanitize_records (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
const void *string_pool = (this+stringOffset).arrayZ;
unsigned int _count = count;
/* Move to run-time?! */
for (unsigned int i = 0; i < _count; i++)
if (!nameRecordZ[i].sanitize (c, string_pool)) return_trace (false);
return_trace (true);
return_trace (nameRecordZ.sanitize (c, count, string_pool));
}

bool sanitize (hb_sanitize_context_t *c) const
@ -173,7 +231,8 @@ struct name
return_trace (c->check_struct (this) &&
likely (format == 0 || format == 1) &&
c->check_array (nameRecordZ.arrayZ, count) &&
c->check_range (this, stringOffset));
c->check_range (this, stringOffset) &&
sanitize_records (c));
}

struct accelerator_t
@ -263,7 +322,7 @@ struct name
/* We only implement format 0 for now. */
HBUINT16 format; /* Format selector (=0/1). */
HBUINT16 count; /* Number of name records. */
NNOffsetTo<UnsizedArrayOf<HBUINT8> >
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
stringOffset; /* Offset to start of string storage (from start of table). */
UnsizedArrayOf<NameRecord>
nameRecordZ; /* The name records where count is the number of records. */
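The new `subset ()` above keeps only the name records whose `nameID` is in the subset plan and then serializes the survivors, using HarfBuzz's lazy `hb_filter`/`hb_apply` iterator pipeline. A rough plain-C++ equivalent of the filtering step (the types and field names below are simplified stand-ins, not HarfBuzz's):

```cpp
#include <cstdint>
#include <cstdio>
#include <set>
#include <vector>

struct NameRecordLite { uint16_t name_id; uint16_t length; uint16_t offset; };

// Keep only records whose nameID the plan retains, preserving order --
// roughly what "nameRecordZ.as_array (count) | hb_filter (c->plan->name_ids,
// &NameRecord::nameID)" produces lazily above.
static std::vector<NameRecordLite>
filter_records (const std::vector<NameRecordLite> &records,
                const std::set<uint16_t>          &retained_name_ids)
{
  std::vector<NameRecordLite> out;
  for (const auto &r : records)
    if (retained_name_ids.count (r.name_id))
      out.push_back (r);   // the serialize step would copy record + string here
  return out;
}

int main ()
{
  std::vector<NameRecordLite> records = {{1, 4, 0}, {2, 4, 4}, {16, 8, 8}};
  std::set<uint16_t> keep = {1, 16};
  std::printf ("%zu records kept\n", filter_records (records, keep).size ());
  return 0;
}
```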
@ -58,6 +58,11 @@ const hb_ot_name_entry_t *
hb_ot_name_list_names (hb_face_t *face,
unsigned int *num_entries /* OUT */)
{
#ifdef HB_NO_NAME
if (num_entries)
*num_entries = 0;
return 0;
#endif
const OT::name_accelerator_t &name = *face->table.name;
if (num_entries) *num_entries = name.names.length;
return (const hb_ot_name_entry_t *) name.names;
@ -167,6 +172,11 @@ hb_ot_name_get_utf8 (hb_face_t *face,
unsigned int *text_size /* IN/OUT */,
char *text /* OUT */)
{
#ifdef HB_NO_NAME
if (text_size)
*text_size = 0;
return 0;
#endif
return hb_ot_name_get_utf<hb_utf8_t> (face, name_id, language, text_size,
(hb_utf8_t::codepoint_t *) text);
}
@ -194,6 +204,11 @@ hb_ot_name_get_utf16 (hb_face_t *face,
unsigned int *text_size /* IN/OUT */,
uint16_t *text /* OUT */)
{
#ifdef HB_NO_NAME
if (text_size)
*text_size = 0;
return 0;
#endif
return hb_ot_name_get_utf<hb_utf16_t> (face, name_id, language, text_size, text);
}

@ -220,5 +235,10 @@ hb_ot_name_get_utf32 (hb_face_t *face,
unsigned int *text_size /* IN/OUT */,
uint32_t *text /* OUT */)
{
#ifdef HB_NO_NAME
if (text_size)
*text_size = 0;
return 0;
#endif
return hb_ot_name_get_utf<hb_utf32_t> (face, name_id, language, text_size, text);
}

@ -131,7 +131,7 @@ struct post
hb_bytes_t s = find_glyph_name (glyph);
if (!s.length) return false;
if (!buf_len) return true;
unsigned int len = MIN (buf_len - 1, s.length);
unsigned int len = hb_min (buf_len - 1, s.length);
strncpy (buf, s.arrayZ, len);
buf[len] = '\0';
return true;

@ -383,7 +383,7 @@ arabic_fallback_shape (const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer)
{
#if defined(HB_NO_OT_SHAPE_COMPLEX_ARABIC_FALLBACK)
#ifdef HB_NO_OT_SHAPE_COMPLEX_ARABIC_FALLBACK
return;
#endif

@ -70,7 +70,7 @@ compose_hebrew (const hb_ot_shape_normalize_context_t *c,

bool found = (bool) c->unicode->compose (a, b, ab);

#if defined(HB_NO_OT_SHAPE_COMPLEX_HEBREW_FALLBACK)
#ifdef HB_NO_OT_SHAPE_COMPLEX_HEBREW_FALLBACK
return found;
#endif
Some files were not shown because too many files have changed in this diff