Bug Summary

File: jdk/src/java.desktop/share/native/libharfbuzz/hb-ot-layout-gsub-table.hh
Warning: line 1209, column 5
Value stored to 'count' is never read
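
This message comes from clang's deadcode.DeadStores checker (enabled in the clang invocation below via -analyzer-checker=deadcode), which fires whenever a value is stored to a variable that is never read afterwards. The flagged line itself falls past the end of this excerpt, so the following is only a minimal, hypothetical C++ sketch of the pattern the checker reports, not the actual code at line 1209:

// dead_store.cc -- hypothetical sketch of a DeadStores finding;
// analyze with: clang --analyze dead_store.cc
unsigned demo (unsigned a_len, unsigned b_len)
{
  unsigned int count;
  unsigned int sum = 0;

  count = a_len;   // fine: this store is read as the loop bound below
  for (unsigned int i = 0; i < count; i++)
    sum += i;

  count = b_len;   // dead store: 'count' is never read again before it
                   // goes out of scope, so the analyzer reports
                   // "Value stored to 'count' is never read"
  return sum;
}

The usual fix is to drop the redundant assignment or actually use the stored value.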

Annotated Source Code


clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name hb-ot-layout.cc -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 2 -mthread-model posix -fno-delete-null-pointer-checks -mframe-pointer=all -fmath-errno -fno-rounding-math -masm-verbose -mconstructor-aliases -munwind-tables -target-cpu x86-64 -dwarf-column-info -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib/llvm-10/lib/clang/10.0.0 -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/modules_include/java.base/linux -I /home/daniel/Projects/java/jdk/src/java.base/share/native/libjava -I /home/daniel/Projects/java/jdk/src/java.base/unix/native/libjava -I /home/daniel/Projects/java/jdk/src/hotspot/share/include -I /home/daniel/Projects/java/jdk/src/hotspot/os/posix/include -D LIBC=gnu -D _GNU_SOURCE -D _REENTRANT -D _LARGEFILE64_SOURCE -D LINUX -D DEBUG -D _LITTLE_ENDIAN -D ARCH="amd64" -D amd64 -D _LP64=1 -D GETPAGESIZE -D HAVE_MPROTECT -D HAVE_PTHREAD -D HAVE_SYSCONF -D HAVE_SYS_MMAN_H -D HAVE_UNISTD_H -D HB_NO_PRAGMA_GCC_DIAGNOSTIC -D HAVE_INTEL_ATOMIC_PRIMITIVES -I /usr/include/freetype2 -D LE_STANDALONE -D HEADLESS -I /home/daniel/Projects/java/jdk/src/java.desktop/unix/native/libfontmanager -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libfontmanager -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libharfbuzz -I /home/daniel/Projects/java/jdk/build/linux-x86_64-server-fastdebug/support/headers/java.desktop -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libharfbuzz -I /home/daniel/Projects/java/jdk/src/java.desktop/unix/native/common/awt -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/common/awt -I /home/daniel/Projects/java/jdk/src/java.desktop/unix/native/common/font -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/common/font -I /home/daniel/Projects/java/jdk/src/java.desktop/unix/native/libawt/java2d -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libawt/java2d -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libawt/java2d/pipe -I /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libawt/java2d/loops -D _FORTIFY_SOURCE=2 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/x86_64-linux-gnu/c++/7.5.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.5.0/../../../../include/c++/7.5.0/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-10/lib/clang/10.0.0/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include 
-internal-externc-isystem /usr/include -O3 -Wno-unused-parameter -Wno-unused -Wno-type-limits -Wno-missing-field-initializers -Wno-strict-aliasing -Wno-reorder -Wno-delete-non-virtual-dtor -Wno-strict-overflow -Wno-maybe-uninitialized -Wno-class-memaccess -Wno-unused-result -Wno-extra -std=c++14 -fdeprecated-macro -fdebug-compilation-dir /home/daniel/Projects/java/jdk/make -ferror-limit 19 -fmessage-length 0 -fvisibility hidden -stack-protector 1 -fgnuc-version=4.2.1 -fobjc-runtime=gcc -fcxx-exceptions -fexceptions -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -faddrsig -o /home/daniel/Projects/java/scan/2021-12-21-193737-8510-1 -x c++ /home/daniel/Projects/java/jdk/src/java.desktop/share/native/libharfbuzz/hb-ot-layout.cc
1/*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012,2013 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
30#define HB_OT_LAYOUT_GSUB_TABLE_HH
31
32#include "hb-ot-layout-gsubgpos.hh"
33
34
35namespace OT {
36
37typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;
38
39template<typename Iterator>
40static void SingleSubst_serialize (hb_serialize_context_t *c,
41 Iterator it);
42
43
44struct SingleSubstFormat1
45{
46 bool intersects (const hb_set_t *glyphs) const
47 { return (this+coverage).intersects (glyphs); }
48
49 void closure (hb_closure_context_t *c) const
50 {
51 unsigned d = deltaGlyphID;
52 + hb_iter (this+coverage)
53 | hb_filter (*c->glyphs)
54 | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
55 | hb_sink (c->output)
56 ;
57 }
58
59 void closure_lookups (hb_closure_lookups_context_t *c) const {}
60
61 void collect_glyphs (hb_collect_glyphs_context_t *c) const
62 {
63 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
64 unsigned d = deltaGlyphID;
65 + hb_iter (this+coverage)
66 | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
67 | hb_sink (c->output)
68 ;
69 }
70
71 const Coverage &get_coverage () const { return this+coverage; }
72
73 bool would_apply (hb_would_apply_context_t *c) const
74 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
75
76 bool apply (hb_ot_apply_context_t *c) const
77 {
78 TRACE_APPLY (this);
79 hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
80 unsigned int index = (this+coverage).get_coverage (glyph_id);
81 if (likely (index == NOT_COVERED)) return_trace (false);
82
83 /* According to the Adobe Annotated OpenType Suite, result is always
84 * limited to 16bit. */
85 glyph_id = (glyph_id + deltaGlyphID) & 0xFFFFu;
86 c->replace_glyph (glyph_id);
87
88 return_trace (true);
89 }
90
91 template<typename Iterator,
92 hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
93 bool serialize (hb_serialize_context_t *c,
94 Iterator glyphs,
95 unsigned delta)
96 {
97 TRACE_SERIALIZE (this);
98 if (unlikely (!c->extend_min (*this))) return_trace (false);
99 if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
100 c->check_assign (deltaGlyphID, delta);
101 return_trace (true);
102 }
103
104 bool subset (hb_subset_context_t *c) const
105 {
106 TRACE_SUBSET (this);
107 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
108 const hb_map_t &glyph_map = *c->plan->glyph_map;
109
110 hb_codepoint_t delta = deltaGlyphID;
111
112 auto it =
113 + hb_iter (this+coverage)
114 | hb_filter (glyphset)
115 | hb_map_retains_sorting ([&] (hb_codepoint_t g) {
116 return hb_codepoint_pair_t (g,
117 (g + delta) & 0xFFFF); })
118 | hb_filter (glyphset, hb_second)
119 | hb_map_retains_sorting ([&] (hb_codepoint_pair_t p) -> hb_codepoint_pair_t
120 { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
121 ;
122
123 bool ret = bool (it);
124 SingleSubst_serialize (c->serializer, it);
125 return_trace (ret);
126 }
127
128 bool sanitize (hb_sanitize_context_t *c) const
129 {
130 TRACE_SANITIZE (this);
131 return_trace (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
132 }
133
134 protected:
135 HBUINT16 format; /* Format identifier--format = 1 */
136 OffsetTo<Coverage>
137 coverage; /* Offset to Coverage table--from
138 * beginning of Substitution table */
139 HBUINT16 deltaGlyphID; /* Add to original GlyphID to get
140 * substitute GlyphID, modulo 0x10000 */
141 public:
142 DEFINE_SIZE_STATIC (6);
143};
144
145struct SingleSubstFormat2
146{
147 bool intersects (const hb_set_t *glyphs) const
148 { return (this+coverage).intersects (glyphs); }
149
150 void closure (hb_closure_context_t *c) const
151 {
152 + hb_zip (this+coverage, substitute)
153 | hb_filter (*c->glyphs, hb_first)
154 | hb_map (hb_second)
155 | hb_sink (c->output)
156 ;
157 }
158
159 void closure_lookups (hb_closure_lookups_context_t *c) const {}
160
161 void collect_glyphs (hb_collect_glyphs_context_t *c) const
162 {
163 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
164 + hb_zip (this+coverage, substitute)
165 | hb_map (hb_second)
166 | hb_sink (c->output)
167 ;
168 }
169
170 const Coverage &get_coverage () const { return this+coverage; }
171
172 bool would_apply (hb_would_apply_context_t *c) const
173 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
174
175 bool apply (hb_ot_apply_context_t *c) const
176 {
177 TRACE_APPLY (this);
178 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
179 if (likely (index == NOT_COVERED)) return_trace (false);
180
181 if (unlikely (index >= substitute.len)) return_trace (false);
182
183 c->replace_glyph (substitute[index]);
184
185 return_trace (true);
186 }
187
188 template<typename Iterator,
189 hb_requires (hb_is_sorted_source_of (Iterator,
190 hb_codepoint_pair_t))>
191 bool serialize (hb_serialize_context_t *c,
192 Iterator it)
193 {
194 TRACE_SERIALIZE (this);
195 auto substitutes =
196 + it
197 | hb_map (hb_second)
198 ;
199 auto glyphs =
200 + it
201 | hb_map_retains_sorting (hb_first)
202 ;
203 if (unlikely (!c->extend_min (*this))) return_trace (false);
204 if (unlikely (!substitute.serialize (c, substitutes))) return_trace (false);
205 if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
206 return_trace (true);
207 }
208
209 bool subset (hb_subset_context_t *c) const
210 {
211 TRACE_SUBSET (this);
212 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
213 const hb_map_t &glyph_map = *c->plan->glyph_map;
214
215 auto it =
216 + hb_zip (this+coverage, substitute)
217 | hb_filter (glyphset, hb_first)
218 | hb_filter (glyphset, hb_second)
219 | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID &> p) -> hb_codepoint_pair_t
220 { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
221 ;
222
223 bool ret = bool (it);
224 SingleSubst_serialize (c->serializer, it);
225 return_trace (ret);
226 }
227
228 bool sanitize (hb_sanitize_context_t *c) const
229 {
230 TRACE_SANITIZE (this);
231 return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
232 }
233
234 protected:
235 HBUINT16 format; /* Format identifier--format = 2 */
236 OffsetTo<Coverage>
237 coverage; /* Offset to Coverage table--from
238 * beginning of Substitution table */
239 ArrayOf<HBGlyphID>
240 substitute; /* Array of substitute
241 * GlyphIDs--ordered by Coverage Index */
242 public:
243 DEFINE_SIZE_ARRAY (6, substitute);
244};
245
246struct SingleSubst
247{
248
249 template<typename Iterator,
250 hb_requires (hb_is_sorted_source_of (Iterator,
251 const hb_codepoint_pair_t))>
252 bool serialize (hb_serialize_context_t *c,
253 Iterator glyphs)
254 {
255 TRACE_SERIALIZE (this);
256 if (unlikely (!c->extend_min (u.format))) return_trace (false);
257 unsigned format = 2;
258 unsigned delta = 0;
259 if (glyphs)
260 {
261 format = 1;
262 auto get_delta = [=] (hb_codepoint_pair_t _)
263 { return (unsigned) (_.second - _.first) & 0xFFFF; };
264 delta = get_delta (*glyphs);
265 if (!hb_all (++(+glyphs), delta, get_delta)) format = 2;
266 }
267 u.format = format;
268 switch (u.format) {
269 case 1: return_trace (u.format1.serialize (c,
270 + glyphs
271 | hb_map_retains_sorting (hb_first),
272 delta));
273 case 2: return_trace (u.format2.serialize (c, glyphs));
274 default:return_trace (false);
275 }
276 }
277
278 template <typename context_t, typename ...Ts>
279 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
280 {
281 TRACE_DISPATCH (this, u.format);
282 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
283 switch (u.format) {
284 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
285 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
286 default:return_trace (c->default_return_value ());
287 }
288 }
289
290 protected:
291 union {
292 HBUINT16 format; /* Format identifier */
293 SingleSubstFormat1 format1;
294 SingleSubstFormat2 format2;
295 } u;
296};
297
298template<typename Iterator>
299static void
300SingleSubst_serialize (hb_serialize_context_t *c,
301 Iterator it)
302{ c->start_embed<SingleSubst> ()->serialize (c, it); }
303
304struct Sequence
305{
306 bool intersects (const hb_set_t *glyphs) const
307 { return hb_all (substitute, glyphs); }
308
309 void closure (hb_closure_context_t *c) const
310 { c->output->add_array (substitute.arrayZ, substitute.len); }
311
312 void collect_glyphs (hb_collect_glyphs_context_t *c) const
313 { c->output->add_array (substitute.arrayZ, substitute.len); }
314
315 bool apply (hb_ot_apply_context_t *c) const
316 {
317 TRACE_APPLY (this);
318 unsigned int count = substitute.len;
319
320 /* Special-case to make it in-place and not consider this
321 * as a "multiplied" substitution. */
322 if (unlikely (count == 1))
323 {
324 c->replace_glyph (substitute.arrayZ[0]);
325 return_trace (true);
326 }
327 /* Spec disallows this, but Uniscribe allows it.
328 * https://github.com/harfbuzz/harfbuzz/issues/253 */
329 else if (unlikely (count == 0))
330 {
331 c->buffer->delete_glyph ();
332 return_trace (true);
333 }
334
335 unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
336 HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
337
338 for (unsigned int i = 0; i < count; i++) {
339 _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
340 c->output_glyph_for_component (substitute.arrayZ[i], klass);
341 }
342 c->buffer->skip_glyph ();
343
344 return_trace (true);
345 }
346
347 template <typename Iterator,
348 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
349 bool serialize (hb_serialize_context_t *c,
350 Iterator subst)
351 {
352 TRACE_SERIALIZE (this);
353 return_trace (substitute.serialize (c, subst));
354 }
355
356 bool subset (hb_subset_context_t *c) const
357 {
358 TRACE_SUBSET (this);
359 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
360 const hb_map_t &glyph_map = *c->plan->glyph_map;
361
362 if (!intersects (&glyphset)) return_trace (false);
363
364 auto it =
365 + hb_iter (substitute)
366 | hb_map (glyph_map)
367 ;
368
369 auto *out = c->serializer->start_embed (*this);
370 return_trace (out->serialize (c->serializer, it));
371 }
372
373 bool sanitize (hb_sanitize_context_t *c) const
374 {
375 TRACE_SANITIZE (this);
376 return_trace (substitute.sanitize (c));
377 }
378
379 protected:
380 ArrayOf<HBGlyphID>
381 substitute; /* String of GlyphIDs to substitute */
382 public:
383 DEFINE_SIZE_ARRAY (2, substitute);
384};
385
386struct MultipleSubstFormat1
387{
388 bool intersects (const hb_set_t *glyphs) const
389 { return (this+coverage).intersects (glyphs); }
390
391 void closure (hb_closure_context_t *c) const
392 {
393 + hb_zip (this+coverage, sequence)
394 | hb_filter (*c->glyphs, hb_first)
395 | hb_map (hb_second)
396 | hb_map (hb_add (this))
397 | hb_apply ([c] (const Sequence &_) { _.closure (c); })
398 ;
399 }
400
401 void closure_lookups (hb_closure_lookups_context_t *c) const {}
402
403 void collect_glyphs (hb_collect_glyphs_context_t *c) const
404 {
405 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
406 + hb_zip (this+coverage, sequence)
407 | hb_map (hb_second)
408 | hb_map (hb_add (this))
409 | hb_apply ([c] (const Sequence &_) { _.collect_glyphs (c); })
410 ;
411 }
412
413 const Coverage &get_coverage () const { return this+coverage; }
414
415 bool would_apply (hb_would_apply_context_t *c) const
416 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
417
418 bool apply (hb_ot_apply_context_t *c) const
419 {
420 TRACE_APPLY (this);
421
422 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
423 if (likely (index == NOT_COVERED)) return_trace (false);
424
425 return_trace ((this+sequence[index]).apply (c));
426 }
427
428 bool serialize (hb_serialize_context_t *c,
429 hb_sorted_array_t<const HBGlyphID> glyphs,
430 hb_array_t<const unsigned int> substitute_len_list,
431 hb_array_t<const HBGlyphID> substitute_glyphs_list)
432 {
433 TRACE_SERIALIZE (this);
434 if (unlikely (!c->extend_min (*this))) return_trace (false);
435 if (unlikely (!sequence.serialize (c, glyphs.length))) return_trace (false);
436 for (unsigned int i = 0; i < glyphs.length; i++)
437 {
438 unsigned int substitute_len = substitute_len_list[i];
439 if (unlikely (!sequence[i].serialize (c, this)
440 .serialize (c, substitute_glyphs_list.sub_array (0, substitute_len))))
441 return_trace (false);
442 substitute_glyphs_list += substitute_len;
443 }
444 return_trace (coverage.serialize (c, this).serialize (c, glyphs));
445 }
446
447 bool subset (hb_subset_context_t *c) const
448 {
449 TRACE_SUBSET (this);
450 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
451 const hb_map_t &glyph_map = *c->plan->glyph_map;
452
453 auto *out = c->serializer->start_embed (*this);
454 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
455 out->format = format;
456
457 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
458 + hb_zip (this+coverage, sequence)
459 | hb_filter (glyphset, hb_first)
460 | hb_filter (subset_offset_array (c, out->sequence, this), hb_second)
461 | hb_map (hb_first)
462 | hb_map (glyph_map)
463 | hb_sink (new_coverage)
464 ;
465 out->coverage.serialize (c->serializer, out)
466 .serialize (c->serializer, new_coverage.iter ());
467 return_trace (bool (new_coverage));
468 }
469
470 bool sanitize (hb_sanitize_context_t *c) const
471 {
472 TRACE_SANITIZE (this);
473 return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
474 }
475
476 protected:
477 HBUINT16 format; /* Format identifier--format = 1 */
478 OffsetTo<Coverage>
479 coverage; /* Offset to Coverage table--from
480 * beginning of Substitution table */
481 OffsetArrayOf<Sequence>
482 sequence; /* Array of Sequence tables
483 * ordered by Coverage Index */
484 public:
485 DEFINE_SIZE_ARRAY (6, sequence);
486};
487
488struct MultipleSubst
489{
490 bool serialize (hb_serialize_context_t *c,
491 hb_sorted_array_t<const HBGlyphID> glyphs,
492 hb_array_t<const unsigned int> substitute_len_list,
493 hb_array_t<const HBGlyphID> substitute_glyphs_list)
494 {
495 TRACE_SERIALIZE (this);
496 if (unlikely (!c->extend_min (u.format))) return_trace (false);
497 unsigned int format = 1;
498 u.format = format;
499 switch (u.format) {
500 case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list));
501 default:return_trace (false);
502 }
503 }
504
505 template <typename context_t, typename ...Ts>
506 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
507 {
508 TRACE_DISPATCH (this, u.format);
509 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
510 switch (u.format) {
511 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
512 default:return_trace (c->default_return_value ());
513 }
514 }
515
516 protected:
517 union {
518 HBUINT16 format; /* Format identifier */
519 MultipleSubstFormat1 format1;
520 } u;
521};
522
523struct AlternateSet
524{
525 bool intersects (const hb_set_t *glyphs) const
526 { return hb_any (alternates, glyphs); }
527
528 void closure (hb_closure_context_t *c) const
529 { c->output->add_array (alternates.arrayZ, alternates.len); }
530
531 void collect_glyphs (hb_collect_glyphs_context_t *c) const
532 { c->output->add_array (alternates.arrayZ, alternates.len); }
533
534 bool apply (hb_ot_apply_context_t *c) const
535 {
536 TRACE_APPLY (this);
537 unsigned int count = alternates.len;
538
539 if (unlikely (!count)) return_trace (false);
540
541 hb_mask_t glyph_mask = c->buffer->cur().mask;
542 hb_mask_t lookup_mask = c->lookup_mask;
543
544 /* Note: This breaks badly if two features enabled this lookup together. */
545 unsigned int shift = hb_ctz (lookup_mask);
546 unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
547
548 /* If alt_index is MAX_VALUE, randomize feature if it is the rand feature. */
549 if (alt_index == HB_OT_MAP_MAX_VALUE && c->random)
550 alt_index = c->random_number () % count + 1;
551
552 if (unlikely (alt_index > count || alt_index == 0)) return_trace (false);
553
554 c->replace_glyph (alternates[alt_index - 1]);
555
556 return_trace (true);
557 }
558
559 unsigned
560 get_alternates (unsigned start_offset,
561 unsigned *alternate_count /* IN/OUT. May be NULL. */,
562 hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
563 {
564 if (alternates.len && alternate_count)
565 {
566 + alternates.sub_array (start_offset, alternate_count)
567 | hb_sink (hb_array (alternate_glyphs, *alternate_count))
568 ;
569 }
570 return alternates.len;
571 }
572
573 template <typename Iterator,
574 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
575 bool serialize (hb_serialize_context_t *c,
576 Iterator alts)
577 {
578 TRACE_SERIALIZE (this);
579 return_trace (alternates.serialize (c, alts));
580 }
581
582 bool subset (hb_subset_context_t *c) const
583 {
584 TRACE_SUBSET (this);
585 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
586 const hb_map_t &glyph_map = *c->plan->glyph_map;
587
588 auto it =
589 + hb_iter (alternates)
590 | hb_filter (glyphset)
591 | hb_map (glyph_map)
592 ;
593
594 auto *out = c->serializer->start_embed (*this);
595 return_trace (out->serialize (c->serializer, it) &&
596 out->alternates);
597 }
598
599 bool sanitize (hb_sanitize_context_t *c) const
600 {
601 TRACE_SANITIZE (this);
602 return_trace (alternates.sanitize (c));
603 }
604
605 protected:
606 ArrayOf<HBGlyphID>
607 alternates; /* Array of alternate GlyphIDs--in
608 * arbitrary order */
609 public:
610 DEFINE_SIZE_ARRAY (2, alternates);
611};
612
613struct AlternateSubstFormat1
614{
615 bool intersects (const hb_set_t *glyphs) const
616 { return (this+coverage).intersects (glyphs); }
617
618 void closure (hb_closure_context_t *c) const
619 {
620 + hb_zip (this+coverage, alternateSet)
621 | hb_filter (c->glyphs, hb_first)
622 | hb_map (hb_second)
623 | hb_map (hb_add (this))
624 | hb_apply ([c] (const AlternateSet &_) { _.closure (c); })
625 ;
626 }
627
628 void closure_lookups (hb_closure_lookups_context_t *c) const {}
629
630 void collect_glyphs (hb_collect_glyphs_context_t *c) const
631 {
632 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
633 + hb_zip (this+coverage, alternateSet)
634 | hb_map (hb_second)
635 | hb_map (hb_add (this))
636 | hb_apply ([c] (const AlternateSet &_) { _.collect_glyphs (c); })
637 ;
638 }
639
640 const Coverage &get_coverage () const { return this+coverage; }
641
642 bool would_apply (hb_would_apply_context_t *c) const
643 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
644
645 unsigned
646 get_glyph_alternates (hb_codepoint_t gid,
647 unsigned start_offset,
648 unsigned *alternate_count /* IN/OUT. May be NULL. */,
649 hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
650 { return (this+alternateSet[(this+coverage).get_coverage (gid)])
651 .get_alternates (start_offset, alternate_count, alternate_glyphs); }
652
653 bool apply (hb_ot_apply_context_t *c) const
654 {
655 TRACE_APPLY (this);
656
657 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
658 if (likely (index == NOT_COVERED)) return_trace (false);
659
660 return_trace ((this+alternateSet[index]).apply (c));
661 }
662
663 bool serialize (hb_serialize_context_t *c,
664 hb_sorted_array_t<const HBGlyphID> glyphs,
665 hb_array_t<const unsigned int> alternate_len_list,
666 hb_array_t<const HBGlyphID> alternate_glyphs_list)
667 {
668 TRACE_SERIALIZE (this);
669 if (unlikely (!c->extend_min (*this))) return_trace (false);
670 if (unlikely (!alternateSet.serialize (c, glyphs.length))) return_trace (false);
671 for (unsigned int i = 0; i < glyphs.length; i++)
672 {
673 unsigned int alternate_len = alternate_len_list[i];
674 if (unlikely (!alternateSet[i].serialize (c, this)
675 .serialize (c, alternate_glyphs_list.sub_array (0, alternate_len))))
676 return_trace (false);
677 alternate_glyphs_list += alternate_len;
678 }
679 return_trace (coverage.serialize (c, this).serialize (c, glyphs));
680 }
681
682 bool subset (hb_subset_context_t *c) const
683 {
684 TRACE_SUBSET (this);
685 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
686 const hb_map_t &glyph_map = *c->plan->glyph_map;
687
688 auto *out = c->serializer->start_embed (*this);
689 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
690 out->format = format;
691
692 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
693 + hb_zip (this+coverage, alternateSet)
694 | hb_filter (glyphset, hb_first)
695 | hb_filter (subset_offset_array (c, out->alternateSet, this), hb_second)
696 | hb_map (hb_first)
697 | hb_map (glyph_map)
698 | hb_sink (new_coverage)
699 ;
700 out->coverage.serialize (c->serializer, out)
701 .serialize (c->serializer, new_coverage.iter ());
702 return_trace (bool (new_coverage));
703 }
704
705 bool sanitize (hb_sanitize_context_t *c) const
706 {
707 TRACE_SANITIZE (this);
708 return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
709 }
710
711 protected:
712 HBUINT16 format; /* Format identifier--format = 1 */
713 OffsetTo<Coverage>
714 coverage; /* Offset to Coverage table--from
715 * beginning of Substitution table */
716 OffsetArrayOf<AlternateSet>
717 alternateSet; /* Array of AlternateSet tables
718 * ordered by Coverage Index */
719 public:
720 DEFINE_SIZE_ARRAY (6, alternateSet);
721};
722
723struct AlternateSubst
724{
725 bool serialize (hb_serialize_context_t *c,
726 hb_sorted_array_t<const HBGlyphID> glyphs,
727 hb_array_t<const unsigned int> alternate_len_list,
728 hb_array_t<const HBGlyphID> alternate_glyphs_list)
729 {
730 TRACE_SERIALIZE (this);
731 if (unlikely (!c->extend_min (u.format))) return_trace (false);
732 unsigned int format = 1;
733 u.format = format;
734 switch (u.format) {
735 case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, alternate_glyphs_list));
736 default:return_trace (false);
737 }
738 }
739
740 template <typename context_t, typename ...Ts>
741 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
742 {
743 TRACE_DISPATCH (this, u.format);
744 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
745 switch (u.format) {
746 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
747 default:return_trace (c->default_return_value ());
748 }
749 }
750
751 protected:
752 union {
753 HBUINT16 format; /* Format identifier */
754 AlternateSubstFormat1 format1;
755 } u;
756};
757
758
759struct Ligature
760{
761 bool intersects (const hb_set_t *glyphs) const
762 { return hb_all (component, glyphs); }
763
764 void closure (hb_closure_context_t *c) const
765 {
766 if (!intersects (c->glyphs)) return;
767 c->output->add (ligGlyph);
768 }
769
770 void collect_glyphs (hb_collect_glyphs_context_t *c) const
771 {
772 c->input->add_array (component.arrayZ, component.get_length ());
773 c->output->add (ligGlyph);
774 }
775
776 bool would_apply (hb_would_apply_context_t *c) const
777 {
778 if (c->len != component.lenP1)
779 return false;
780
781 for (unsigned int i = 1; i < c->len; i++)
782 if (likely (c->glyphs[i] != component[i]))
783 return false;
784
785 return true;
786 }
787
788 bool apply (hb_ot_apply_context_t *c) const
789 {
790 TRACE_APPLY (this);
791 unsigned int count = component.lenP1;
792
793 if (unlikely (!count)) return_trace (false);
794
795 /* Special-case to make it in-place and not consider this
796 * as a "ligated" substitution. */
797 if (unlikely (count == 1))
798 {
799 c->replace_glyph (ligGlyph);
800 return_trace (true);
801 }
802
803 unsigned int total_component_count = 0;
804
805 unsigned int match_length = 0;
806 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
807
808 if (likely (!match_input (c, count,
809 &component[1],
810 match_glyph,
811 nullptr,
812 &match_length,
813 match_positions,
814 &total_component_count)))
815 return_trace (false);
816
817 ligate_input (c,
818 count,
819 match_positions,
820 match_length,
821 ligGlyph,
822 total_component_count);
823
824 return_trace (true);
825 }
826
827 template <typename Iterator,
828 hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
829 bool serialize (hb_serialize_context_t *c,
830 hb_codepoint_t ligature,
831 Iterator components /* Starting from second */)
832 {
833 TRACE_SERIALIZE (this);
834 if (unlikely (!c->extend_min (*this))) return_trace (false);
835 ligGlyph = ligature;
836 if (unlikely (!component.serialize (c, components))) return_trace (false);
837 return_trace (true);
838 }
839
840 bool subset (hb_subset_context_t *c) const
841 {
842 TRACE_SUBSET (this);
843 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
844 const hb_map_t &glyph_map = *c->plan->glyph_map;
845
846 if (!intersects (&glyphset) || !glyphset.has (ligGlyph)) return_trace (false);
847
848 auto it =
849 + hb_iter (component)
850 | hb_map (glyph_map)
851 ;
852
853 auto *out = c->serializer->start_embed (*this);
854 return_trace (out->serialize (c->serializer,
855 glyph_map[ligGlyph],
856 it));
857 }
858
859 public:
860 bool sanitize (hb_sanitize_context_t *c) const
861 {
862 TRACE_SANITIZE (this);
863 return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
864 }
865
866 protected:
867 HBGlyphID ligGlyph; /* GlyphID of ligature to substitute */
868 HeadlessArrayOf<HBGlyphID>
869 component; /* Array of component GlyphIDs--start
870 * with the second component--ordered
871 * in writing direction */
872 public:
873 DEFINE_SIZE_ARRAY (4, component);
874};
875
876struct LigatureSet
877{
878 bool intersects (const hb_set_t *glyphs) const
879 {
880 return
881 + hb_iter (ligature)
882 | hb_map (hb_add (this))
883 | hb_map ([glyphs] (const Ligature &_) { return _.intersects (glyphs); })
884 | hb_any
885 ;
886 }
887
888 void closure (hb_closure_context_t *c) const
889 {
890 + hb_iter (ligature)
891 | hb_map (hb_add (this))
892 | hb_apply ([c] (const Ligature &_) { _.closure (c); })
893 ;
894 }
895
896 void collect_glyphs (hb_collect_glyphs_context_t *c) const
897 {
898 + hb_iter (ligature)
899 | hb_map (hb_add (this))
900 | hb_apply ([c] (const Ligature &_) { _.collect_glyphs (c); })
901 ;
902 }
903
904 bool would_apply (hb_would_apply_context_t *c) const
905 {
906 return
907 + hb_iter (ligature)
908 | hb_map (hb_add (this))
909 | hb_map ([c] (const Ligature &_) { return _.would_apply (c); })
910 | hb_any
911 ;
912 }
913
914 bool apply (hb_ot_apply_context_t *c) const
915 {
916 TRACE_APPLY (this);
917 unsigned int num_ligs = ligature.len;
918 for (unsigned int i = 0; i < num_ligs; i++)
919 {
920 const Ligature &lig = this+ligature[i];
921 if (lig.apply (c)) return_trace (true);
922 }
923
924 return_trace (false);
925 }
926
927 bool serialize (hb_serialize_context_t *c,
928 hb_array_t<const HBGlyphID> ligatures,
929 hb_array_t<const unsigned int> component_count_list,
930 hb_array_t<const HBGlyphID> &component_list /* Starting from second for each ligature */)
931 {
932 TRACE_SERIALIZE (this);
933 if (unlikely (!c->extend_min (*this))) return_trace (false);
934 if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
935 for (unsigned int i = 0; i < ligatures.length; i++)
936 {
937 unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0);
938 if (unlikely (!ligature[i].serialize (c, this)
939 .serialize (c,
940 ligatures[i],
941 component_list.sub_array (0, component_count))))
942 return_trace (false);
943 component_list += component_count;
944 }
945 return_trace (true);
946 }
947
948 bool subset (hb_subset_context_t *c) const
949 {
950 TRACE_SUBSET (this);
951 auto *out = c->serializer->start_embed (*this);
952 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
953
954 + hb_iter (ligature)
955 | hb_filter (subset_offset_array (c, out->ligature, this))
956 | hb_drain
957 ;
958 return_trace (bool (out->ligature));
959 }
960
961 bool sanitize (hb_sanitize_context_t *c) const
962 {
963 TRACE_SANITIZE (this);
964 return_trace (ligature.sanitize (c, this));
965 }
966
967 protected:
968 OffsetArrayOf<Ligature>
969 ligature; /* Array LigatureSet tables
970 * ordered by preference */
971 public:
972 DEFINE_SIZE_ARRAY (2, ligature);
973};
974
975struct LigatureSubstFormat1
976{
977 bool intersects (const hb_set_t *glyphs) const
978 {
979 return
980 + hb_zip (this+coverage, ligatureSet)
981 | hb_filter (*glyphs, hb_first)
982 | hb_map (hb_second)
983 | hb_map ([this, glyphs] (const OffsetTo<LigatureSet> &_)
984 { return (this+_).intersects (glyphs); })
985 | hb_any
986 ;
987 }
988
989 void closure (hb_closure_context_t *c) const
990 {
991 + hb_zip (this+coverage, ligatureSet)
992 | hb_filter (*c->glyphs, hb_first)
993 | hb_map (hb_second)
994 | hb_map (hb_add (this))
995 | hb_apply ([c] (const LigatureSet &_) { _.closure (c); })
996 ;
997 }
998
999 void closure_lookups (hb_closure_lookups_context_t *c) const {}
1000
1001 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1002 {
1003 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1004
1005 + hb_zip (this+coverage, ligatureSet)
1006 | hb_map (hb_second)
1007 | hb_map (hb_add (this))
1008 | hb_apply ([c] (const LigatureSet &_) { _.collect_glyphs (c); })
1009 ;
1010 }
1011
1012 const Coverage &get_coverage () const { return this+coverage; }
1013
1014 bool would_apply (hb_would_apply_context_t *c) const
1015 {
1016 unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
1017 if (likely (index == NOT_COVERED)) return false;
1018
1019 const LigatureSet &lig_set = this+ligatureSet[index];
1020 return lig_set.would_apply (c);
1021 }
1022
1023 bool apply (hb_ot_apply_context_t *c) const
1024 {
1025 TRACE_APPLY (this);
1026
1027 unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
1028 if (likely (index == NOT_COVERED)) return_trace (false);
1029
1030 const LigatureSet &lig_set = this+ligatureSet[index];
1031 return_trace (lig_set.apply (c));
1032 }
1033
1034 bool serialize (hb_serialize_context_t *c,
1035 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1036 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1037 hb_array_t<const HBGlyphID> ligatures_list,
1038 hb_array_t<const unsigned int> component_count_list,
1039 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1040 {
1041 TRACE_SERIALIZE (this);
1042 if (unlikely (!c->extend_min (*this))) return_trace (false);
1043 if (unlikely (!ligatureSet.serialize (c, first_glyphs.length))) return_trace (false);
1044 for (unsigned int i = 0; i < first_glyphs.length; i++)
1045 {
1046 unsigned int ligature_count = ligature_per_first_glyph_count_list[i];
1047 if (unlikely (!ligatureSet[i].serialize (c, this)
1048 .serialize (c,
1049 ligatures_list.sub_array (0, ligature_count),
1050 component_count_list.sub_array (0, ligature_count),
1051 component_list))) return_trace (false);
1052 ligatures_list += ligature_count;
1053 component_count_list += ligature_count;
1054 }
1055 return_trace (coverage.serialize (c, this).serialize (c, first_glyphs));
1056 }
1057
1058 bool subset (hb_subset_context_t *c) const
1059 {
1060 TRACE_SUBSET (this);
1061 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1062 const hb_map_t &glyph_map = *c->plan->glyph_map;
1063
1064 auto *out = c->serializer->start_embed (*this);
1065 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1066 out->format = format;
1067
1068 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1069 + hb_zip (this+coverage, ligatureSet)
1070 | hb_filter (glyphset, hb_first)
1071 | hb_filter (subset_offset_array (c, out->ligatureSet, this), hb_second)
1072 | hb_map (hb_first)
1073 | hb_map (glyph_map)
1074 | hb_sink (new_coverage)
1075 ;
1076 out->coverage.serialize (c->serializer, out)
1077 .serialize (c->serializer, new_coverage.iter ());
1078 return_trace (bool (new_coverage));
1079 }
1080
1081 bool sanitize (hb_sanitize_context_t *c) const
1082 {
1083 TRACE_SANITIZE (this);
1084 return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
1085 }
1086
1087 protected:
1088 HBUINT16 format; /* Format identifier--format = 1 */
1089 OffsetTo<Coverage>
1090 coverage; /* Offset to Coverage table--from
1091 * beginning of Substitution table */
1092 OffsetArrayOf<LigatureSet>
1093 ligatureSet; /* Array LigatureSet tables
1094 * ordered by Coverage Index */
1095 public:
1096 DEFINE_SIZE_ARRAY (6, ligatureSet);
1097};
1098
1099struct LigatureSubst
1100{
1101 bool serialize (hb_serialize_context_t *c,
1102 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1103 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1104 hb_array_t<const HBGlyphID> ligatures_list,
1105 hb_array_t<const unsigned int> component_count_list,
1106 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1107 {
1108 TRACE_SERIALIZE (this);
1109 if (unlikely (!c->extend_min (u.format))) return_trace (false);
1110 unsigned int format = 1;
1111 u.format = format;
1112 switch (u.format) {
1113 case 1: return_trace (u.format1.serialize (c,
1114 first_glyphs,
1115 ligature_per_first_glyph_count_list,
1116 ligatures_list,
1117 component_count_list,
1118 component_list));
1119 default:return_trace (false);
1120 }
1121 }
1122
1123 template <typename context_t, typename ...Ts>
1124 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1125 {
1126 TRACE_DISPATCH (this, u.format);
1127 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1128 switch (u.format) {
1129 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1130 default:return_trace (c->default_return_value ());
1131 }
1132 }
1133
1134 protected:
1135 union {
1136 HBUINT16 format; /* Format identifier */
1137 LigatureSubstFormat1 format1;
1138 } u;
1139};
1140
1141
1142struct ContextSubst : Context {};
1143
1144struct ChainContextSubst : ChainContext {};
1145
1146struct ExtensionSubst : Extension<ExtensionSubst>
1147{
1148 typedef struct SubstLookupSubTable SubTable;
1149 bool is_reverse () const;
1150};
1151
1152
1153struct ReverseChainSingleSubstFormat1
1154{
1155 bool intersects (const hb_set_t *glyphs) const
1156 {
1157 if (!(this+coverage).intersects (glyphs))
1158 return false;
1159
1160 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1161
1162 unsigned int count;
1163
1164 count = backtrack.len;
1165 for (unsigned int i = 0; i < count; i++)
1166 if (!(this+backtrack[i]).intersects (glyphs))
1167 return false;
1168
1169 count = lookahead.len;
1170 for (unsigned int i = 0; i < count; i++)
1171 if (!(this+lookahead[i]).intersects (glyphs))
1172 return false;
1173
1174 return true;
1175 }
1176
1177 void closure (hb_closure_context_t *c) const
1178 {
1179 if (!intersects (c->glyphs)) return;
1180
1181 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1182 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1183
1184 + hb_zip (this+coverage, substitute)
1185 | hb_filter (*c->glyphs, hb_first)
1186 | hb_map (hb_second)
1187 | hb_sink (c->output)
1188 ;
1189 }
1190
1191 void closure_lookups (hb_closure_lookups_context_t *c) const {}
1192
1193 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1194 {
1195 if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1196
1197 unsigned int count;
1198
1199 count = backtrack.len;
1200 for (unsigned int i = 0; i < count; i++)
1201 if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))) return;
1202
1203 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1204 count = lookahead.len;
1205 for (unsigned int i = 0; i < count; i++)
1206 if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))) return;
1207
1208 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1209 count = substitute.len;
Value stored to 'count' is never read
1210 c->output->add_array (substitute.arrayZ, substitute.len);
1211 }
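
Note on the finding above: inside collect_glyphs, 'count' is genuinely read by the backtrack and lookahead loops, but the final assignment at line 1209 is a dead store, since the add_array call on line 1210 reads substitute.len directly. The snippet below is a minimal, self-contained C++ reproduction of the pattern using hypothetical names (walk, backtrack, substitute), not HarfBuzz code; clang's deadcode checker reports the same warning on the second store, and deleting that single assignment (or, equivalently, passing 'count' to the consuming call) resolves it without changing the emitted code.

  #include <vector>

  // Hypothetical stand-in for collect_glyphs: 'count' is live for the
  // first loop, but the second assignment is a dead store because the
  // insert call below reads substitute.size () directly.
  static unsigned walk (const std::vector<unsigned> &backtrack,
                        const std::vector<unsigned> &substitute,
                        std::vector<unsigned> &output)
  {
    unsigned int count;
    unsigned total = 0;

    count = backtrack.size ();   // read by the loop below: not dead
    for (unsigned int i = 0; i < count; i++)
      total += backtrack[i];

    count = substitute.size ();  // "Value stored to 'count' is never read"
    output.insert (output.end (), substitute.begin (), substitute.end ());
    return total;
  }

  int main ()
  {
    std::vector<unsigned> b {1, 2}, s {3, 4}, out;
    return (int) walk (b, s, out);
  }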
1212
1213 const Coverage &get_coverage () const { return this+coverage; }
1214
1215 bool would_apply (hb_would_apply_context_t *c) const
1216 { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
1217
1218 bool apply (hb_ot_apply_context_t *c) const
1219 {
1220 TRACE_APPLY (this);
1221 if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
1222 return_trace (false); /* No chaining to this type */
1223
1224 unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
1225 if (likely (index == NOT_COVERED)) return_trace (false);
1226
1227 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1228 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1229
1230 if (unlikely (index >= substitute.len)) return_trace (false);
1231
1232 unsigned int start_index = 0, end_index = 0;
1233 if (match_backtrack (c,
1234 backtrack.len, (HBUINT16 *) backtrack.arrayZ,
1235 match_coverage, this,
1236 &start_index) &&
1237 match_lookahead (c,
1238 lookahead.len, (HBUINT16 *) lookahead.arrayZ,
1239 match_coverage, this,
1240 1, &end_index))
1241 {
1242 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
1243 c->replace_glyph_inplace (substitute[index]);
1244 /* Note: We DON'T decrease buffer->idx. The main loop does it
1245 * for us. This is useful for preventing surprises if someone
1246 * calls us through a Context lookup. */
1247 return_trace (true);
1248 }
1249
1250 return_trace (false);
1251 }
1252
1253 bool subset (hb_subset_context_t *c) const
1254 {
1255 TRACE_SUBSET (this);
1256 // TODO(subset)
1257 return_trace (false);
1258 }
1259
1260 bool sanitize (hb_sanitize_context_t *c) const
1261 {
1262 TRACE_SANITIZE (this);
1263 if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
1264 return_trace (false);
1265 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
1266 if (!lookahead.sanitize (c, this))
1267 return_trace (false);
1268 const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
1269 return_trace (substitute.sanitize (c));
1270 }
1271
1272 protected:
1273 HBUINT16 format; /* Format identifier--format = 1 */
1274 OffsetTo<Coverage>
1275 coverage; /* Offset to Coverage table--from
1276 * beginning of table */
1277 OffsetArrayOf<Coverage>
1278 backtrack; /* Array of coverage tables
1279 * in backtracking sequence, in glyph
1280 * sequence order */
1281 OffsetArrayOf<Coverage>
1282 lookaheadX; /* Array of coverage tables
1283 * in lookahead sequence, in glyph
1284 * sequence order */
1285 ArrayOf<HBGlyphID>
1286 substituteX; /* Array of substitute
1287 * GlyphIDs--ordered by Coverage Index */
1288 public:
1289 DEFINE_SIZE_MIN (10);
1290};
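
A note on the layout just defined, since it explains the repeated StructAfter calls in intersects, closure, collect_glyphs, apply, and sanitize: backtrack, lookaheadX, and substituteX are all variable-length arrays, so only backtrack has a fixed compile-time offset; each later member begins wherever the previous one's data ends, which is known only at runtime (hence the X-suffixed members are never accessed directly). The following sketch illustrates the idea with hypothetical miniature types (Array16, struct_after), not the real HarfBuzz templates:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical length-prefixed array: a count followed by 'len'
  // two-byte entries laid out inline in the same buffer.
  struct Array16
  {
    uint16_t len;
    // 'len' uint16_t entries follow immediately in memory
    unsigned get_size () const { return 2u + 2u * len; }
  };

  // Hypothetical analogue of StructAfter<T>: the next table begins
  // right after the previous variable-length one ends.
  template <typename T>
  static const T &struct_after (const Array16 &prev)
  {
    return *reinterpret_cast<const T *> (
             reinterpret_cast<const char *> (&prev) + prev.get_size ());
  }

  int main ()
  {
    // Two consecutive arrays serialized into one buffer:
    // [len=2, 10, 20][len=1, 30]
    uint16_t blob[] = { 2, 10, 20, 1, 30 };
    const Array16 &first  = *reinterpret_cast<const Array16 *> (blob);
    const Array16 &second = struct_after<Array16> (first);
    printf ("second.len = %u\n", (unsigned) second.len);  // prints 1
    return 0;
  }

HarfBuzz's real ArrayOf/OffsetArrayOf types add big-endian integers and sanitization hooks on top of this basic trick, but the offset arithmetic is the same.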
1291
1292struct ReverseChainSingleSubst
1293{
1294 template <typename context_t, typename ...Ts>
1295 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1296 {
1297 TRACE_DISPATCH (this, u.format);
1298 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1299 switch (u.format) {
1300 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1301 default:return_trace (c->default_return_value ());
1302 }
1303 }
1304
1305 protected:
1306 union {
1307 HBUINT16 format; /* Format identifier */
1308 ReverseChainSingleSubstFormat1 format1;
1309 } u;
1310};
1311
1312
1313
1314/*
1315 * SubstLookup
1316 */
1317
1318struct SubstLookupSubTable
1319{
1320 friend struct Lookup;
1321 friend struct SubstLookup;
1322
1323 enum Type {
1324 Single = 1,
1325 Multiple = 2,
1326 Alternate = 3,
1327 Ligature = 4,
1328 Context = 5,
1329 ChainContext = 6,
1330 Extension = 7,
1331 ReverseChainSingle = 8
1332 };
1333
1334 template <typename context_t, typename ...Ts>
1335 typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
1336 {
1337 TRACE_DISPATCH (this, lookup_type);
1338 switch (lookup_type) {
1339 case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
1340 case Multiple: return_trace (u.multiple.dispatch (c, hb_forward<Ts> (ds)...));
1341 case Alternate: return_trace (u.alternate.dispatch (c, hb_forward<Ts> (ds)...));
1342 case Ligature: return_trace (u.ligature.dispatch (c, hb_forward<Ts> (ds)...));
1343 case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
1344 case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
1345 case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
1346 case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, hb_forward<Ts> (ds)...));
1347 default: return_trace (c->default_return_value ());
1348 }
1349 }
1350
1351 bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
1352 {
1353 hb_intersects_context_t c (glyphs);
1354 return dispatch (&c, lookup_type);
1355 }
1356
1357 protected:
1358 union {
1359 SingleSubst single;
1360 MultipleSubst multiple;
1361 AlternateSubst alternate;
1362 LigatureSubst ligature;
1363 ContextSubst context;
1364 ChainContextSubst chainContext;
1365 ExtensionSubst extension;
1366 ReverseChainSingleSubst reverseChainContextSingle;
1367 } u;
1368 public:
1369 DEFINE_SIZE_MIN (0);
1370};
1371
1372
1373struct SubstLookup : Lookup
1374{
1375 typedef SubstLookupSubTable SubTable;
1376
1377 const SubTable& get_subtable (unsigned int i) const
1378 { return Lookup::get_subtable<SubTable> (i); }
1379
1380 static inline bool lookup_type_is_reverse (unsigned int lookup_type)
1381 { return lookup_type == SubTable::ReverseChainSingle; }
1382
1383 bool is_reverse () const
1384 {
1385 unsigned int type = get_type ();
1386 if (unlikely (type == SubTable::Extension))
1387 return reinterpret_cast<const ExtensionSubst &> (get_subtable (0)).is_reverse ();
1388 return lookup_type_is_reverse (type);
1389 }
1390
1391 bool apply (hb_ot_apply_context_t *c) const
1392 {
1393 TRACE_APPLY (this);
1394 return_trace (dispatch (c));
1395 }
1396
1397 bool intersects (const hb_set_t *glyphs) const
1398 {
1399 hb_intersects_context_t c (glyphs);
1400 return dispatch (&c);
1401 }
1402
1403 hb_closure_context_t::return_t closure (hb_closure_context_t *c, unsigned int this_index) const
1404 {
1405 if (!c->should_visit_lookup (this_index))
1406 return hb_closure_context_t::default_return_value ();
1407
1408 c->set_recurse_func (dispatch_closure_recurse_func);
1409
1410 hb_closure_context_t::return_t ret = dispatch (c);
1411
1412 c->flush ();
1413
1414 return ret;
1415 }
1416
1417 hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
1418 {
1419 if (c->is_lookup_visited (this_index))
1420 return hb_closure_lookups_context_t::default_return_value ();
1421
1422 c->set_lookup_visited (this_index);
1423 if (!intersects (c->glyphs))
1424 {
1425 c->set_lookup_inactive (this_index);
1426 return hb_closure_lookups_context_t::default_return_value ();
1427 }
1428
1429 c->set_recurse_func (dispatch_closure_lookups_recurse_func);
1430
1431 hb_closure_lookups_context_t::return_t ret = dispatch (c);
1432 return ret;
1433 }
1434
1435 hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
1436 {
1437 c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
1438 return dispatch (c);
1439 }
1440
1441 template <typename set_t>
1442 void collect_coverage (set_t *glyphs) const
1443 {
1444 hb_collect_coverage_context_t<set_t> c (glyphs);
1445 dispatch (&c);
1446 }
1447
1448 bool would_apply (hb_would_apply_context_t *c,
1449 const hb_ot_layout_lookup_accelerator_t *accel) const
1450 {
1451 if (unlikely (!c->len)) return false;
1452 if (!accel->may_have (c->glyphs[0])) return false;
1453 return dispatch (c);
1454 }
1455
1456 static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
1457
1458 SubTable& serialize_subtable (hb_serialize_context_t *c,
1459 unsigned int i)
1460 { return get_subtables<SubTable> ()[i].serialize (c, this); }
1461
1462 bool serialize_single (hb_serialize_context_t *c,
1463 uint32_t lookup_props,
1464 hb_sorted_array_t<const HBGlyphID> glyphs,
1465 hb_array_t<const HBGlyphID> substitutes)
1466 {
1467 TRACE_SERIALIZE (this);
1468 if (unlikely (!Lookup::serialize (c, SubTable::Single, lookup_props, 1))) return_trace (false);
1469 return_trace (serialize_subtable (c, 0).u.single.
1470 serialize (c, hb_zip (glyphs, substitutes)));
1471 }
1472
1473 bool serialize_multiple (hb_serialize_context_t *c,
1474 uint32_t lookup_props,
1475 hb_sorted_array_t<const HBGlyphID> glyphs,
1476 hb_array_t<const unsigned int> substitute_len_list,
1477 hb_array_t<const HBGlyphID> substitute_glyphs_list)
1478 {
1479 TRACE_SERIALIZE (this);
1480 if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
1481 return_trace (serialize_subtable (c, 0).u.multiple.
1482 serialize (c,
1483 glyphs,
1484 substitute_len_list,
1485 substitute_glyphs_list));
1486 }
1487
1488 bool serialize_alternate (hb_serialize_context_t *c,
1489 uint32_t lookup_props,
1490 hb_sorted_array_t<const HBGlyphID> glyphs,
1491 hb_array_t<const unsigned int> alternate_len_list,
1492 hb_array_t<const HBGlyphID> alternate_glyphs_list)
1493 {
1494 TRACE_SERIALIZE (this);
1495 if (unlikely (!Lookup::serialize (c, SubTable::Alternate, lookup_props, 1))) return_trace (false);
1496 return_trace (serialize_subtable (c, 0).u.alternate.
1497 serialize (c,
1498 glyphs,
1499 alternate_len_list,
1500 alternate_glyphs_list));
1501 }
1502
1503 bool serialize_ligature (hb_serialize_context_t *c,
1504 uint32_t lookup_props,
1505 hb_sorted_array_t<const HBGlyphID> first_glyphs,
1506 hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
1507 hb_array_t<const HBGlyphID> ligatures_list,
1508 hb_array_t<const unsigned int> component_count_list,
1509 hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
1510 {
1511 TRACE_SERIALIZE (this);
1512 if (unlikely (!Lookup::serialize (c, SubTable::Ligature, lookup_props, 1))) return_trace (false);
1513 return_trace (serialize_subtable (c, 0).u.ligature.
1514 serialize (c,
1515 first_glyphs,
1516 ligature_per_first_glyph_count_list,
1517 ligatures_list,
1518 component_count_list,
1519 component_list));
1520 }
1521
1522 template <typename context_t>
1523 static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
1524
1525 static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
1526 {
1527 if (!c->should_visit_lookup (lookup_index))
1528 return hb_empty_t ();
1529
1530 hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);
1531
1532 /* While in theory we should flush here, it will cause timeouts because a recursive
1533 * lookup can keep growing the glyph set. Skip, and outer loop will retry up to
1534 * HB_CLOSURE_MAX_STAGES time, which should be enough for every realistic font. */
1535 //c->flush ();
1536
1537 return ret;
1538 }
1539
1540 HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned lookup_index);
1541
1542 template <typename context_t, typename ...Ts>
1543 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1544 { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
1545
1546 bool subset (hb_subset_context_t *c) const
1547 { return Lookup::subset<SubTable> (c); }
1548
1549 bool sanitize (hb_sanitize_context_t *c) const
1550 { return Lookup::sanitize<SubTable> (c); }
1551};
1552
1553/*
1554 * GSUB -- Glyph Substitution
1555 * https://docs.microsoft.com/en-us/typography/opentype/spec/gsub
1556 */
1557
1558struct GSUB : GSUBGPOS
1559{
1560 static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;
1561
1562 const SubstLookup& get_lookup (unsigned int i) const
1563 { return static_cast<const SubstLookup &> (GSUBGPOS::get_lookup (i)); }
1564
1565 bool subset (hb_subset_context_t *c) const
1566 {
1567 hb_subset_layout_context_t l (c, tableTag, c->plan->gsub_lookups, c->plan->gsub_features);
1568 return GSUBGPOS::subset<SubstLookup> (&l);
1569 }
1570
1571 bool sanitize (hb_sanitize_context_t *c) const
1572 { return GSUBGPOS::sanitize<SubstLookup> (c); }
1573
1574 HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
1575 hb_face_t *face) const;
1576
1577 void closure_lookups (hb_face_t *face,
1578 const hb_set_t *glyphs,
1579 hb_set_t *lookup_indexes /* IN/OUT */) const
1580 { GSUBGPOS::closure_lookups<SubstLookup> (face, glyphs, lookup_indexes); }
1581
1582 typedef GSUBGPOS::accelerator_t<GSUB> accelerator_t;
1583};
1584
1585
1586struct GSUB_accelerator_t : GSUB::accelerator_t {};
1587
1588
1589/* Out-of-class implementation for methods recursing */
1590
1591#ifndef HB_NO_OT_LAYOUT
1592/*static*/ inline bool ExtensionSubst::is_reverse () const
1593{
1594 return SubstLookup::lookup_type_is_reverse (get_type ());
1595}
1596template <typename context_t>
1597/*static*/ typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
1598{
1599 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
1600 return l.dispatch (c);
1601}
1602
1603/*static*/ inline hb_closure_lookups_context_t::return_t SubstLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
1604{
1605 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (this_index);
1606 return l.closure_lookups (c, this_index);
1607}
1608
1609/*static*/ bool SubstLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
1610{
1611 const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
1612 unsigned int saved_lookup_props = c->lookup_props;
1613 unsigned int saved_lookup_index = c->lookup_index;
1614 c->set_lookup_index (lookup_index);
1615 c->set_lookup_props (l.get_props ());
1616 bool ret = l.dispatch (c);
1617 c->set_lookup_index (saved_lookup_index);
1618 c->set_lookup_props (saved_lookup_props);
1619 return ret;
1620}
1621#endif
1622
1623
1624} /* namespace OT */
1625
1626
1627#endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */