/*
 * Copyright 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright 2010,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-private.hh"
#include "hb-debug.hh"
#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-set-private.hh"


namespace OT {


struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
{
  inline const char *get_name (void) { return "CLOSURE"; }
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
                        hb_set_t *glyphs_,
                        unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                          face (face_),
                          glyphs (glyphs_),
                          recurse_func (nullptr),
                          nesting_level_left (nesting_level_left_),
                          debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
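
/* Illustrative usage sketch (not part of this file; the callback name below is
 * hypothetical, only set_recurse_func()/recurse() are real): the GSUB/GPOS code
 * drives these dispatch contexts by installing a recurse callback that re-enters
 * the lookup list, with nesting_level_left as the depth budget.
 *
 *   static hb_closure_context_t::return_t
 *   closure_one_lookup (hb_closure_context_t *c, unsigned int lookup_index);
 *
 *   hb_closure_context_t c (face, glyphs);
 *   c.set_recurse_func (closure_one_lookup);
 *   c.recurse (lookup_index);   // bounded by HB_MAX_NESTING_LEVEL
 */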


struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
{
  inline const char *get_name (void) { return "WOULD_APPLY"; }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value (void) { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;
  unsigned int len;
  bool zero_context;
  unsigned int debug_depth;

  hb_would_apply_context_t (hb_face_t *face_,
                            const hb_codepoint_t *glyphs_,
                            unsigned int len_,
                            bool zero_context_) :
                              face (face_),
                              glyphs (glyphs_),
                              len (len_),
                              zero_context (zero_context_),
                              debug_depth (0) {}
};


struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return HB_VOID;

    hb_set_t *old_before = before;
    hb_set_t *old_input = input;
    hb_set_t *old_after = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input = old_input;
    after = old_after;

    recursed_lookups->add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
                               hb_set_t *glyphs_before, /* OUT. May be nullptr */
                               hb_set_t *glyphs_input,  /* OUT. May be nullptr */
                               hb_set_t *glyphs_after,  /* OUT. May be nullptr */
                               hb_set_t *glyphs_output, /* OUT. May be nullptr */
                               unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                              face (face_),
                              before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
                              input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
                              after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
                              output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
                              recurse_func (nullptr),
                              recursed_lookups (nullptr),
                              nesting_level_left (nesting_level_left_),
                              debug_depth (0)
  {
    recursed_lookups = hb_set_create ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    hb_set_destroy (recursed_lookups);
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



/* XXX Can we remove this? */

template <typename set_t>
struct hb_add_coverage_context_t :
       hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    r.add_coverage (set);
    return false;
  }

  hb_add_coverage_context_t (set_t *set_) :
                            set (set_),
                            debug_depth (0) {}

  set_t *set;
  unsigned int debug_depth;
};


struct hb_apply_context_t :
       hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
{
  struct matcher_t
  {
    inline matcher_t (void) :
             lookup_props (0),
             ignore_zwnj (false),
             ignore_zwj (false),
             mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
             syllable arg1(0),
#undef arg1
             match_func (nullptr),
             match_data (nullptr) {};

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const UINT16 &value, const void *data);

    inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    inline void set_mask (hb_mask_t mask_) { mask = mask_; }
    inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    inline void set_match_func (match_func_t match_func_,
                                const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    inline may_match_t may_match (const hb_glyph_info_t &info,
                                  const UINT16 *glyph_data) const
    {
      if (!(info.mask & mask) ||
          (syllable && syllable != info.syllable ()))
        return MATCH_NO;

      if (match_func)
        return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
              const hb_glyph_info_t &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
        return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
                    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
                    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
        return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  struct skipping_iterator_t
  {
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = nullptr;
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (c->table_index == 1 || (context_match || c->auto_zwj));
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    inline void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    inline void set_match_func (matcher_t::match_func_t match_func_,
                                const void *match_data_,
                                const UINT16 glyph_data[])
    {
      matcher.set_match_func (match_func_, match_data_);
      match_glyph_data = glyph_data;
    }

    inline void reset (unsigned int start_index_,
                       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    inline void reject (void) { num_items++; match_glyph_data--; }

    inline matcher_t::may_skip_t
    may_skip (const hb_apply_context_t *c,
              const hb_glyph_info_t &info) const
    {
      return matcher.may_skip (c, info);
    }

    inline bool next (void)
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
        idx++;
        const hb_glyph_info_t &info = c->buffer->info[idx];

        matcher_t::may_skip_t skip = matcher.may_skip (c, info);
        if (unlikely (skip == matcher_t::SKIP_YES))
          continue;

        matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
        if (match == matcher_t::MATCH_YES ||
            (match == matcher_t::MATCH_MAYBE &&
             skip == matcher_t::SKIP_NO))
        {
          num_items--;
          match_glyph_data++;
          return true;
        }

        if (skip == matcher_t::SKIP_NO)
          return false;
      }
      return false;
    }
    inline bool prev (void)
    {
      assert (num_items > 0);
      while (idx >= num_items)
      {
        idx--;
        const hb_glyph_info_t &info = c->buffer->out_info[idx];

        matcher_t::may_skip_t skip = matcher.may_skip (c, info);
        if (unlikely (skip == matcher_t::SKIP_YES))
          continue;

        matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
        if (match == matcher_t::MATCH_YES ||
            (match == matcher_t::MATCH_MAYBE &&
             skip == matcher_t::SKIP_NO))
        {
          num_items--;
          match_glyph_data++;
          return true;
        }

        if (skip == matcher_t::SKIP_NO)
          return false;
      }
      return false;
    }

    unsigned int idx;
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const UINT16 *match_glyph_data;

    unsigned int num_items;
    unsigned int end;
  };


  inline const char *get_name (void) { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value (void) { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  recurse_func_t recurse_func;
  const GDEF &gdef;
  const VariationStore &var_store;

  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int table_index; /* GSUB/GPOS */
  unsigned int lookup_index;
  unsigned int lookup_props;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  bool auto_zwnj;
  bool auto_zwj;
  bool has_glyph_classes;


  hb_apply_context_t (unsigned int table_index_,
                      hb_font_t *font_,
                      hb_buffer_t *buffer_) :
                        iter_input (), iter_context (),
                        font (font_), face (font->face), buffer (buffer_),
                        recurse_func (nullptr),
                        gdef (*hb_ot_layout_from_face (face)->gdef),
                        var_store (gdef.get_var_store ()),
                        direction (buffer_->props.direction),
                        lookup_mask (1),
                        table_index (table_index_),
                        lookup_index ((unsigned int) -1),
                        lookup_props (0),
                        nesting_level_left (HB_MAX_NESTING_LEVEL),
                        debug_depth (0),
                        auto_zwnj (true),
                        auto_zwj (true),
                        has_glyph_classes (gdef.has_glyph_classes ()) {}

  inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
  inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
  inline void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; }
  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  inline void set_lookup_props (unsigned int lookup_props_)
  {
    lookup_props = lookup_props_;
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  inline bool
  match_properties_mark (hb_codepoint_t glyph,
                         unsigned int glyph_props,
                         unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  inline bool
  check_glyph_property (const hb_glyph_info_t *info,
                        unsigned int match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }
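
  /* Worked example (informal, values assumed for illustration): a lookup with
   * LookupFlag::UseMarkFilteringSet and mark-filtering-set index 3 reaches
   * here with match_props == (3u << 16) | LookupFlag::UseMarkFilteringSet,
   * so only marks covered by GDEF MarkGlyphSet 3 match.  A lookup with
   * MarkAttachmentType class 2 carries 2 in the second byte of match_props
   * (0x0200), so only marks whose GDEF attachment class is 2 match.
   * match_properties_mark() above decodes exactly these two encodings. */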

  inline void _set_glyph_props (hb_codepoint_t glyph_index,
                                unsigned int class_guess = 0,
                                bool ligature = false,
                                bool component = false) const
  {
    unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
                          HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
    add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
    else if (class_guess)
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
  }

  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
                                           unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, true);
    buffer->replace_glyph (glyph_index);
  }
  inline void output_glyph_for_component (hb_codepoint_t glyph_index,
                                          unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, false, true);
    buffer->output_glyph (glyph_index);
  }
};



typedef bool (*intersects_func_t) (hb_set_t *glyphs, const UINT16 &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const UINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const UINT16 &value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};


static inline bool intersects_glyph (hb_set_t *glyphs, const UINT16 &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
static inline bool intersects_class (hb_set_t *glyphs, const UINT16 &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
static inline bool intersects_coverage (hb_set_t *glyphs, const UINT16 &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

static inline bool intersects_array (hb_closure_context_t *c,
                                     unsigned int count,
                                     const UINT16 values[],
                                     intersects_func_t intersects_func,
                                     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


static inline void collect_glyph (hb_set_t *glyphs, const UINT16 &value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, const UINT16 &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.add_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, const UINT16 &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  (data+coverage).add_coverage (glyphs);
}
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
                                  hb_set_t *glyphs,
                                  unsigned int count,
                                  const UINT16 values[],
                                  collect_glyphs_func_t collect_func,
                                  const void *collect_data)
{
  for (unsigned int i = 0; i < count; i++)
    collect_func (glyphs, values[i], collect_data);
}


static inline bool match_glyph (hb_codepoint_t glyph_id, const UINT16 &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const UINT16 &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const UINT16 &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}

static inline bool would_match_input (hb_would_apply_context_t *c,
                                      unsigned int count, /* Including the first glyph (not matched) */
                                      const UINT16 input[], /* Array of input values--start with second glyph */
                                      match_func_t match_func,
                                      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
    if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
      return false;

  return true;
}
static inline bool match_input (hb_apply_context_t *c,
                                unsigned int count, /* Including the first glyph (not matched) */
                                const UINT16 input[], /* Array of input values--start with second glyph */
                                match_func_t match_func,
                                const void *match_data,
                                unsigned int *end_offset,
                                unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
                                bool *p_is_mark_ligature = nullptr,
                                unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct),
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */
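
  /* Informal walk-through of the rule above (lig_id/lig_comp values assumed):
   * after LAM,LAM,HEH ligate in LAM,SHADDA,LAM,FATHA,HEH, the SHADDA ends up
   * attached to component 1 and the FATHA to component 2 of the same ligature.
   * A later lookup trying to match SHADDA,FATHA then sees first_lig_comp == 1
   * but this_lig_comp == 2, so the component check below rejects it (unless
   * the ligature base is skippable per the mark-filtering exception). */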

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
        /* ...unless, we are attached to a base ligature and that base
         * ligature is ignorable. */
        if (ligbase == LIGBASE_NOT_CHECKED)
        {
          bool found = false;
          const hb_glyph_info_t *out = buffer->out_info;
          unsigned int j = buffer->out_len;
          while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
          {
            if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
            {
              j--;
              found = true;
              break;
            }
            j--;
          }

          if (found && skippy_iter.may_skip (c, out[j]) == hb_apply_context_t::matcher_t::SKIP_YES)
            ligbase = LIGBASE_MAY_SKIP;
          else
            ligbase = LIGBASE_MAY_NOT_SKIP;
        }

        if (ligbase == LIGBASE_MAY_NOT_SKIP)
          return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
        return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
static inline bool ligate_input (hb_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
                                 unsigned int match_length,
                                 hb_codepoint_t lig_glyph,
                                 bool is_mark_ligature,
                                 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, buffer->idx + match_length);

  /*
   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components, some of which are themselves
   *   ligatures, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
  unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (!is_mark_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    while (buffer->idx < match_positions[i] && !buffer->in_error)
    {
      if (!is_mark_ligature) {
        unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
        if (this_comp == 0)
          this_comp = last_num_components;
        unsigned int new_lig_comp = components_so_far - last_num_components +
                                    MIN (this_comp, last_num_components);
        _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }
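
  /* Informal arithmetic check of the bug-437633 example in the comment above:
   * when LAM fuses with the two-component LAM-HEH ligature, components_so_far
   * ends up 3 and last_num_components 2, so a trailing SHADDA/FATHA carrying
   * this_comp == 1 gets new_lig_comp == 3 - 2 + MIN (1, 2) == 2, i.e. it is
   * re-attached to component 2 of the new three-component ligature. */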

  if (!is_mark_ligature && last_lig_id) {
    /* Re-adjust components for any marks following. */
    for (unsigned int i = buffer->idx; i < buffer->len; i++) {
      if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
        unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
        if (!this_comp)
          break;
        unsigned int new_lig_comp = components_so_far - last_num_components +
                                    MIN (this_comp, last_num_components);
        _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
      } else
        break;
    }
  }
  return_trace (true);
}

static inline bool match_backtrack (hb_apply_context_t *c,
                                    unsigned int count,
                                    const UINT16 backtrack[],
                                    match_func_t match_func,
                                    const void *match_data,
                                    unsigned int *match_start)
{
  TRACE_APPLY (nullptr);

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
  skippy_iter.reset (c->buffer->backtrack_len (), count);
  skippy_iter.set_match_func (match_func, match_data, backtrack);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.prev ())
      return_trace (false);

  *match_start = skippy_iter.idx;

  return_trace (true);
}

static inline bool match_lookahead (hb_apply_context_t *c,
                                    unsigned int count,
                                    const UINT16 lookahead[],
                                    match_func_t match_func,
                                    const void *match_data,
                                    unsigned int offset,
                                    unsigned int *end_index)
{
  TRACE_APPLY (nullptr);

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
  skippy_iter.reset (c->buffer->idx + offset - 1, count);
  skippy_iter.set_match_func (match_func, match_data, lookahead);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.next ())
      return_trace (false);

  *end_index = skippy_iter.idx + 1;

  return_trace (true);
}



struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  UINT16 sequenceIndex;   /* Index into current glyph
                           * sequence--first glyph = 0 */
  UINT16 lookupListIndex; /* Lookup to apply to that
                           * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};


template <typename context_t>
static inline void recurse_lookups (context_t *c,
                                    unsigned int lookupCount,
                                    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}

static inline bool apply_lookup (hb_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
                                 unsigned int lookupCount,
                                 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
                                 unsigned int match_length)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;
  int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Don't recurse to ourself at same position.
     * Note that this test is too naive; it doesn't catch longer loops. */
    if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
      continue;

    if (unlikely (!buffer->move_to (match_positions[idx])))
      break;

    if (unlikely (buffer->max_ops <= 0))
      break;

    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one were removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If the recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's the current match position that was deleted,
     *     NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions were removed, as there might
     *     have been marks and default-ignorables in the sequence.  We
     *     should instead drop match positions between current-position
     *     and current-position + n instead.
     *
     * It should be possible to construct tests for both of these cases.
     */
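
    /* Informal example of the adjustment below (positions made up): with
     * count == 3 and output-adjusted match_positions == {5, 6, 8}, a recursed
     * lookup at idx == 0 that turns one glyph into three gives delta == +2.
     * The tail {6, 8} is shifted up by two slots, the freed slots are filled
     * with 6 and 7, and the shifted tail is bumped by delta, leaving
     * match_positions == {5, 6, 7, 8, 10} and count == 5. */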

    end += delta;
    if (end <= int (match_positions[idx]))
    {
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items, more than we have had matched.
       * Just never rewind end back and get out of here.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
      end = match_positions[idx];
      /* There can't be any further changes. */
      break;
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
        break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = MAX (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
             (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  buffer->move_to (end);

  return_trace (true);
}



/* Contextual lookups */

struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;
};

struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};

struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};

static inline void context_closure_lookup (hb_closure_context_t *c,
                                           unsigned int inputCount, /* Including the first glyph (not matched) */
                                           const UINT16 input[], /* Array of input values--start with second glyph */
                                           unsigned int lookupCount,
                                           const LookupRecord lookupRecord[],
                                           ContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
                        inputCount ? inputCount - 1 : 0, input,
                        lookup_context.funcs.intersects, lookup_context.intersects_data))
    recurse_lookups (c,
                     lookupCount, lookupRecord);
}

static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                  unsigned int inputCount, /* Including the first glyph (not matched) */
                                                  const UINT16 input[], /* Array of input values--start with second glyph */
                                                  unsigned int lookupCount,
                                                  const LookupRecord lookupRecord[],
                                                  ContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}

static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const UINT16 input[], /* Array of input values--start with second glyph */
                                               unsigned int lookupCount HB_UNUSED,
                                               const LookupRecord lookupRecord[] HB_UNUSED,
                                               ContextApplyLookupContext &lookup_context)
{
  return would_match_input (c,
                            inputCount, input,
                            lookup_context.funcs.match, lookup_context.match_data);
}
static inline bool context_apply_lookup (hb_apply_context_t *c,
                                         unsigned int inputCount, /* Including the first glyph (not matched) */
                                         const UINT16 input[], /* Array of input values--start with second glyph */
                                         unsigned int lookupCount,
                                         const LookupRecord lookupRecord[],
                                         ContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data,
                      &match_length, match_positions)
      && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
          apply_lookup (c,
                        inputCount, match_positions,
                        lookupCount, lookupRecord,
                        match_length));
}

struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
                            inputCount, inputZ,
                            lookupCount, lookupRecord,
                            lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
                                   inputCount, inputZ,
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (inputCount.sanitize (c) &&
                  lookupCount.sanitize (c) &&
                  c->check_range (inputZ,
                                  inputZ[0].static_size * inputCount +
                                  lookupRecordX[0].static_size * lookupCount));
  }

  protected:
  UINT16 inputCount;  /* Total number of glyphs in input
                       * glyph sequence--includes the first
                       * glyph */
  UINT16 lookupCount; /* Number of LookupRecords */
  UINT16 inputZ[VAR]; /* Array of match inputs--start with
                       * second glyph */
  LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
                                    * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
};
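
/* Size sketch (assumed example data): a Format-1 Rule that matches a
 * three-glyph input sequence and triggers one lookup is laid out as
 * inputCount = 3, lookupCount = 1, two UINT16 entries in inputZ (the first
 * glyph comes from the Coverage/RuleSet indexing, not from inputZ), then one
 * 4-byte LookupRecord: 2 + 2 + 2*2 + 4 = 12 bytes. */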

struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Rule>
                rule; /* Array of Rule tables
                       * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


struct ContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      nullptr
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      nullptr
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  UINT16 format; /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage; /* Offset to Coverage table--from
                           * beginning of table */
  OffsetArrayOf<RuleSet>
                ruleSet; /* Array of RuleSet tables
                          * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  UINT16 format; /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage; /* Offset to Coverage table--from
                           * beginning of table */
  OffsetTo<ClassDef>
                classDef; /* Offset to glyph ClassDef table--from
                           * beginning of table */
  OffsetArrayOf<RuleSet>
                ruleSet; /* Array of RuleSet tables
                          * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    context_closure_lookup (c,
                            glyphCount, (const UINT16 *) (coverageZ + 1),
                            lookupCount, lookupRecord,
                            lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
                                   glyphCount, (const UINT16 *) (coverageZ + 1),
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c,
                                              glyphCount, (const UINT16 *) (coverageZ + 1),
                                              lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const UINT16 *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  UINT16 format;     /* Format identifier--format = 3 */
  UINT16 glyphCount; /* Number of glyphs in the input glyph
                      * sequence */
  UINT16 lookupCount; /* Number of LookupRecords */
  OffsetTo<Coverage>
                coverageZ[VAR]; /* Array of offsets to Coverage
                                 * table in glyph sequence order */
  LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
                                    * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};

struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
    UINT16 format; /* Format identifier */
    ContextFormat1 format1;
    ContextFormat2 format2;
    ContextFormat3 format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

static inline void chain_context_closure_lookup (hb_closure_context_t *c,
                                                 unsigned int backtrackCount,
                                                 const UINT16 backtrack[],
                                                 unsigned int inputCount, /* Including the first glyph (not matched) */
                                                 const UINT16 input[], /* Array of input values--start with second glyph */
                                                 unsigned int lookaheadCount,
                                                 const UINT16 lookahead[],
                                                 unsigned int lookupCount,
                                                 const LookupRecord lookupRecord[],
                                                 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
                        backtrackCount, backtrack,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
                        inputCount ? inputCount - 1 : 0, input,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
                        lookaheadCount, lookahead,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    recurse_lookups (c,
                     lookupCount, lookupRecord);
}

static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                        unsigned int backtrackCount,
                                                        const UINT16 backtrack[],
                                                        unsigned int inputCount, /* Including the first glyph (not matched) */
                                                        const UINT16 input[], /* Array of input values--start with second glyph */
                                                        unsigned int lookaheadCount,
                                                        const UINT16 lookahead[],
                                                        unsigned int lookupCount,
                                                        const LookupRecord lookupRecord[],
                                                        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
                 backtrackCount, backtrack,
                 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
                 lookaheadCount, lookahead,
                 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}

static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
                                                     unsigned int backtrackCount,
                                                     const UINT16 backtrack[] HB_UNUSED,
                                                     unsigned int inputCount, /* Including the first glyph (not matched) */
                                                     const UINT16 input[], /* Array of input values--start with second glyph */
                                                     unsigned int lookaheadCount,
                                                     const UINT16 lookahead[] HB_UNUSED,
                                                     unsigned int lookupCount HB_UNUSED,
                                                     const LookupRecord lookupRecord[] HB_UNUSED,
                                                     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
                            inputCount, input,
                            lookup_context.funcs.match, lookup_context.match_data[1]);
}

static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
                                               unsigned int backtrackCount,
                                               const UINT16 backtrack[],
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const UINT16 input[], /* Array of input values--start with second glyph */
                                               unsigned int lookaheadCount,
                                               const UINT16 lookahead[],
                                               unsigned int lookupCount,
                                               const LookupRecord lookupRecord[],
                                               ChainContextApplyLookupContext &lookup_context)
{
  unsigned int start_index = 0, match_length = 0, end_index = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data[1],
                      &match_length, match_positions)
      && match_backtrack (c,
                          backtrackCount, backtrack,
                          lookup_context.funcs.match, lookup_context.match_data[0],
                          &start_index)
      && match_lookahead (c,
                          lookaheadCount, lookahead,
                          lookup_context.funcs.match, lookup_context.match_data[2],
                          match_length, &end_index)
      && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
          apply_lookup (c,
                        inputCount, match_positions,
                        lookupCount, lookupRecord,
                        match_length));
}

struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.array,
                                  input.len, input.array,
                                  lookahead.len, lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.array,
                                         input.len, input.array,
                                         lookahead.len, lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, backtrack.array,
                                                    input.len, input.array,
                                                    lookahead.len, lookahead.array, lookup.len,
                                                    lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, backtrack.array,
                                              input.len, input.array,
                                              lookahead.len, lookahead.array, lookup.len,
                                              lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<UINT16>
                backtrack; /* Array of backtracking values
                            * (to be matched before the input
                            * sequence) */
  HeadlessArrayOf<UINT16>
                inputX; /* Array of input values (start with
                         * second glyph) */
  ArrayOf<UINT16>
                lookaheadX; /* Array of lookahead values (to be
                             * matched after the input sequence) */
  ArrayOf<LookupRecord>
                lookupX; /* Array of LookupRecords--in
                          * design order */
  public:
  DEFINE_SIZE_MIN (8);
};
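
/* Size sketch (assumed example data): a ChainRule with one backtrack glyph,
 * a two-glyph input sequence (only the second glyph is stored, per the
 * "start with second glyph" note above), one lookahead glyph and one
 * LookupRecord would be (2+2) + (2+2) + (2+2) + (2+4) = 18 bytes, which is
 * why the four arrays are located with StructAfter<> rather than fixed
 * offsets. */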
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.array,
                                  input.len, input.array,
                                  lookahead.len, lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.array,
                                         input.len, input.array,
                                         lookahead.len, lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, backtrack.array,
                                                    input.len, input.array,
                                                    lookahead.len, lookahead.array, lookup.len,
                                                    lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, backtrack.array,
                                              input.len, input.array,
                                              lookahead.len, lookahead.array, lookup.len,
                                              lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<UINT16> &input = StructAfter<HeadlessArrayOf<UINT16> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<UINT16> &lookahead = StructAfter<ArrayOf<UINT16> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<UINT16>
                backtrack;      /* Array of backtracking values
                                 * (to be matched before the input
                                 * sequence) */
  HeadlessArrayOf<UINT16>
                inputX;         /* Array of input values (start with
                                 * second glyph) */
  ArrayOf<UINT16>
                lookaheadX;     /* Array of lookahead values (to be
                                 * matched after the input sequence) */
  ArrayOf<LookupRecord>
                lookupX;        /* Array of LookupRecords--in
                                 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};

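/* A ChainRuleSet simply tries its rules in order; would_apply() and apply()
 * stop at the first rule that matches. */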
struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
                rule;           /* Array of ChainRule tables
                                 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

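/* Format 1: rules match literal glyph IDs.  The first input glyph selects a
 * ChainRuleSet through the Coverage table; the remaining positions are
 * matched with match_glyph / intersects_glyph / collect_glyph, so no extra
 * per-position data is needed (hence the nullptr triplets below). */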
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {nullptr, nullptr, nullptr}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {nullptr, nullptr, nullptr}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  UINT16        format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;        /* Array of ChainRuleSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

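/* Format 2: rules match glyph classes.  Three independent ClassDef tables
 * classify the backtrack, input, and lookahead positions, and the rule set is
 * selected by the input class of the current glyph. */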
struct ChainContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
                  backtrackClassDef.sanitize (c, this) &&
                  inputClassDef.sanitize (c, this) &&
                  lookaheadClassDef.sanitize (c, this) &&
                  ruleSet.sanitize (c, this));
  }

  protected:
  UINT16        format;         /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  OffsetTo<ClassDef>
                backtrackClassDef;      /* Offset to glyph ClassDef table
                                         * containing backtrack sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                inputClassDef;          /* Offset to glyph ClassDef
                                         * table containing input sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                lookaheadClassDef;      /* Offset to glyph ClassDef table
                                         * containing lookahead sequence
                                         * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;        /* Array of ChainRuleSet tables
                                 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

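/* Format 3: a single rule, spelled out as one Coverage table per position.
 * The (const UINT16 *) casts below reinterpret the arrays of 16-bit coverage
 * offsets as plain 16-bit values for the shared helpers, and the "+ 1" skips
 * the first input coverage, which doubles as the subtable's own coverage. */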
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    chain_context_closure_lookup (c,
                                  backtrack.len, (const UINT16 *) backtrack.array,
                                  input.len, (const UINT16 *) input.array + 1,
                                  lookahead.len, (const UINT16 *) lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, (const UINT16 *) backtrack.array,
                                         input.len, (const UINT16 *) input.array + 1,
                                         lookahead.len, (const UINT16 *) lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, (const UINT16 *) backtrack.array,
                                                    input.len, (const UINT16 *) input.array + 1,
                                                    lookahead.len, (const UINT16 *) lookahead.array,
                                                    lookup.len, lookup.array, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, (const UINT16 *) backtrack.array,
                                              input.len, (const UINT16 *) input.array + 1,
                                              lookahead.len, (const UINT16 *) lookahead.array,
                                              lookup.len, lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  UINT16        format;         /* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
                backtrack;      /* Array of coverage tables
                                 * in backtracking sequence, in glyph
                                 * sequence order */
  OffsetArrayOf<Coverage>
                inputX;         /* Array of coverage
                                 * tables in input sequence, in glyph
                                 * sequence order */
  OffsetArrayOf<Coverage>
                lookaheadX;     /* Array of coverage tables
                                 * in lookahead sequence, in glyph
                                 * sequence order */
  ArrayOf<LookupRecord>
                lookupX;        /* Array of LookupRecords--in
                                 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};

struct ChainContext
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  UINT16                format;         /* Format identifier */
  ChainContextFormat1   format1;
  ChainContextFormat2   format2;
  ChainContextFormat3   format3;
  } u;
};


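/* Extension lookups wrap another lookup subtable behind a 32-bit offset,
 * which lets GSUB/GPOS data grow past the 16-bit offset range of regular
 * subtables.  Dispatch simply forwards to the wrapped subtable using its
 * real lookup type. */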
template <typename T>
struct ExtensionFormat1
{
  inline unsigned int get_type (void) const { return extensionLookupType; }

  template <typename X>
  inline const X& get_subtable (void) const
  {
    unsigned int offset = extensionOffset;
    if (unlikely (!offset)) return Null(typename T::LookupSubTable);
    return StructAtOffset<typename T::LookupSubTable> (this, offset);
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
  }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && extensionOffset != 0);
  }

  protected:
  UINT16        format;                 /* Format identifier.  Set to 1. */
  UINT16        extensionLookupType;    /* Lookup type of subtable referenced
                                         * by ExtensionOffset (i.e. the
                                         * extension subtable). */
  UINT32        extensionOffset;        /* Offset to the extension subtable,
                                         * of lookup type extensionLookupType. */
  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename T>
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  inline const X& get_subtable (void) const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::LookupSubTable> ();
    default:return Null(typename T::LookupSubTable);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  UINT16                format;         /* Format identifier */
  ExtensionFormat1<T>   format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

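/* GSUBGPOS is the shared header of the GSUB and GPOS tables: script list,
 * feature list, lookup list, and (since version 1.1) a 32-bit offset to a
 * FeatureVariations table.  Both find_variations_index() and sanitize() check
 * the version before touching featureVars, so a 1.0 table never reads past
 * its 10-byte minimum size. */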
struct GSUBGPOS
{
  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
                                       unsigned int *script_count /* IN/OUT */,
                                       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
                                        unsigned int *feature_count /* IN/OUT */,
                                        hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool find_variations_index (const int *coords, unsigned int num_coords,
                                     unsigned int *index) const
  { return (version.to_int () >= 0x00010001u ? this+featureVars : Null(FeatureVariations))
           .find_index (coords, num_coords, index); }
  inline const Feature& get_feature_variation (unsigned int feature_index,
                                               unsigned int variations_index) const
  {
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
        version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
                                                                   feature_index);
      if (feature)
        return *feature;
    }
    return get_feature (feature_index);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  scriptList.sanitize (c, this) &&
                  featureList.sanitize (c, this) &&
                  lookupList.sanitize (c, this) &&
                  (version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
  }

  protected:
  FixedVersion<>        version;        /* Version of the GSUB/GPOS table--initially set
                                         * to 0x00010000u */
  OffsetTo<ScriptList>
                scriptList;     /* ScriptList table */
  OffsetTo<FeatureList>
                featureList;    /* FeatureList table */
  OffsetTo<LookupList>
                lookupList;     /* LookupList table */
  LOffsetTo<FeatureVariations>
                featureVars;    /* Offset to Feature Variations
                                 * table--from beginning of table
                                 * (may be NULL).  Introduced
                                 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */