/*
 * Copyright 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 * 25 * Red Hat Author(s): Behdad Esfahbod 26 * Google Author(s): Behdad Esfahbod 27 */ 28 29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 31 32 #include "hb-buffer-private.hh" 33 #include "hb-ot-layout-gdef-table.hh" 34 #include "hb-set-private.hh" 35 36 37 namespace OT { 38 39 40 #ifndef HB_DEBUG_CLOSURE 41 #define HB_DEBUG_CLOSURE (HB_DEBUG+0) 42 #endif 43 44 #define TRACE_CLOSURE(this) \ 45 hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \ 46 (&c->debug_depth, c->get_name (), this, HB_FUNC, \ 47 ""); 48 49 struct hb_closure_context_t : 50 hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE> 51 { 52 inline const char *get_name (void) { return "CLOSURE"; } 53 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index); 54 template <typename T> 55 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; } 56 static return_t default_return_value (void) { return HB_VOID; } 57 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } 58 return_t recurse (unsigned int lookup_index) 59 { 60 if (unlikely (nesting_level_left == 0 || !recurse_func)) 61 return default_return_value (); 62 63 nesting_level_left--; 64 recurse_func (this, lookup_index); 65 nesting_level_left++; 66 return HB_VOID; 67 } 68 69 hb_face_t *face; 70 hb_set_t *glyphs; 71 recurse_func_t recurse_func; 72 unsigned int nesting_level_left; 73 unsigned int debug_depth; 74 75 hb_closure_context_t (hb_face_t *face_, 76 hb_set_t *glyphs_, 77 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : 78 face (face_), 79 glyphs (glyphs_), 80 recurse_func (NULL), 81 nesting_level_left (nesting_level_left_), 82 debug_depth (0) {} 83 84 void set_recurse_func (recurse_func_t func) { recurse_func = func; } 85 }; 86 87 88 89 #ifndef HB_DEBUG_WOULD_APPLY 90 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0) 91 #endif 92 93 #define TRACE_WOULD_APPLY(this) \ 94 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, 
bool> trace \ 95 (&c->debug_depth, c->get_name (), this, HB_FUNC, \ 96 "%d glyphs", c->len); 97 98 struct hb_would_apply_context_t : 99 hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY> 100 { 101 inline const char *get_name (void) { return "WOULD_APPLY"; } 102 template <typename T> 103 inline return_t dispatch (const T &obj) { return obj.would_apply (this); } 104 static return_t default_return_value (void) { return false; } 105 bool stop_sublookup_iteration (return_t r) const { return r; } 106 107 hb_face_t *face; 108 const hb_codepoint_t *glyphs; 109 unsigned int len; 110 bool zero_context; 111 unsigned int debug_depth; 112 113 hb_would_apply_context_t (hb_face_t *face_, 114 const hb_codepoint_t *glyphs_, 115 unsigned int len_, 116 bool zero_context_) : 117 face (face_), 118 glyphs (glyphs_), 119 len (len_), 120 zero_context (zero_context_), 121 debug_depth (0) {} 122 }; 123 124 125 126 #ifndef HB_DEBUG_COLLECT_GLYPHS 127 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0) 128 #endif 129 130 #define TRACE_COLLECT_GLYPHS(this) \ 131 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \ 132 (&c->debug_depth, c->get_name (), this, HB_FUNC, \ 133 ""); 134 135 struct hb_collect_glyphs_context_t : 136 hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS> 137 { 138 inline const char *get_name (void) { return "COLLECT_GLYPHS"; } 139 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index); 140 template <typename T> 141 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; } 142 static return_t default_return_value (void) { return HB_VOID; } 143 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } 144 return_t recurse (unsigned int lookup_index) 145 { 146 if (unlikely (nesting_level_left == 0 || !recurse_func)) 147 return default_return_value (); 148 149 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get 
150 * past the previous check. For GSUB, we only want to collect the output 151 * glyphs in the recursion. If output is not requested, we can go home now. 152 * 153 * Note further, that the above is not exactly correct. A recursed lookup 154 * is allowed to match input that is not matched in the context, but that's 155 * not how most fonts are built. It's possible to relax that and recurse 156 * with all sets here if it proves to be an issue. 157 */ 158 159 if (output == hb_set_get_empty ()) 160 return HB_VOID; 161 162 /* Return if new lookup was recursed to before. */ 163 if (recursed_lookups.has (lookup_index)) 164 return HB_VOID; 165 166 hb_set_t *old_before = before; 167 hb_set_t *old_input = input; 168 hb_set_t *old_after = after; 169 before = input = after = hb_set_get_empty (); 170 171 nesting_level_left--; 172 recurse_func (this, lookup_index); 173 nesting_level_left++; 174 175 before = old_before; 176 input = old_input; 177 after = old_after; 178 179 recursed_lookups.add (lookup_index); 180 181 return HB_VOID; 182 } 183 184 hb_face_t *face; 185 hb_set_t *before; 186 hb_set_t *input; 187 hb_set_t *after; 188 hb_set_t *output; 189 recurse_func_t recurse_func; 190 hb_set_t recursed_lookups; 191 unsigned int nesting_level_left; 192 unsigned int debug_depth; 193 194 hb_collect_glyphs_context_t (hb_face_t *face_, 195 hb_set_t *glyphs_before, /* OUT. May be NULL */ 196 hb_set_t *glyphs_input, /* OUT. May be NULL */ 197 hb_set_t *glyphs_after, /* OUT. May be NULL */ 198 hb_set_t *glyphs_output, /* OUT. May be NULL */ 199 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : 200 face (face_), 201 before (glyphs_before ? glyphs_before : hb_set_get_empty ()), 202 input (glyphs_input ? glyphs_input : hb_set_get_empty ()), 203 after (glyphs_after ? glyphs_after : hb_set_get_empty ()), 204 output (glyphs_output ? 
glyphs_output : hb_set_get_empty ()), 205 recurse_func (NULL), 206 recursed_lookups (), 207 nesting_level_left (nesting_level_left_), 208 debug_depth (0) 209 { 210 recursed_lookups.init (); 211 } 212 ~hb_collect_glyphs_context_t (void) 213 { 214 recursed_lookups.fini (); 215 } 216 217 void set_recurse_func (recurse_func_t func) { recurse_func = func; } 218 }; 219 220 221 222 #ifndef HB_DEBUG_GET_COVERAGE 223 #define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0) 224 #endif 225 226 /* XXX Can we remove this? */ 227 228 template <typename set_t> 229 struct hb_add_coverage_context_t : 230 hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE> 231 { 232 inline const char *get_name (void) { return "GET_COVERAGE"; } 233 typedef const Coverage &return_t; 234 template <typename T> 235 inline return_t dispatch (const T &obj) { return obj.get_coverage (); } 236 static return_t default_return_value (void) { return Null(Coverage); } 237 bool stop_sublookup_iteration (return_t r) const 238 { 239 r.add_coverage (set); 240 return false; 241 } 242 243 hb_add_coverage_context_t (set_t *set_) : 244 set (set_), 245 debug_depth (0) {} 246 247 set_t *set; 248 unsigned int debug_depth; 249 }; 250 251 252 253 #ifndef HB_DEBUG_APPLY 254 #define HB_DEBUG_APPLY (HB_DEBUG+0) 255 #endif 256 257 #define TRACE_APPLY(this) \ 258 hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \ 259 (&c->debug_depth, c->get_name (), this, HB_FUNC, \ 260 "idx %d gid %u lookup %d", \ 261 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index); 262 263 struct hb_apply_context_t : 264 hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY> 265 { 266 struct matcher_t 267 { 268 inline matcher_t (void) : 269 lookup_props (0), 270 ignore_zwnj (false), 271 ignore_zwj (false), 272 mask (-1), 273 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! 
*/ 274 syllable arg1(0), 275 #undef arg1 276 match_func (NULL), 277 match_data (NULL) {}; 278 279 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); 280 281 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; } 282 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; } 283 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; } 284 inline void set_mask (hb_mask_t mask_) { mask = mask_; } 285 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; } 286 inline void set_match_func (match_func_t match_func_, 287 const void *match_data_) 288 { match_func = match_func_; match_data = match_data_; } 289 290 enum may_match_t { 291 MATCH_NO, 292 MATCH_YES, 293 MATCH_MAYBE 294 }; 295 296 inline may_match_t may_match (const hb_glyph_info_t &info, 297 const USHORT *glyph_data) const 298 { 299 if (!(info.mask & mask) || 300 (syllable && syllable != info.syllable ())) 301 return MATCH_NO; 302 303 if (match_func) 304 return match_func (info.codepoint, *glyph_data, match_data) ? 
MATCH_YES : MATCH_NO; 305 306 return MATCH_MAYBE; 307 } 308 309 enum may_skip_t { 310 SKIP_NO, 311 SKIP_YES, 312 SKIP_MAYBE 313 }; 314 315 inline may_skip_t 316 may_skip (const hb_apply_context_t *c, 317 const hb_glyph_info_t &info) const 318 { 319 if (!c->check_glyph_property (&info, lookup_props)) 320 return SKIP_YES; 321 322 if (unlikely (_hb_glyph_info_is_default_ignorable (&info) && 323 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) && 324 (ignore_zwj || !_hb_glyph_info_is_zwj (&info)))) 325 return SKIP_MAYBE; 326 327 return SKIP_NO; 328 } 329 330 protected: 331 unsigned int lookup_props; 332 bool ignore_zwnj; 333 bool ignore_zwj; 334 hb_mask_t mask; 335 uint8_t syllable; 336 match_func_t match_func; 337 const void *match_data; 338 }; 339 340 struct skipping_iterator_t 341 { 342 inline void init (hb_apply_context_t *c_, bool context_match = false) 343 { 344 c = c_; 345 match_glyph_data = NULL, 346 matcher.set_match_func (NULL, NULL); 347 matcher.set_lookup_props (c->lookup_props); 348 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */ 349 matcher.set_ignore_zwnj (context_match || c->table_index == 1); 350 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */ 351 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj); 352 matcher.set_mask (context_match ? -1 : c->lookup_mask); 353 } 354 inline void set_lookup_props (unsigned int lookup_props) 355 { 356 matcher.set_lookup_props (lookup_props); 357 } 358 inline void set_match_func (matcher_t::match_func_t match_func_, 359 const void *match_data_, 360 const USHORT glyph_data[]) 361 { 362 matcher.set_match_func (match_func_, match_data_); 363 match_glyph_data = glyph_data; 364 } 365 366 inline void reset (unsigned int start_index_, 367 unsigned int num_items_) 368 { 369 idx = start_index_; 370 num_items = num_items_; 371 end = c->buffer->len; 372 matcher.set_syllable (start_index_ == c->buffer->idx ? 
c->buffer->cur().syllable () : 0); 373 } 374 375 inline void reject (void) { num_items++; match_glyph_data--; } 376 377 inline bool next (void) 378 { 379 assert (num_items > 0); 380 while (idx + num_items < end) 381 { 382 idx++; 383 const hb_glyph_info_t &info = c->buffer->info[idx]; 384 385 matcher_t::may_skip_t skip = matcher.may_skip (c, info); 386 if (unlikely (skip == matcher_t::SKIP_YES)) 387 continue; 388 389 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); 390 if (match == matcher_t::MATCH_YES || 391 (match == matcher_t::MATCH_MAYBE && 392 skip == matcher_t::SKIP_NO)) 393 { 394 num_items--; 395 match_glyph_data++; 396 return true; 397 } 398 399 if (skip == matcher_t::SKIP_NO) 400 return false; 401 } 402 return false; 403 } 404 inline bool prev (void) 405 { 406 assert (num_items > 0); 407 while (idx >= num_items) 408 { 409 idx--; 410 const hb_glyph_info_t &info = c->buffer->out_info[idx]; 411 412 matcher_t::may_skip_t skip = matcher.may_skip (c, info); 413 if (unlikely (skip == matcher_t::SKIP_YES)) 414 continue; 415 416 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); 417 if (match == matcher_t::MATCH_YES || 418 (match == matcher_t::MATCH_MAYBE && 419 skip == matcher_t::SKIP_NO)) 420 { 421 num_items--; 422 match_glyph_data++; 423 return true; 424 } 425 426 if (skip == matcher_t::SKIP_NO) 427 return false; 428 } 429 return false; 430 } 431 432 unsigned int idx; 433 protected: 434 hb_apply_context_t *c; 435 matcher_t matcher; 436 const USHORT *match_glyph_data; 437 438 unsigned int num_items; 439 unsigned int end; 440 }; 441 442 443 inline const char *get_name (void) { return "APPLY"; } 444 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index); 445 template <typename T> 446 inline return_t dispatch (const T &obj) { return obj.apply (this); } 447 static return_t default_return_value (void) { return false; } 448 bool stop_sublookup_iteration (return_t r) const { return r; } 449 
return_t recurse (unsigned int lookup_index) 450 { 451 if (unlikely (nesting_level_left == 0 || !recurse_func)) 452 return default_return_value (); 453 454 nesting_level_left--; 455 bool ret = recurse_func (this, lookup_index); 456 nesting_level_left++; 457 return ret; 458 } 459 460 unsigned int table_index; /* GSUB/GPOS */ 461 hb_font_t *font; 462 hb_face_t *face; 463 hb_buffer_t *buffer; 464 hb_direction_t direction; 465 hb_mask_t lookup_mask; 466 bool auto_zwj; 467 recurse_func_t recurse_func; 468 unsigned int nesting_level_left; 469 unsigned int lookup_props; 470 const GDEF &gdef; 471 bool has_glyph_classes; 472 skipping_iterator_t iter_input, iter_context; 473 unsigned int lookup_index; 474 unsigned int debug_depth; 475 476 477 hb_apply_context_t (unsigned int table_index_, 478 hb_font_t *font_, 479 hb_buffer_t *buffer_) : 480 table_index (table_index_), 481 font (font_), face (font->face), buffer (buffer_), 482 direction (buffer_->props.direction), 483 lookup_mask (1), 484 auto_zwj (true), 485 recurse_func (NULL), 486 nesting_level_left (HB_MAX_NESTING_LEVEL), 487 lookup_props (0), 488 gdef (*hb_ot_layout_from_face (face)->gdef), 489 has_glyph_classes (gdef.has_glyph_classes ()), 490 iter_input (), 491 iter_context (), 492 lookup_index ((unsigned int) -1), 493 debug_depth (0) {} 494 495 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; } 496 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; } 497 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; } 498 inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; } 499 inline void set_lookup_props (unsigned int lookup_props_) 500 { 501 lookup_props = lookup_props_; 502 iter_input.init (this, false); 503 iter_context.init (this, true); 504 } 505 506 inline bool 507 match_properties_mark (hb_codepoint_t glyph, 508 unsigned int glyph_props, 509 unsigned int match_props) const 510 { 511 /* If using mark filtering sets, the high 
short of 512 * match_props has the set index. 513 */ 514 if (match_props & LookupFlag::UseMarkFilteringSet) 515 return gdef.mark_set_covers (match_props >> 16, glyph); 516 517 /* The second byte of match_props has the meaning 518 * "ignore marks of attachment type different than 519 * the attachment type specified." 520 */ 521 if (match_props & LookupFlag::MarkAttachmentType) 522 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType); 523 524 return true; 525 } 526 527 inline bool 528 check_glyph_property (const hb_glyph_info_t *info, 529 unsigned int match_props) const 530 { 531 hb_codepoint_t glyph = info->codepoint; 532 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info); 533 534 /* Not covered, if, for example, glyph class is ligature and 535 * match_props includes LookupFlags::IgnoreLigatures 536 */ 537 if (glyph_props & match_props & LookupFlag::IgnoreFlags) 538 return false; 539 540 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) 541 return match_properties_mark (glyph, glyph_props, match_props); 542 543 return true; 544 } 545 546 inline void _set_glyph_props (hb_codepoint_t glyph_index, 547 unsigned int class_guess = 0, 548 bool ligature = false, 549 bool component = false) const 550 { 551 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) & 552 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE; 553 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED; 554 if (ligature) 555 { 556 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED; 557 /* In the only place that the MULTIPLIED bit is used, Uniscribe 558 * seems to only care about the "last" transformation between 559 * Ligature and Multiple substitions. Ie. if you ligate, expand, 560 * and ligate again, it forgives the multiplication and acts as 561 * if only ligation happened. As such, clear MULTIPLIED bit. 
562 */ 563 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; 564 } 565 if (component) 566 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; 567 if (likely (has_glyph_classes)) 568 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index)); 569 else if (class_guess) 570 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess); 571 } 572 573 inline void replace_glyph (hb_codepoint_t glyph_index) const 574 { 575 _set_glyph_props (glyph_index); 576 buffer->replace_glyph (glyph_index); 577 } 578 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const 579 { 580 _set_glyph_props (glyph_index); 581 buffer->cur().codepoint = glyph_index; 582 } 583 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index, 584 unsigned int class_guess) const 585 { 586 _set_glyph_props (glyph_index, class_guess, true); 587 buffer->replace_glyph (glyph_index); 588 } 589 inline void output_glyph_for_component (hb_codepoint_t glyph_index, 590 unsigned int class_guess) const 591 { 592 _set_glyph_props (glyph_index, class_guess, false, true); 593 buffer->output_glyph (glyph_index); 594 } 595 }; 596 597 598 599 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data); 600 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data); 601 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); 602 603 struct ContextClosureFuncs 604 { 605 intersects_func_t intersects; 606 }; 607 struct ContextCollectGlyphsFuncs 608 { 609 collect_glyphs_func_t collect; 610 }; 611 struct ContextApplyFuncs 612 { 613 match_func_t match; 614 }; 615 616 617 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) 618 { 619 return glyphs->has (value); 620 } 621 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data) 622 { 623 const ClassDef &class_def = 
*reinterpret_cast<const ClassDef *>(data); 624 return class_def.intersects_class (glyphs, value); 625 } 626 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) 627 { 628 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 629 return (data+coverage).intersects (glyphs); 630 } 631 632 static inline bool intersects_array (hb_closure_context_t *c, 633 unsigned int count, 634 const USHORT values[], 635 intersects_func_t intersects_func, 636 const void *intersects_data) 637 { 638 for (unsigned int i = 0; i < count; i++) 639 if (likely (!intersects_func (c->glyphs, values[i], intersects_data))) 640 return false; 641 return true; 642 } 643 644 645 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) 646 { 647 glyphs->add (value); 648 } 649 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data) 650 { 651 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 652 class_def.add_class (glyphs, value); 653 } 654 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) 655 { 656 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 657 (data+coverage).add_coverage (glyphs); 658 } 659 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED, 660 hb_set_t *glyphs, 661 unsigned int count, 662 const USHORT values[], 663 collect_glyphs_func_t collect_func, 664 const void *collect_data) 665 { 666 for (unsigned int i = 0; i < count; i++) 667 collect_func (glyphs, values[i], collect_data); 668 } 669 670 671 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED) 672 { 673 return glyph_id == value; 674 } 675 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 676 { 677 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 678 return 
class_def.get_class (glyph_id) == value; 679 } 680 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 681 { 682 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 683 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED; 684 } 685 686 static inline bool would_match_input (hb_would_apply_context_t *c, 687 unsigned int count, /* Including the first glyph (not matched) */ 688 const USHORT input[], /* Array of input values--start with second glyph */ 689 match_func_t match_func, 690 const void *match_data) 691 { 692 if (count != c->len) 693 return false; 694 695 for (unsigned int i = 1; i < count; i++) 696 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data))) 697 return false; 698 699 return true; 700 } 701 static inline bool match_input (hb_apply_context_t *c, 702 unsigned int count, /* Including the first glyph (not matched) */ 703 const USHORT input[], /* Array of input values--start with second glyph */ 704 match_func_t match_func, 705 const void *match_data, 706 unsigned int *end_offset, 707 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], 708 bool *p_is_mark_ligature = NULL, 709 unsigned int *p_total_component_count = NULL) 710 { 711 TRACE_APPLY (NULL); 712 713 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false); 714 715 hb_buffer_t *buffer = c->buffer; 716 717 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; 718 skippy_iter.reset (buffer->idx, count - 1); 719 skippy_iter.set_match_func (match_func, match_data, input); 720 721 /* 722 * This is perhaps the trickiest part of OpenType... Remarks: 723 * 724 * - If all components of the ligature were marks, we call this a mark ligature. 725 * 726 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize 727 * it as a ligature glyph. 728 * 729 * - Ligatures cannot be formed across glyphs attached to different components 730 * of previous ligatures. Eg. 
the sequence is LAM,SHADDA,LAM,FATHA,HEH, and 731 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother. 732 * However, it would be wrong to ligate that SHADDA,FATHA sequence.o 733 * There is an exception to this: If a ligature tries ligating with marks that 734 * belong to it itself, go ahead, assuming that the font designer knows what 735 * they are doing (otherwise it can break Indic stuff when a matra wants to 736 * ligate with a conjunct...) 737 */ 738 739 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur()); 740 741 unsigned int total_component_count = 0; 742 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 743 744 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); 745 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); 746 747 match_positions[0] = buffer->idx; 748 for (unsigned int i = 1; i < count; i++) 749 { 750 if (!skippy_iter.next ()) return_trace (false); 751 752 match_positions[i] = skippy_iter.idx; 753 754 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]); 755 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]); 756 757 if (first_lig_id && first_lig_comp) { 758 /* If first component was attached to a previous ligature component, 759 * all subsequent components should be attached to the same ligature 760 * component, otherwise we shouldn't ligate them. */ 761 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp) 762 return_trace (false); 763 } else { 764 /* If first component was NOT attached to a previous ligature component, 765 * all subsequent components should also NOT be attached to any ligature 766 * component, unless they are attached to the first component itself! 
*/ 767 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id)) 768 return_trace (false); 769 } 770 771 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]); 772 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]); 773 } 774 775 *end_offset = skippy_iter.idx - buffer->idx + 1; 776 777 if (p_is_mark_ligature) 778 *p_is_mark_ligature = is_mark_ligature; 779 780 if (p_total_component_count) 781 *p_total_component_count = total_component_count; 782 783 return_trace (true); 784 } 785 static inline bool ligate_input (hb_apply_context_t *c, 786 unsigned int count, /* Including the first glyph */ 787 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ 788 unsigned int match_length, 789 hb_codepoint_t lig_glyph, 790 bool is_mark_ligature, 791 unsigned int total_component_count) 792 { 793 TRACE_APPLY (NULL); 794 795 hb_buffer_t *buffer = c->buffer; 796 797 buffer->merge_clusters (buffer->idx, buffer->idx + match_length); 798 799 /* 800 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave 801 * the ligature to keep its old ligature id. This will allow it to attach to 802 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH, 803 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA wit a 804 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature 805 * later, we don't want them to lose their ligature id/component, otherwise 806 * GPOS will fail to correctly position the mark ligature on top of the 807 * LAM,LAM,HEH ligature. See: 808 * https://bugzilla.gnome.org/show_bug.cgi?id=676343 809 * 810 * - If a ligature is formed of components that some of which are also ligatures 811 * themselves, and those ligature components had marks attached to *their* 812 * components, we have to attach the marks to the new ligature component 813 * positions! Now *that*'s tricky! 
And these marks may be following the 814 * last component of the whole sequence, so we should loop forward looking 815 * for them and update them. 816 * 817 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a 818 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature 819 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature 820 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to 821 * the new ligature with a component value of 2. 822 * 823 * This in fact happened to a font... See: 824 * https://bugzilla.gnome.org/show_bug.cgi?id=437633 825 */ 826 827 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE; 828 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer); 829 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); 830 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 831 unsigned int components_so_far = last_num_components; 832 833 if (!is_mark_ligature) 834 { 835 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count); 836 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK) 837 { 838 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER); 839 } 840 } 841 c->replace_glyph_with_ligature (lig_glyph, klass); 842 843 for (unsigned int i = 1; i < count; i++) 844 { 845 while (buffer->idx < match_positions[i] && !buffer->in_error) 846 { 847 if (!is_mark_ligature) { 848 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); 849 if (this_comp == 0) 850 this_comp = last_num_components; 851 unsigned int new_lig_comp = components_so_far - last_num_components + 852 MIN (this_comp, last_num_components); 853 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp); 854 } 855 buffer->next_glyph (); 856 } 857 858 last_lig_id = 
_hb_glyph_info_get_lig_id (&buffer->cur()); 859 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 860 components_so_far += last_num_components; 861 862 /* Skip the base glyph */ 863 buffer->idx++; 864 } 865 866 if (!is_mark_ligature && last_lig_id) { 867 /* Re-adjust components for any marks following. */ 868 for (unsigned int i = buffer->idx; i < buffer->len; i++) { 869 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) { 870 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]); 871 if (!this_comp) 872 break; 873 unsigned int new_lig_comp = components_so_far - last_num_components + 874 MIN (this_comp, last_num_components); 875 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp); 876 } else 877 break; 878 } 879 } 880 return_trace (true); 881 } 882 883 static inline bool match_backtrack (hb_apply_context_t *c, 884 unsigned int count, 885 const USHORT backtrack[], 886 match_func_t match_func, 887 const void *match_data) 888 { 889 TRACE_APPLY (NULL); 890 891 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 892 skippy_iter.reset (c->buffer->backtrack_len (), count); 893 skippy_iter.set_match_func (match_func, match_data, backtrack); 894 895 for (unsigned int i = 0; i < count; i++) 896 if (!skippy_iter.prev ()) 897 return_trace (false); 898 899 return_trace (true); 900 } 901 902 static inline bool match_lookahead (hb_apply_context_t *c, 903 unsigned int count, 904 const USHORT lookahead[], 905 match_func_t match_func, 906 const void *match_data, 907 unsigned int offset) 908 { 909 TRACE_APPLY (NULL); 910 911 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 912 skippy_iter.reset (c->buffer->idx + offset - 1, count); 913 skippy_iter.set_match_func (match_func, match_data, lookahead); 914 915 for (unsigned int i = 0; i < count; i++) 916 if (!skippy_iter.next ()) 917 return_trace (false); 918 919 return_trace (true); 920 } 921 922 923 924 struct 
LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};


/* Invokes every lookup referenced by the record array through the context's
 * recurse hook; used by closure / collect-glyphs traversal. */
template <typename context_t>
static inline void recurse_lookups (context_t *c,
                                    unsigned int lookupCount,
                                    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}

/* Runs the lookup records over an already-matched input sequence, recursing
 * into child lookups and re-synchronizing `match_positions` whenever a child
 * lookup grows or shrinks the buffer. */
static inline bool apply_lookup (hb_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
                                 unsigned int lookupCount,
                                 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
                                 unsigned int match_length)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;
  unsigned int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Don't recurse to ourself at same position.
     * Note that this test is too naive, it doesn't catch longer loops. */
    if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
      continue;

    buffer->move_to (match_positions[idx]);

    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust. */

    /* end can't go back past the current match position.
     * Note: this is only true because we do NOT allow MultipleSubst
     * with zero sequence len. */
    end = MAX (MIN((int) match_positions[idx] + 1, (int) new_len), int (end) + delta);

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      /* Child lookup inserted glyphs: bail if the tracked positions would
       * overflow the fixed-size array. */
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
        break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = MAX (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
             (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  buffer->move_to (end);

  return_trace (true);
}



/* Contextual lookups */

/* Function table + opaque data used by context closure traversal. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;
};

/* Function table + opaque data used by context collect-glyphs traversal. */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};

/* Function table + opaque data used by context matching/applying. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};

/* Closure: only recurse into the lookup records if the input sequence can
 * intersect the current glyph set.  The first glyph is keyed elsewhere, so
 * only inputCount - 1 array entries are tested. */
static inline void context_closure_lookup (hb_closure_context_t *c,
                                           unsigned int inputCount, /* Including the first glyph (not matched) */
                                           const USHORT input[], /* Array of input values--start with second glyph */
                                           unsigned int lookupCount,
                                           const LookupRecord lookupRecord[],
                                           ContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
                        inputCount ? inputCount - 1 : 0, input,
                        lookup_context.funcs.intersects, lookup_context.intersects_data))
    recurse_lookups (c,
                     lookupCount, lookupRecord);
}

/* Collect-glyphs: add the input sequence to the context's input set, then
 * recurse into the referenced lookups. */
static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                  unsigned int inputCount, /* Including the first glyph (not matched) */
                                                  const USHORT input[], /* Array of input values--start with second glyph */
                                                  unsigned int lookupCount,
                                                  const LookupRecord lookupRecord[],
                                                  ContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->input,
                 inputCount ?
                 inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}

/* Would-apply: tests only the input run; lookup records are irrelevant for
 * the would-apply query. */
static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const USHORT input[], /* Array of input values--start with second glyph */
                                               unsigned int lookupCount HB_UNUSED,
                                               const LookupRecord lookupRecord[] HB_UNUSED,
                                               ContextApplyLookupContext &lookup_context)
{
  return would_match_input (c,
                            inputCount, input,
                            lookup_context.funcs.match, lookup_context.match_data);
}

/* Apply: match the input sequence and, on success, run the lookup records
 * over the matched positions. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
                                         unsigned int inputCount, /* Including the first glyph (not matched) */
                                         const USHORT input[], /* Array of input values--start with second glyph */
                                         unsigned int lookupCount,
                                         const LookupRecord lookupRecord[],
                                         ContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data,
                      &match_length, match_positions)
      && apply_lookup (c,
                       inputCount, match_positions,
                       lookupCount, lookupRecord,
                       match_length);
}

/* One context rule: inputCount-1 match values (inputZ) followed immediately
 * by lookupCount LookupRecords; the record array is not directly addressable
 * and must be located with StructAtOffset. */
struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
                            inputCount, inputZ,
                            lookupCount, lookupRecord,
                            lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
                                   inputCount, inputZ,
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* NOTE(review): plain `return` where sibling sanitizers use return_trace;
     * the returned value is identical, only trace bookkeeping differs —
     * confirm intent. */
    return inputCount.sanitize (c)
        && lookupCount.sanitize (c)
        && c->check_range (inputZ,
                           inputZ[0].static_size * inputCount
                           + lookupRecordX[0].static_size * lookupCount);
  }

  protected:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	inputZ[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
};

/* A set of Rules tried in order; the first one that applies wins. */
struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context lookup, format 1: rule sets keyed per-glyph through Coverage. */
struct ContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL
    };

    /* Only descend into rule sets whose keying glyph is in the closure set. */
    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* likely() on the not-covered branch: presumably most glyphs are not in
     * coverage — confirm this is intentional. */
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


/* Context lookup, format 2: rule sets keyed by glyph class (ClassDef). */
struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* Coverage gates applicability; the class of the current glyph picks the
     * rule set. */
    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


/* Context lookup, format 3: a single rule with one Coverage table per input
 * position. */
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if
 (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    /* Lookup records sit right after the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    context_closure_lookup (c,
                            glyphCount, (const USHORT *) (coverageZ + 1),
                            lookupCount, lookupRecord,
                            lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
                                   glyphCount, (const USHORT *) (coverageZ + 1),
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverageZ[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};

/* Format-dispatching wrapper over the three Context subtable layouts. */
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier
 */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

/* Closure funcs plus per-segment (backtrack/input/lookahead) closure data. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

/* Collect-glyphs funcs plus per-segment collect data. */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

/* Match funcs plus per-segment match data. */
struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

/* Closure: recurse into lookup records only if all three segments can
 * intersect the current glyph set. */
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
                                                 unsigned int backtrackCount,
                                                 const USHORT backtrack[],
                                                 unsigned int inputCount, /* Including the first glyph (not matched) */
                                                 const USHORT input[], /* Array of input values--start with second glyph */
                                                 unsigned int lookaheadCount,
                                                 const USHORT lookahead[],
                                                 unsigned int lookupCount,
                                                 const LookupRecord lookupRecord[],
                                                 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
                        backtrackCount, backtrack,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
                        inputCount ? inputCount - 1 : 0, input,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
                        lookaheadCount, lookahead,
                        lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    recurse_lookups (c,
                     lookupCount, lookupRecord);
}

/* Collect-glyphs: each segment feeds its own target set (before/input/after),
 * then the referenced lookups are recursed into. */
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                        unsigned int backtrackCount,
                                                        const USHORT backtrack[],
                                                        unsigned int inputCount, /* Including the first glyph (not matched) */
                                                        const USHORT input[], /* Array of input values--start with second glyph */
                                                        unsigned int lookaheadCount,
                                                        const USHORT lookahead[],
                                                        unsigned int lookupCount,
                                                        const LookupRecord lookupRecord[],
                                                        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
                 backtrackCount, backtrack,
                 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
                 lookaheadCount, lookahead,
                 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}

/* Would-apply: in zero-context mode a rule with any backtrack/lookahead is
 * rejected outright; otherwise only the input run is tested. */
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
                                                     unsigned int backtrackCount,
                                                     const USHORT backtrack[] HB_UNUSED,
                                                     unsigned int inputCount, /* Including the first glyph (not matched) */
                                                     const USHORT input[], /* Array of input values--start with second glyph */
                                                     unsigned int lookaheadCount,
                                                     const USHORT lookahead[] HB_UNUSED,
                                                     unsigned int lookupCount HB_UNUSED,
                                                     const LookupRecord lookupRecord[] HB_UNUSED,
                                                     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
                            inputCount, input,
                            lookup_context.funcs.match, lookup_context.match_data[1]);
}

/* Apply: match input first (also yields match_length for the lookahead
 * offset), then backtrack and lookahead; on success run the records. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
                                               unsigned int backtrackCount,
                                               const USHORT backtrack[],
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const USHORT input[], /* Array of input values--start with second glyph */
                                               unsigned int lookaheadCount,
                                               const USHORT lookahead[],
                                               unsigned int lookupCount,
                                               const LookupRecord lookupRecord[],
                                               ChainContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data[1],
                      &match_length, match_positions)
      && match_backtrack (c,
                          backtrackCount, backtrack,
                          lookup_context.funcs.match, lookup_context.match_data[0])
      && match_lookahead (c,
                          lookaheadCount, lookahead,
                          lookup_context.funcs.match, lookup_context.match_data[2],
                          match_length)
      && apply_lookup (c,
                       inputCount, match_positions,
                       lookupCount, lookupRecord,
                       match_length);
}

/* One chain rule: four variable-length arrays back to back; only `backtrack`
 * is directly addressable, the rest are reached with StructAfter. */
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.array,
                                  input.len, input.array,
                                  lookahead.len, lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs
 (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.array,
                                         input.len, input.array,
                                         lookahead.len, lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, backtrack.array,
                                                    input.len, input.array,
                                                    lookahead.len, lookahead.array, lookup.len,
                                                    lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, backtrack.array,
                                              input.len, input.array,
                                              lookahead.len, lookahead.array, lookup.len,
                                              lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each variable-length segment must pass sanitization before the next one
     * can safely be located with StructAfter. */
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A set of ChainRules tried in order; the first one that applies wins. */
struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* Chain context lookup, format 1: rule sets keyed per-glyph via Coverage. */
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace
 (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* Chain context lookup, format 2: separate ClassDefs for the backtrack,
 * input, and lookahead segments; rule sets keyed by input class. */
struct ChainContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* Coverage gates applicability; the input class of the current glyph
     * picks the rule set. */
    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
                  backtrackClassDef.sanitize (c, this) &&
                  inputClassDef.sanitize (c, this) &&
                  lookaheadClassDef.sanitize (c, this) &&
                  ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* Chain context lookup, format 3: one Coverage table per position in each of
 * the backtrack/input/lookahead segments; segments located via StructAfter. */
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    /* input.array + 1 skips the first coverage offset: the first glyph is
     * keyed by coverage, not matched as part of the value array. */
    chain_context_closure_lookup (c,
                                  backtrack.len, (const USHORT *) backtrack.array,
                                  input.len, (const USHORT *) input.array + 1,
                                  lookahead.len, (const USHORT *) lookahead.array,
                                  lookup.len, lookup.array,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, (const USHORT *) backtrack.array,
                                         input.len, (const USHORT *) input.array + 1,
                                         lookahead.len, (const USHORT *) lookahead.array,
                                         lookup.len, lookup.array,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, (const USHORT *) backtrack.array,
                                                    input.len, (const USHORT *) input.array + 1,
                                                    lookahead.len, (const USHORT *) lookahead.array,
                                                    lookup.len, lookup.array, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    const
OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2069 return this+input[0]; 2070 } 2071 2072 inline bool apply (hb_apply_context_t *c) const 2073 { 2074 TRACE_APPLY (this); 2075 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2076 2077 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint); 2078 if (likely (index == NOT_COVERED)) return_trace (false); 2079 2080 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2081 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2082 struct ChainContextApplyLookupContext lookup_context = { 2083 {match_coverage}, 2084 {this, this, this} 2085 }; 2086 return_trace (chain_context_apply_lookup (c, 2087 backtrack.len, (const USHORT *) backtrack.array, 2088 input.len, (const USHORT *) input.array + 1, 2089 lookahead.len, (const USHORT *) lookahead.array, 2090 lookup.len, lookup.array, lookup_context)); 2091 } 2092 2093 inline bool sanitize (hb_sanitize_context_t *c) const 2094 { 2095 TRACE_SANITIZE (this); 2096 if (!backtrack.sanitize (c, this)) return_trace (false); 2097 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2098 if (!input.sanitize (c, this)) return_trace (false); 2099 if (!input.len) return_trace (false); /* To be consistent with Context. 
*/ 2100 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2101 if (!lookahead.sanitize (c, this)) return_trace (false); 2102 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2103 return_trace (lookup.sanitize (c)); 2104 } 2105 2106 protected: 2107 USHORT format; /* Format identifier--format = 3 */ 2108 OffsetArrayOf<Coverage> 2109 backtrack; /* Array of coverage tables 2110 * in backtracking sequence, in glyph 2111 * sequence order */ 2112 OffsetArrayOf<Coverage> 2113 inputX ; /* Array of coverage 2114 * tables in input sequence, in glyph 2115 * sequence order */ 2116 OffsetArrayOf<Coverage> 2117 lookaheadX; /* Array of coverage tables 2118 * in lookahead sequence, in glyph 2119 * sequence order */ 2120 ArrayOf<LookupRecord> 2121 lookupX; /* Array of LookupRecords--in 2122 * design order) */ 2123 public: 2124 DEFINE_SIZE_MIN (10); 2125 }; 2126 2127 struct ChainContext 2128 { 2129 template <typename context_t> 2130 inline typename context_t::return_t dispatch (context_t *c) const 2131 { 2132 TRACE_DISPATCH (this, u.format); 2133 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2134 switch (u.format) { 2135 case 1: return_trace (c->dispatch (u.format1)); 2136 case 2: return_trace (c->dispatch (u.format2)); 2137 case 3: return_trace (c->dispatch (u.format3)); 2138 default:return_trace (c->default_return_value ()); 2139 } 2140 } 2141 2142 protected: 2143 union { 2144 USHORT format; /* Format identifier */ 2145 ChainContextFormat1 format1; 2146 ChainContextFormat2 format2; 2147 ChainContextFormat3 format3; 2148 } u; 2149 }; 2150 2151 2152 template <typename T> 2153 struct ExtensionFormat1 2154 { 2155 inline unsigned int get_type (void) const { return extensionLookupType; } 2156 2157 template <typename X> 2158 inline const X& get_subtable (void) const 2159 { 2160 unsigned int offset = extensionOffset; 2161 if (unlikely (!offset)) return 
Null(typename T::LookupSubTable); 2162 return StructAtOffset<typename T::LookupSubTable> (this, offset); 2163 } 2164 2165 template <typename context_t> 2166 inline typename context_t::return_t dispatch (context_t *c) const 2167 { 2168 TRACE_DISPATCH (this, format); 2169 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ()); 2170 return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ())); 2171 } 2172 2173 /* This is called from may_dispatch() above with hb_sanitize_context_t. */ 2174 inline bool sanitize (hb_sanitize_context_t *c) const 2175 { 2176 TRACE_SANITIZE (this); 2177 return_trace (c->check_struct (this) && extensionOffset != 0); 2178 } 2179 2180 protected: 2181 USHORT format; /* Format identifier. Set to 1. */ 2182 USHORT extensionLookupType; /* Lookup type of subtable referenced 2183 * by ExtensionOffset (i.e. the 2184 * extension subtable). */ 2185 ULONG extensionOffset; /* Offset to the extension subtable, 2186 * of lookup type subtable. 
*/ 2187 public: 2188 DEFINE_SIZE_STATIC (8); 2189 }; 2190 2191 template <typename T> 2192 struct Extension 2193 { 2194 inline unsigned int get_type (void) const 2195 { 2196 switch (u.format) { 2197 case 1: return u.format1.get_type (); 2198 default:return 0; 2199 } 2200 } 2201 template <typename X> 2202 inline const X& get_subtable (void) const 2203 { 2204 switch (u.format) { 2205 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> (); 2206 default:return Null(typename T::LookupSubTable); 2207 } 2208 } 2209 2210 template <typename context_t> 2211 inline typename context_t::return_t dispatch (context_t *c) const 2212 { 2213 TRACE_DISPATCH (this, u.format); 2214 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2215 switch (u.format) { 2216 case 1: return_trace (u.format1.dispatch (c)); 2217 default:return_trace (c->default_return_value ()); 2218 } 2219 } 2220 2221 protected: 2222 union { 2223 USHORT format; /* Format identifier */ 2224 ExtensionFormat1<T> format1; 2225 } u; 2226 }; 2227 2228 2229 /* 2230 * GSUB/GPOS Common 2231 */ 2232 2233 struct GSUBGPOS 2234 { 2235 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB; 2236 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS; 2237 2238 inline unsigned int get_script_count (void) const 2239 { return (this+scriptList).len; } 2240 inline const Tag& get_script_tag (unsigned int i) const 2241 { return (this+scriptList).get_tag (i); } 2242 inline unsigned int get_script_tags (unsigned int start_offset, 2243 unsigned int *script_count /* IN/OUT */, 2244 hb_tag_t *script_tags /* OUT */) const 2245 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); } 2246 inline const Script& get_script (unsigned int i) const 2247 { return (this+scriptList)[i]; } 2248 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const 2249 { return (this+scriptList).find_index (tag, index); } 2250 2251 inline unsigned int get_feature_count (void) 
const 2252 { return (this+featureList).len; } 2253 inline hb_tag_t get_feature_tag (unsigned int i) const 2254 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); } 2255 inline unsigned int get_feature_tags (unsigned int start_offset, 2256 unsigned int *feature_count /* IN/OUT */, 2257 hb_tag_t *feature_tags /* OUT */) const 2258 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); } 2259 inline const Feature& get_feature (unsigned int i) const 2260 { return (this+featureList)[i]; } 2261 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const 2262 { return (this+featureList).find_index (tag, index); } 2263 2264 inline unsigned int get_lookup_count (void) const 2265 { return (this+lookupList).len; } 2266 inline const Lookup& get_lookup (unsigned int i) const 2267 { return (this+lookupList)[i]; } 2268 2269 inline bool sanitize (hb_sanitize_context_t *c) const 2270 { 2271 TRACE_SANITIZE (this); 2272 return_trace (version.sanitize (c) && 2273 likely (version.major == 1) && 2274 scriptList.sanitize (c, this) && 2275 featureList.sanitize (c, this) && 2276 lookupList.sanitize (c, this)); 2277 } 2278 2279 protected: 2280 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set 2281 * to 0x00010000u */ 2282 OffsetTo<ScriptList> 2283 scriptList; /* ScriptList table */ 2284 OffsetTo<FeatureList> 2285 featureList; /* FeatureList table */ 2286 OffsetTo<LookupList> 2287 lookupList; /* LookupList table */ 2288 public: 2289 DEFINE_SIZE_STATIC (10); 2290 }; 2291 2292 2293 } /* namespace OT */ 2294 2295 2296 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */ 2297